Update for Vulkan-Docs 1.3.246

Jon Leech authored 2023-03-31 03:21:46 -07:00, committed by Jon Leech
parent 0c34d02861
commit 63af1cf1ee
15 changed files with 17290 additions and 3985 deletions

View file

@@ -114,7 +114,7 @@ extern "C" __declspec( dllimport ) FARPROC __stdcall GetProcAddress( HINSTANCE h
# include <span>
#endif
static_assert( VK_HEADER_VERSION == 245, "Wrong VK_HEADER_VERSION!" );
static_assert( VK_HEADER_VERSION == 246, "Wrong VK_HEADER_VERSION!" );
// 32-bit vulkan is not typesafe for non-dispatchable handles, so don't allow copy constructors on this platform by default.
// To enable this feature on 32-bit platforms please define VULKAN_HPP_TYPESAFE_CONVERSION
@@ -140,13 +140,16 @@ static_assert( VK_HEADER_VERSION == 245, "Wrong VK_HEADER_VERSION!" );
# undef MemoryBarrier
#endif
#if defined( __GNUC__ )
# define GCC_VERSION ( __GNUC__ * 10000 + __GNUC_MINOR__ * 100 + __GNUC_PATCHLEVEL__ )
#endif
#if !defined( VULKAN_HPP_HAS_UNRESTRICTED_UNIONS )
# if defined( __clang__ )
# if __has_feature( cxx_unrestricted_unions )
# define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
# endif
# elif defined( __GNUC__ )
# define GCC_VERSION ( __GNUC__ * 10000 + __GNUC_MINOR__ * 100 + __GNUC_PATCHLEVEL__ )
# if 40600 <= GCC_VERSION
# define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
# endif
@@ -181,11 +184,16 @@ static_assert( VK_HEADER_VERSION == 245, "Wrong VK_HEADER_VERSION!" );
#if defined( __cpp_constexpr )
# define VULKAN_HPP_CONSTEXPR constexpr
# if __cpp_constexpr >= 201304
# if 201304 <= __cpp_constexpr
# define VULKAN_HPP_CONSTEXPR_14 constexpr
# else
# define VULKAN_HPP_CONSTEXPR_14
# endif
# if ( 201907 <= __cpp_constexpr ) && ( !defined( __GNUC__ ) || ( 110300 < GCC_VERSION ) )
# define VULKAN_HPP_CONSTEXPR_20 constexpr
# else
# define VULKAN_HPP_CONSTEXPR_20
# endif
# define VULKAN_HPP_CONST_OR_CONSTEXPR constexpr
#else
# define VULKAN_HPP_CONSTEXPR
@@ -5842,6 +5850,35 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCmdOpticalFlowExecuteNV( commandBuffer, session, pExecuteInfo );
}
//=== VK_EXT_shader_object ===
VkResult vkCreateShadersEXT( VkDevice device,
uint32_t createInfoCount,
const VkShaderCreateInfoEXT * pCreateInfos,
const VkAllocationCallbacks * pAllocator,
VkShaderEXT * pShaders ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCreateShadersEXT( device, createInfoCount, pCreateInfos, pAllocator, pShaders );
}
void vkDestroyShaderEXT( VkDevice device, VkShaderEXT shader, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
{
return ::vkDestroyShaderEXT( device, shader, pAllocator );
}
VkResult vkGetShaderBinaryDataEXT( VkDevice device, VkShaderEXT shader, size_t * pDataSize, void * pData ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetShaderBinaryDataEXT( device, shader, pDataSize, pData );
}
void vkCmdBindShadersEXT( VkCommandBuffer commandBuffer,
uint32_t stageCount,
const VkShaderStageFlagBits * pStages,
const VkShaderEXT * pShaders ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdBindShadersEXT( commandBuffer, stageCount, pStages, pShaders );
}
//=== VK_QCOM_tile_properties ===
VkResult vkGetFramebufferTilePropertiesQCOM( VkDevice device,
@@ -6487,6 +6524,13 @@ namespace VULKAN_HPP_NAMESPACE
CompressionExhaustedEXTError( char const * message ) : SystemError( make_error_code( Result::eErrorCompressionExhaustedEXT ), message ) {}
};
class IncompatibleShaderBinaryEXTError : public SystemError
{
public:
IncompatibleShaderBinaryEXTError( std::string const & message ) : SystemError( make_error_code( Result::eErrorIncompatibleShaderBinaryEXT ), message ) {}
IncompatibleShaderBinaryEXTError( char const * message ) : SystemError( make_error_code( Result::eErrorIncompatibleShaderBinaryEXT ), message ) {}
};
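// Illustrative usage sketch (not part of this commit): with exceptions enabled, the new error
// code surfaces as vk::IncompatibleShaderBinaryEXTError, so a stale cached shader binary can be
// caught and the shader recreated from SPIR-V. `device`, `binaryCreateInfo` and `spirvCreateInfo`
// are assumed placeholders, not names introduced by this change.
vk::ShaderEXT shader;
try
{
  shader = device.createShaderEXT( binaryCreateInfo );   // codeType == eBinary
}
catch ( vk::IncompatibleShaderBinaryEXTError const & )
{
  shader = device.createShaderEXT( spirvCreateInfo );    // codeType == eSpirv, recompiled by the driver
}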
namespace
{
[[noreturn]] void throwResultException( Result result, char const * message )
@@ -6531,7 +6575,8 @@ namespace VULKAN_HPP_NAMESPACE
case Result::eErrorInvalidVideoStdParametersKHR: throw InvalidVideoStdParametersKHRError( message );
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
case Result::eErrorCompressionExhaustedEXT: throw CompressionExhaustedEXTError( message );
default: throw SystemError( make_error_code( result ) );
case Result::eErrorIncompatibleShaderBinaryEXT: throw IncompatibleShaderBinaryEXTError( message );
default: throw SystemError( make_error_code( result ), message );
}
}
} // namespace
@@ -7785,6 +7830,14 @@ namespace VULKAN_HPP_NAMESPACE
};
};
template <>
struct StructExtends<PipelineShaderStageRequiredSubgroupSizeCreateInfo, ShaderCreateInfoEXT>
{
enum
{
value = true
};
};
template <>
struct StructExtends<PhysicalDeviceInlineUniformBlockFeatures, PhysicalDeviceFeatures2>
{
enum
@@ -11765,6 +11818,32 @@ namespace VULKAN_HPP_NAMESPACE
};
};
//=== VK_EXT_shader_tile_image ===
template <>
struct StructExtends<PhysicalDeviceShaderTileImageFeaturesEXT, PhysicalDeviceFeatures2>
{
enum
{
value = true
};
};
template <>
struct StructExtends<PhysicalDeviceShaderTileImageFeaturesEXT, DeviceCreateInfo>
{
enum
{
value = true
};
};
template <>
struct StructExtends<PhysicalDeviceShaderTileImagePropertiesEXT, PhysicalDeviceProperties2>
{
enum
{
value = true
};
};
//=== VK_EXT_opacity_micromap ===
template <>
struct StructExtends<PhysicalDeviceOpacityMicromapFeaturesEXT, PhysicalDeviceFeatures2>
@@ -12375,6 +12454,32 @@ namespace VULKAN_HPP_NAMESPACE
};
};
//=== VK_EXT_shader_object ===
template <>
struct StructExtends<PhysicalDeviceShaderObjectFeaturesEXT, PhysicalDeviceFeatures2>
{
enum
{
value = true
};
};
template <>
struct StructExtends<PhysicalDeviceShaderObjectFeaturesEXT, DeviceCreateInfo>
{
enum
{
value = true
};
};
template <>
struct StructExtends<PhysicalDeviceShaderObjectPropertiesEXT, PhysicalDeviceProperties2>
{
enum
{
value = true
};
};
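// Illustrative usage sketch (not part of this commit): the StructExtends specializations above
// let the new feature structs participate in vk::StructureChain queries. `physicalDevice` is an
// assumed placeholder.
auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
                                         vk::PhysicalDeviceShaderObjectFeaturesEXT,
                                         vk::PhysicalDeviceShaderTileImageFeaturesEXT>();
bool shaderObject  = chain.get<vk::PhysicalDeviceShaderObjectFeaturesEXT>().shaderObject;
bool tileImageRead = chain.get<vk::PhysicalDeviceShaderTileImageFeaturesEXT>().shaderTileImageColorReadAccess;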
//=== VK_QCOM_tile_properties ===
template <>
struct StructExtends<PhysicalDeviceTilePropertiesFeaturesQCOM, PhysicalDeviceFeatures2>
@@ -13710,6 +13815,12 @@ namespace VULKAN_HPP_NAMESPACE
PFN_vkBindOpticalFlowSessionImageNV vkBindOpticalFlowSessionImageNV = 0;
PFN_vkCmdOpticalFlowExecuteNV vkCmdOpticalFlowExecuteNV = 0;
//=== VK_EXT_shader_object ===
PFN_vkCreateShadersEXT vkCreateShadersEXT = 0;
PFN_vkDestroyShaderEXT vkDestroyShaderEXT = 0;
PFN_vkGetShaderBinaryDataEXT vkGetShaderBinaryDataEXT = 0;
PFN_vkCmdBindShadersEXT vkCmdBindShadersEXT = 0;
//=== VK_QCOM_tile_properties ===
PFN_vkGetFramebufferTilePropertiesQCOM vkGetFramebufferTilePropertiesQCOM = 0;
PFN_vkGetDynamicRenderingTilePropertiesQCOM vkGetDynamicRenderingTilePropertiesQCOM = 0;
@@ -14992,6 +15103,12 @@ namespace VULKAN_HPP_NAMESPACE
vkBindOpticalFlowSessionImageNV = PFN_vkBindOpticalFlowSessionImageNV( vkGetInstanceProcAddr( instance, "vkBindOpticalFlowSessionImageNV" ) );
vkCmdOpticalFlowExecuteNV = PFN_vkCmdOpticalFlowExecuteNV( vkGetInstanceProcAddr( instance, "vkCmdOpticalFlowExecuteNV" ) );
//=== VK_EXT_shader_object ===
vkCreateShadersEXT = PFN_vkCreateShadersEXT( vkGetInstanceProcAddr( instance, "vkCreateShadersEXT" ) );
vkDestroyShaderEXT = PFN_vkDestroyShaderEXT( vkGetInstanceProcAddr( instance, "vkDestroyShaderEXT" ) );
vkGetShaderBinaryDataEXT = PFN_vkGetShaderBinaryDataEXT( vkGetInstanceProcAddr( instance, "vkGetShaderBinaryDataEXT" ) );
vkCmdBindShadersEXT = PFN_vkCmdBindShadersEXT( vkGetInstanceProcAddr( instance, "vkCmdBindShadersEXT" ) );
//=== VK_QCOM_tile_properties ===
vkGetFramebufferTilePropertiesQCOM = PFN_vkGetFramebufferTilePropertiesQCOM( vkGetInstanceProcAddr( instance, "vkGetFramebufferTilePropertiesQCOM" ) );
vkGetDynamicRenderingTilePropertiesQCOM =
@@ -15943,6 +16060,12 @@ namespace VULKAN_HPP_NAMESPACE
vkBindOpticalFlowSessionImageNV = PFN_vkBindOpticalFlowSessionImageNV( vkGetDeviceProcAddr( device, "vkBindOpticalFlowSessionImageNV" ) );
vkCmdOpticalFlowExecuteNV = PFN_vkCmdOpticalFlowExecuteNV( vkGetDeviceProcAddr( device, "vkCmdOpticalFlowExecuteNV" ) );
//=== VK_EXT_shader_object ===
vkCreateShadersEXT = PFN_vkCreateShadersEXT( vkGetDeviceProcAddr( device, "vkCreateShadersEXT" ) );
vkDestroyShaderEXT = PFN_vkDestroyShaderEXT( vkGetDeviceProcAddr( device, "vkDestroyShaderEXT" ) );
vkGetShaderBinaryDataEXT = PFN_vkGetShaderBinaryDataEXT( vkGetDeviceProcAddr( device, "vkGetShaderBinaryDataEXT" ) );
vkCmdBindShadersEXT = PFN_vkCmdBindShadersEXT( vkGetDeviceProcAddr( device, "vkCmdBindShadersEXT" ) );
//=== VK_QCOM_tile_properties ===
vkGetFramebufferTilePropertiesQCOM = PFN_vkGetFramebufferTilePropertiesQCOM( vkGetDeviceProcAddr( device, "vkGetFramebufferTilePropertiesQCOM" ) );
vkGetDynamicRenderingTilePropertiesQCOM =

View file

@@ -485,6 +485,51 @@ typedef struct VkVideoEncodeH265RateControlLayerInfoEXT {
} VkVideoEncodeH265RateControlLayerInfoEXT;
#define VK_NV_displacement_micromap 1
#define VK_NV_DISPLACEMENT_MICROMAP_SPEC_VERSION 1
#define VK_NV_DISPLACEMENT_MICROMAP_EXTENSION_NAME "VK_NV_displacement_micromap"
typedef enum VkDisplacementMicromapFormatNV {
VK_DISPLACEMENT_MICROMAP_FORMAT_64_TRIANGLES_64_BYTES_NV = 1,
VK_DISPLACEMENT_MICROMAP_FORMAT_256_TRIANGLES_128_BYTES_NV = 2,
VK_DISPLACEMENT_MICROMAP_FORMAT_1024_TRIANGLES_128_BYTES_NV = 3,
VK_DISPLACEMENT_MICROMAP_FORMAT_MAX_ENUM_NV = 0x7FFFFFFF
} VkDisplacementMicromapFormatNV;
typedef struct VkPhysicalDeviceDisplacementMicromapFeaturesNV {
VkStructureType sType;
void* pNext;
VkBool32 displacementMicromap;
} VkPhysicalDeviceDisplacementMicromapFeaturesNV;
typedef struct VkPhysicalDeviceDisplacementMicromapPropertiesNV {
VkStructureType sType;
void* pNext;
uint32_t maxDisplacementMicromapSubdivisionLevel;
} VkPhysicalDeviceDisplacementMicromapPropertiesNV;
typedef struct VkAccelerationStructureTrianglesDisplacementMicromapNV {
VkStructureType sType;
void* pNext;
VkFormat displacementBiasAndScaleFormat;
VkFormat displacementVectorFormat;
VkDeviceOrHostAddressConstKHR displacementBiasAndScaleBuffer;
VkDeviceSize displacementBiasAndScaleStride;
VkDeviceOrHostAddressConstKHR displacementVectorBuffer;
VkDeviceSize displacementVectorStride;
VkDeviceOrHostAddressConstKHR displacedMicromapPrimitiveFlags;
VkDeviceSize displacedMicromapPrimitiveFlagsStride;
VkIndexType indexType;
VkDeviceOrHostAddressConstKHR indexBuffer;
VkDeviceSize indexStride;
uint32_t baseTriangle;
uint32_t usageCountsCount;
const VkMicromapUsageEXT* pUsageCounts;
const VkMicromapUsageEXT* const* ppUsageCounts;
VkMicromapEXT micromap;
} VkAccelerationStructureTrianglesDisplacementMicromapNV;
#ifdef __cplusplus
}
#endif
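// Illustrative usage sketch (not part of this commit): VK_NV_displacement_micromap now lives in
// the beta header, so VK_ENABLE_BETA_EXTENSIONS must be defined to see these types. By analogy
// with the opacity-micromap path, the struct is assumed to be chained into the triangle geometry
// of an acceleration-structure build; `micromap` and the remaining buffer fields are placeholders.
VkAccelerationStructureTrianglesDisplacementMicromapNV displacement = {};
displacement.sType                    = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_TRIANGLES_DISPLACEMENT_MICROMAP_NV;
displacement.micromap                 = micromap;
displacement.displacementVectorFormat = VK_FORMAT_R16G16B16A16_SFLOAT;   // placeholder format
// ... displacement vector / bias+scale buffers, strides and usage counts go here ...

VkAccelerationStructureGeometryTrianglesDataKHR triangles = {};
triangles.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_TRIANGLES_DATA_KHR;
triangles.pNext = &displacement;   // assumption: extends the triangles data like the opacity-micromap struct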

View file

@@ -68,7 +68,7 @@ extern "C" {
#define VK_API_VERSION_1_0 VK_MAKE_API_VERSION(0, 1, 0, 0)// Patch version should always be set to 0
// Version of this file
#define VK_HEADER_VERSION 245
#define VK_HEADER_VERSION 246
// Complete version of this file
#define VK_HEADER_VERSION_COMPLETE VK_MAKE_API_VERSION(0, 1, 3, VK_HEADER_VERSION)
@@ -185,6 +185,7 @@ typedef enum VkResult {
VK_ERROR_INVALID_VIDEO_STD_PARAMETERS_KHR = -1000299000,
#endif
VK_ERROR_COMPRESSION_EXHAUSTED_EXT = -1000338000,
VK_ERROR_INCOMPATIBLE_SHADER_BINARY_EXT = 1000482000,
VK_ERROR_OUT_OF_POOL_MEMORY_KHR = VK_ERROR_OUT_OF_POOL_MEMORY,
VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR = VK_ERROR_INVALID_EXTERNAL_HANDLE,
VK_ERROR_FRAGMENTATION_EXT = VK_ERROR_FRAGMENTATION,
@@ -933,6 +934,8 @@ typedef enum VkStructureType {
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTI_DRAW_FEATURES_EXT = 1000392000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTI_DRAW_PROPERTIES_EXT = 1000392001,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_2D_VIEW_OF_3D_FEATURES_EXT = 1000393000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TILE_IMAGE_FEATURES_EXT = 1000395000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TILE_IMAGE_PROPERTIES_EXT = 1000395001,
VK_STRUCTURE_TYPE_MICROMAP_BUILD_INFO_EXT = 1000396000,
VK_STRUCTURE_TYPE_MICROMAP_VERSION_INFO_EXT = 1000396001,
VK_STRUCTURE_TYPE_COPY_MICROMAP_INFO_EXT = 1000396002,
@@ -999,6 +1002,9 @@ typedef enum VkStructureType {
VK_STRUCTURE_TYPE_OPTICAL_FLOW_SESSION_CREATE_PRIVATE_DATA_INFO_NV = 1000464010,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LEGACY_DITHERING_FEATURES_EXT = 1000465000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_PROTECTED_ACCESS_FEATURES_EXT = 1000466000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_OBJECT_FEATURES_EXT = 1000482000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_OBJECT_PROPERTIES_EXT = 1000482001,
VK_STRUCTURE_TYPE_SHADER_CREATE_INFO_EXT = 1000482002,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TILE_PROPERTIES_FEATURES_QCOM = 1000484000,
VK_STRUCTURE_TYPE_TILE_PROPERTIES_QCOM = 1000484001,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_AMIGO_PROFILING_FEATURES_SEC = 1000485000,
@@ -1186,6 +1192,7 @@ typedef enum VkStructureType {
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES,
VK_STRUCTURE_TYPE_DEVICE_BUFFER_MEMORY_REQUIREMENTS_KHR = VK_STRUCTURE_TYPE_DEVICE_BUFFER_MEMORY_REQUIREMENTS,
VK_STRUCTURE_TYPE_DEVICE_IMAGE_MEMORY_REQUIREMENTS_KHR = VK_STRUCTURE_TYPE_DEVICE_IMAGE_MEMORY_REQUIREMENTS,
VK_STRUCTURE_TYPE_SHADER_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO,
VK_STRUCTURE_TYPE_MAX_ENUM = 0x7FFFFFFF
} VkStructureType;
@@ -1290,6 +1297,7 @@ typedef enum VkObjectType {
VK_OBJECT_TYPE_BUFFER_COLLECTION_FUCHSIA = 1000366000,
VK_OBJECT_TYPE_MICROMAP_EXT = 1000396000,
VK_OBJECT_TYPE_OPTICAL_FLOW_SESSION_NV = 1000464000,
VK_OBJECT_TYPE_SHADER_EXT = 1000482000,
VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE,
VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION,
VK_OBJECT_TYPE_PRIVATE_DATA_SLOT_EXT = VK_OBJECT_TYPE_PRIVATE_DATA_SLOT,
@@ -15234,6 +15242,27 @@ typedef struct VkPhysicalDeviceImage2DViewOf3DFeaturesEXT {
#define VK_EXT_shader_tile_image 1
#define VK_EXT_SHADER_TILE_IMAGE_SPEC_VERSION 1
#define VK_EXT_SHADER_TILE_IMAGE_EXTENSION_NAME "VK_EXT_shader_tile_image"
typedef struct VkPhysicalDeviceShaderTileImageFeaturesEXT {
VkStructureType sType;
void* pNext;
VkBool32 shaderTileImageColorReadAccess;
VkBool32 shaderTileImageDepthReadAccess;
VkBool32 shaderTileImageStencilReadAccess;
} VkPhysicalDeviceShaderTileImageFeaturesEXT;
typedef struct VkPhysicalDeviceShaderTileImagePropertiesEXT {
VkStructureType sType;
void* pNext;
VkBool32 shaderTileImageCoherentReadAccelerated;
VkBool32 shaderTileImageReadSampleFromPixelRateInvocation;
VkBool32 shaderTileImageReadFromHelperInvocation;
} VkPhysicalDeviceShaderTileImagePropertiesEXT;
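// Illustrative usage sketch (not part of this commit): the new tile-image feature and property
// structs are queried through the usual pNext chains. `physicalDevice` is an assumed placeholder.
VkPhysicalDeviceShaderTileImageFeaturesEXT tileImageFeatures = {};
tileImageFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TILE_IMAGE_FEATURES_EXT;

VkPhysicalDeviceFeatures2 features2 = {};
features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
features2.pNext = &tileImageFeatures;
vkGetPhysicalDeviceFeatures2( physicalDevice, &features2 );

VkPhysicalDeviceShaderTileImagePropertiesEXT tileImageProperties = {};
tileImageProperties.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TILE_IMAGE_PROPERTIES_EXT;

VkPhysicalDeviceProperties2 properties2 = {};
properties2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
properties2.pNext = &tileImageProperties;
vkGetPhysicalDeviceProperties2( physicalDevice, &properties2 );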
#define VK_EXT_opacity_micromap 1
VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkMicromapEXT)
#define VK_EXT_OPACITY_MICROMAP_SPEC_VERSION 2
@@ -15505,51 +15534,6 @@ VKAPI_ATTR void VKAPI_CALL vkGetMicromapBuildSizesEXT(
#endif
#define VK_NV_displacement_micromap 1
#define VK_NV_DISPLACEMENT_MICROMAP_SPEC_VERSION 1
#define VK_NV_DISPLACEMENT_MICROMAP_EXTENSION_NAME "VK_NV_displacement_micromap"
typedef enum VkDisplacementMicromapFormatNV {
VK_DISPLACEMENT_MICROMAP_FORMAT_64_TRIANGLES_64_BYTES_NV = 1,
VK_DISPLACEMENT_MICROMAP_FORMAT_256_TRIANGLES_128_BYTES_NV = 2,
VK_DISPLACEMENT_MICROMAP_FORMAT_1024_TRIANGLES_128_BYTES_NV = 3,
VK_DISPLACEMENT_MICROMAP_FORMAT_MAX_ENUM_NV = 0x7FFFFFFF
} VkDisplacementMicromapFormatNV;
typedef struct VkPhysicalDeviceDisplacementMicromapFeaturesNV {
VkStructureType sType;
void* pNext;
VkBool32 displacementMicromap;
} VkPhysicalDeviceDisplacementMicromapFeaturesNV;
typedef struct VkPhysicalDeviceDisplacementMicromapPropertiesNV {
VkStructureType sType;
void* pNext;
uint32_t maxDisplacementMicromapSubdivisionLevel;
} VkPhysicalDeviceDisplacementMicromapPropertiesNV;
typedef struct VkAccelerationStructureTrianglesDisplacementMicromapNV {
VkStructureType sType;
void* pNext;
VkFormat displacementBiasAndScaleFormat;
VkFormat displacementVectorFormat;
VkDeviceOrHostAddressConstKHR displacementBiasAndScaleBuffer;
VkDeviceSize displacementBiasAndScaleStride;
VkDeviceOrHostAddressConstKHR displacementVectorBuffer;
VkDeviceSize displacementVectorStride;
VkDeviceOrHostAddressConstKHR displacedMicromapPrimitiveFlags;
VkDeviceSize displacedMicromapPrimitiveFlagsStride;
VkIndexType indexType;
VkDeviceOrHostAddressConstKHR indexBuffer;
VkDeviceSize indexStride;
uint32_t baseTriangle;
uint32_t usageCountsCount;
const VkMicromapUsageEXT* pUsageCounts;
const VkMicromapUsageEXT* const* ppUsageCounts;
VkMicromapEXT micromap;
} VkAccelerationStructureTrianglesDisplacementMicromapNV;
#define VK_EXT_load_store_op_none 1
#define VK_EXT_LOAD_STORE_OP_NONE_SPEC_VERSION 1
#define VK_EXT_LOAD_STORE_OP_NONE_EXTENSION_NAME "VK_EXT_load_store_op_none"
@@ -16458,6 +16442,92 @@ typedef struct VkPhysicalDevicePipelineProtectedAccessFeaturesEXT {
#define VK_EXT_shader_object 1
VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkShaderEXT)
#define VK_EXT_SHADER_OBJECT_SPEC_VERSION 1
#define VK_EXT_SHADER_OBJECT_EXTENSION_NAME "VK_EXT_shader_object"
typedef enum VkShaderCodeTypeEXT {
VK_SHADER_CODE_TYPE_BINARY_EXT = 0,
VK_SHADER_CODE_TYPE_SPIRV_EXT = 1,
VK_SHADER_CODE_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF
} VkShaderCodeTypeEXT;
typedef enum VkShaderCreateFlagBitsEXT {
VK_SHADER_CREATE_LINK_STAGE_BIT_EXT = 0x00000001,
VK_SHADER_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT = 0x00000002,
VK_SHADER_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT = 0x00000004,
VK_SHADER_CREATE_NO_TASK_SHADER_BIT_EXT = 0x00000008,
VK_SHADER_CREATE_DISPATCH_BASE_BIT_EXT = 0x00000010,
VK_SHADER_CREATE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_EXT = 0x00000020,
VK_SHADER_CREATE_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT = 0x00000040,
VK_SHADER_CREATE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
} VkShaderCreateFlagBitsEXT;
typedef VkFlags VkShaderCreateFlagsEXT;
typedef struct VkPhysicalDeviceShaderObjectFeaturesEXT {
VkStructureType sType;
void* pNext;
VkBool32 shaderObject;
} VkPhysicalDeviceShaderObjectFeaturesEXT;
typedef struct VkPhysicalDeviceShaderObjectPropertiesEXT {
VkStructureType sType;
void* pNext;
uint8_t shaderBinaryUUID[VK_UUID_SIZE];
uint32_t shaderBinaryVersion;
} VkPhysicalDeviceShaderObjectPropertiesEXT;
typedef struct VkShaderCreateInfoEXT {
VkStructureType sType;
const void* pNext;
VkShaderCreateFlagsEXT flags;
VkShaderStageFlagBits stage;
VkShaderStageFlags nextStage;
VkShaderCodeTypeEXT codeType;
size_t codeSize;
const void* pCode;
const char* pName;
uint32_t setLayoutCount;
const VkDescriptorSetLayout* pSetLayouts;
uint32_t pushConstantRangeCount;
const VkPushConstantRange* pPushConstantRanges;
const VkSpecializationInfo* pSpecializationInfo;
} VkShaderCreateInfoEXT;
typedef VkPipelineShaderStageRequiredSubgroupSizeCreateInfo VkShaderRequiredSubgroupSizeCreateInfoEXT;
typedef VkResult (VKAPI_PTR *PFN_vkCreateShadersEXT)(VkDevice device, uint32_t createInfoCount, const VkShaderCreateInfoEXT* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkShaderEXT* pShaders);
typedef void (VKAPI_PTR *PFN_vkDestroyShaderEXT)(VkDevice device, VkShaderEXT shader, const VkAllocationCallbacks* pAllocator);
typedef VkResult (VKAPI_PTR *PFN_vkGetShaderBinaryDataEXT)(VkDevice device, VkShaderEXT shader, size_t* pDataSize, void* pData);
typedef void (VKAPI_PTR *PFN_vkCmdBindShadersEXT)(VkCommandBuffer commandBuffer, uint32_t stageCount, const VkShaderStageFlagBits* pStages, const VkShaderEXT* pShaders);
#ifndef VK_NO_PROTOTYPES
VKAPI_ATTR VkResult VKAPI_CALL vkCreateShadersEXT(
VkDevice device,
uint32_t createInfoCount,
const VkShaderCreateInfoEXT* pCreateInfos,
const VkAllocationCallbacks* pAllocator,
VkShaderEXT* pShaders);
VKAPI_ATTR void VKAPI_CALL vkDestroyShaderEXT(
VkDevice device,
VkShaderEXT shader,
const VkAllocationCallbacks* pAllocator);
VKAPI_ATTR VkResult VKAPI_CALL vkGetShaderBinaryDataEXT(
VkDevice device,
VkShaderEXT shader,
size_t* pDataSize,
void* pData);
VKAPI_ATTR void VKAPI_CALL vkCmdBindShadersEXT(
VkCommandBuffer commandBuffer,
uint32_t stageCount,
const VkShaderStageFlagBits* pStages,
const VkShaderEXT* pShaders);
#endif
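// Illustrative usage sketch (not part of this commit): creating a linked vertex/fragment pair
// from SPIR-V and binding it without a pipeline object. `device`, `cmd`, `setLayout`,
// `vertSpirv` and `fragSpirv` (std::vector<uint32_t>) are assumed placeholders.
VkShaderCreateInfoEXT infos[2] = {};
for ( uint32_t i = 0; i < 2; ++i )
{
    infos[i].sType          = VK_STRUCTURE_TYPE_SHADER_CREATE_INFO_EXT;
    infos[i].flags          = VK_SHADER_CREATE_LINK_STAGE_BIT_EXT;   // link both stages together
    infos[i].codeType       = VK_SHADER_CODE_TYPE_SPIRV_EXT;
    infos[i].pName          = "main";
    infos[i].setLayoutCount = 1;
    infos[i].pSetLayouts    = &setLayout;
}
infos[0].stage     = VK_SHADER_STAGE_VERTEX_BIT;
infos[0].nextStage = VK_SHADER_STAGE_FRAGMENT_BIT;
infos[0].codeSize  = vertSpirv.size() * sizeof( uint32_t );
infos[0].pCode     = vertSpirv.data();
infos[1].stage     = VK_SHADER_STAGE_FRAGMENT_BIT;
infos[1].codeSize  = fragSpirv.size() * sizeof( uint32_t );
infos[1].pCode     = fragSpirv.data();

VkShaderEXT shaders[2];
VkResult result = vkCreateShadersEXT( device, 2, infos, NULL, shaders );
// handle `result` here, including VK_ERROR_INCOMPATIBLE_SHADER_BINARY_EXT for binary code

VkShaderStageFlagBits stages[2] = { VK_SHADER_STAGE_VERTEX_BIT, VK_SHADER_STAGE_FRAGMENT_BIT };
vkCmdBindShadersEXT( cmd, 2, stages, shaders );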
#define VK_QCOM_tile_properties 1
#define VK_QCOM_TILE_PROPERTIES_SPEC_VERSION 1
#define VK_QCOM_TILE_PROPERTIES_EXTENSION_NAME "VK_QCOM_tile_properties"

View file

@@ -80,7 +80,8 @@ namespace VULKAN_HPP_NAMESPACE
#if defined( VK_ENABLE_BETA_EXTENSIONS )
eErrorInvalidVideoStdParametersKHR = VK_ERROR_INVALID_VIDEO_STD_PARAMETERS_KHR,
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
eErrorCompressionExhaustedEXT = VK_ERROR_COMPRESSION_EXHAUSTED_EXT
eErrorCompressionExhaustedEXT = VK_ERROR_COMPRESSION_EXHAUSTED_EXT,
eErrorIncompatibleShaderBinaryEXT = VK_ERROR_INCOMPATIBLE_SHADER_BINARY_EXT
};
enum class StructureType
@@ -989,6 +990,8 @@ namespace VULKAN_HPP_NAMESPACE
ePhysicalDeviceMultiDrawFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTI_DRAW_FEATURES_EXT,
ePhysicalDeviceMultiDrawPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTI_DRAW_PROPERTIES_EXT,
ePhysicalDeviceImage2DViewOf3DFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_2D_VIEW_OF_3D_FEATURES_EXT,
ePhysicalDeviceShaderTileImageFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TILE_IMAGE_FEATURES_EXT,
ePhysicalDeviceShaderTileImagePropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TILE_IMAGE_PROPERTIES_EXT,
eMicromapBuildInfoEXT = VK_STRUCTURE_TYPE_MICROMAP_BUILD_INFO_EXT,
eMicromapVersionInfoEXT = VK_STRUCTURE_TYPE_MICROMAP_VERSION_INFO_EXT,
eCopyMicromapInfoEXT = VK_STRUCTURE_TYPE_COPY_MICROMAP_INFO_EXT,
@@ -1055,6 +1058,10 @@ namespace VULKAN_HPP_NAMESPACE
eOpticalFlowSessionCreatePrivateDataInfoNV = VK_STRUCTURE_TYPE_OPTICAL_FLOW_SESSION_CREATE_PRIVATE_DATA_INFO_NV,
ePhysicalDeviceLegacyDitheringFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LEGACY_DITHERING_FEATURES_EXT,
ePhysicalDevicePipelineProtectedAccessFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_PROTECTED_ACCESS_FEATURES_EXT,
ePhysicalDeviceShaderObjectFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_OBJECT_FEATURES_EXT,
ePhysicalDeviceShaderObjectPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_OBJECT_PROPERTIES_EXT,
eShaderCreateInfoEXT = VK_STRUCTURE_TYPE_SHADER_CREATE_INFO_EXT,
eShaderRequiredSubgroupSizeCreateInfoEXT = VK_STRUCTURE_TYPE_SHADER_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT,
ePhysicalDeviceTilePropertiesFeaturesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TILE_PROPERTIES_FEATURES_QCOM,
eTilePropertiesQCOM = VK_STRUCTURE_TYPE_TILE_PROPERTIES_QCOM,
ePhysicalDeviceAmigoProfilingFeaturesSEC = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_AMIGO_PROFILING_FEATURES_SEC,
@@ -1130,7 +1137,8 @@ namespace VULKAN_HPP_NAMESPACE
eBufferCollectionFUCHSIA = VK_OBJECT_TYPE_BUFFER_COLLECTION_FUCHSIA,
#endif /*VK_USE_PLATFORM_FUCHSIA*/
eMicromapEXT = VK_OBJECT_TYPE_MICROMAP_EXT,
eOpticalFlowSessionNV = VK_OBJECT_TYPE_OPTICAL_FLOW_SESSION_NV
eOpticalFlowSessionNV = VK_OBJECT_TYPE_OPTICAL_FLOW_SESSION_NV,
eShaderEXT = VK_OBJECT_TYPE_SHADER_EXT
};
enum class VendorId
@@ -6471,6 +6479,37 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR OpticalFlowExecuteFlagsNV allFlags = OpticalFlowExecuteFlagBitsNV::eDisableTemporalHints;
};
//=== VK_EXT_shader_object ===
enum class ShaderCreateFlagBitsEXT : VkShaderCreateFlagsEXT
{
eLinkStage = VK_SHADER_CREATE_LINK_STAGE_BIT_EXT,
eAllowVaryingSubgroupSize = VK_SHADER_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT,
eRequireFullSubgroups = VK_SHADER_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT,
eNoTaskShader = VK_SHADER_CREATE_NO_TASK_SHADER_BIT_EXT,
eDispatchBase = VK_SHADER_CREATE_DISPATCH_BASE_BIT_EXT,
eFragmentShadingRateAttachment = VK_SHADER_CREATE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_EXT,
eFragmentDensityMapAttachment = VK_SHADER_CREATE_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT
};
using ShaderCreateFlagsEXT = Flags<ShaderCreateFlagBitsEXT>;
template <>
struct FlagTraits<ShaderCreateFlagBitsEXT>
{
static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
static VULKAN_HPP_CONST_OR_CONSTEXPR ShaderCreateFlagsEXT allFlags =
ShaderCreateFlagBitsEXT::eLinkStage | ShaderCreateFlagBitsEXT::eAllowVaryingSubgroupSize | ShaderCreateFlagBitsEXT::eRequireFullSubgroups |
ShaderCreateFlagBitsEXT::eNoTaskShader | ShaderCreateFlagBitsEXT::eDispatchBase | ShaderCreateFlagBitsEXT::eFragmentShadingRateAttachment |
ShaderCreateFlagBitsEXT::eFragmentDensityMapAttachment;
};
enum class ShaderCodeTypeEXT
{
eBinary = VK_SHADER_CODE_TYPE_BINARY_EXT,
eSpirv = VK_SHADER_CODE_TYPE_SPIRV_EXT
};
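// Illustrative usage sketch (not part of this commit): the FlagTraits specialization above makes
// the new create-flag bits combinable with the usual bitwise operators.
vk::ShaderCreateFlagsEXT createFlags = vk::ShaderCreateFlagBitsEXT::eLinkStage | vk::ShaderCreateFlagBitsEXT::eRequireFullSubgroups;
bool usesLinking                     = static_cast<bool>( createFlags & vk::ShaderCreateFlagBitsEXT::eLinkStage );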
//=== VK_NV_ray_tracing_invocation_reorder ===
enum class RayTracingInvocationReorderModeNV

View file

@@ -0,0 +1,321 @@
// Copyright 2015-2023 The Khronos Group Inc.
//
// SPDX-License-Identifier: Apache-2.0 OR MIT
//
// This header is generated from the Khronos Vulkan XML API Registry.
#ifndef VULKAN_EXTENSION_INSPECTION_HPP
#define VULKAN_EXTENSION_INSPECTION_HPP
#include <vulkan/vulkan.hpp>
namespace VULKAN_HPP_NAMESPACE
{
//======================================
//=== Extension inspection functions ===
//======================================
VULKAN_HPP_CONSTEXPR_20 std::string getExtensionDeprecatedBy( std::string const & name );
VULKAN_HPP_CONSTEXPR_20 bool isDeviceExtension( std::string const & name );
VULKAN_HPP_CONSTEXPR_20 bool isExtensionDeprecated( std::string const & name );
VULKAN_HPP_CONSTEXPR_20 bool isInstanceExtension( std::string const & name );
//=====================================================
//=== Extension inspection function implementations ===
//=====================================================
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string getExtensionDeprecatedBy( std::string const & name )
{
if ( name == "VK_EXT_debug_report" )
{
return "VK_EXT_debug_utils";
}
if ( name == "VK_NV_glsl_shader" )
{
return "";
}
if ( name == "VK_NV_dedicated_allocation" )
{
return "VK_KHR_dedicated_allocation";
}
if ( name == "VK_AMD_gpu_shader_half_float" )
{
return "VK_KHR_shader_float16_int8";
}
if ( name == "VK_IMG_format_pvrtc" )
{
return "";
}
if ( name == "VK_NV_external_memory_capabilities" )
{
return "VK_KHR_external_memory_capabilities";
}
if ( name == "VK_NV_external_memory" )
{
return "VK_KHR_external_memory";
}
#if defined( VK_USE_PLATFORM_WIN32_KHR )
if ( name == "VK_NV_external_memory_win32" )
{
return "VK_KHR_external_memory_win32";
}
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
if ( name == "VK_EXT_validation_flags" )
{
return "VK_EXT_validation_features";
}
if ( name == "VK_EXT_shader_subgroup_ballot" )
{
return "VK_VERSION_1_2";
}
if ( name == "VK_EXT_shader_subgroup_vote" )
{
return "VK_VERSION_1_1";
}
#if defined( VK_USE_PLATFORM_IOS_MVK )
if ( name == "VK_MVK_ios_surface" )
{
return "VK_EXT_metal_surface";
}
#endif /*VK_USE_PLATFORM_IOS_MVK*/
#if defined( VK_USE_PLATFORM_MACOS_MVK )
if ( name == "VK_MVK_macos_surface" )
{
return "VK_EXT_metal_surface";
}
#endif /*VK_USE_PLATFORM_MACOS_MVK*/
if ( name == "VK_AMD_gpu_shader_int16" )
{
return "VK_KHR_shader_float16_int8";
}
if ( name == "VK_EXT_buffer_device_address" )
{
return "VK_KHR_buffer_device_address";
}
return "";
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 bool isDeviceExtension( std::string const & name )
{
return ( name == "VK_KHR_swapchain" ) || ( name == "VK_KHR_display_swapchain" ) || ( name == "VK_NV_glsl_shader" ) ||
( name == "VK_EXT_depth_range_unrestricted" ) || ( name == "VK_KHR_sampler_mirror_clamp_to_edge" ) || ( name == "VK_IMG_filter_cubic" ) ||
( name == "VK_AMD_rasterization_order" ) || ( name == "VK_AMD_shader_trinary_minmax" ) || ( name == "VK_AMD_shader_explicit_vertex_parameter" ) ||
( name == "VK_EXT_debug_marker" ) || ( name == "VK_KHR_video_queue" ) || ( name == "VK_KHR_video_decode_queue" ) ||
( name == "VK_AMD_gcn_shader" ) || ( name == "VK_NV_dedicated_allocation" ) || ( name == "VK_EXT_transform_feedback" ) ||
( name == "VK_NVX_binary_import" ) || ( name == "VK_NVX_image_view_handle" ) || ( name == "VK_AMD_draw_indirect_count" ) ||
( name == "VK_AMD_negative_viewport_height" ) || ( name == "VK_AMD_gpu_shader_half_float" ) || ( name == "VK_AMD_shader_ballot" ) ||
#if defined( VK_ENABLE_BETA_EXTENSIONS )
( name == "VK_EXT_video_encode_h264" ) || ( name == "VK_EXT_video_encode_h265" ) ||
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
( name == "VK_KHR_video_decode_h264" ) || ( name == "VK_AMD_texture_gather_bias_lod" ) || ( name == "VK_AMD_shader_info" ) ||
( name == "VK_KHR_dynamic_rendering" ) || ( name == "VK_AMD_shader_image_load_store_lod" ) || ( name == "VK_NV_corner_sampled_image" ) ||
( name == "VK_KHR_multiview" ) || ( name == "VK_IMG_format_pvrtc" ) || ( name == "VK_NV_external_memory" ) ||
#if defined( VK_USE_PLATFORM_WIN32_KHR )
( name == "VK_NV_external_memory_win32" ) || ( name == "VK_NV_win32_keyed_mutex" ) ||
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
( name == "VK_KHR_device_group" ) || ( name == "VK_KHR_shader_draw_parameters" ) || ( name == "VK_EXT_shader_subgroup_ballot" ) ||
( name == "VK_EXT_shader_subgroup_vote" ) || ( name == "VK_EXT_texture_compression_astc_hdr" ) || ( name == "VK_EXT_astc_decode_mode" ) ||
( name == "VK_EXT_pipeline_robustness" ) || ( name == "VK_KHR_maintenance1" ) || ( name == "VK_KHR_external_memory" ) ||
#if defined( VK_USE_PLATFORM_WIN32_KHR )
( name == "VK_KHR_external_memory_win32" ) ||
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
( name == "VK_KHR_external_memory_fd" ) ||
#if defined( VK_USE_PLATFORM_WIN32_KHR )
( name == "VK_KHR_win32_keyed_mutex" ) ||
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
( name == "VK_KHR_external_semaphore" ) ||
#if defined( VK_USE_PLATFORM_WIN32_KHR )
( name == "VK_KHR_external_semaphore_win32" ) ||
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
( name == "VK_KHR_external_semaphore_fd" ) || ( name == "VK_KHR_push_descriptor" ) || ( name == "VK_EXT_conditional_rendering" ) ||
( name == "VK_KHR_shader_float16_int8" ) || ( name == "VK_KHR_16bit_storage" ) || ( name == "VK_KHR_incremental_present" ) ||
( name == "VK_KHR_descriptor_update_template" ) || ( name == "VK_NV_clip_space_w_scaling" ) || ( name == "VK_EXT_display_control" ) ||
( name == "VK_GOOGLE_display_timing" ) || ( name == "VK_NV_sample_mask_override_coverage" ) || ( name == "VK_NV_geometry_shader_passthrough" ) ||
( name == "VK_NV_viewport_array2" ) || ( name == "VK_NVX_multiview_per_view_attributes" ) || ( name == "VK_NV_viewport_swizzle" ) ||
( name == "VK_EXT_discard_rectangles" ) || ( name == "VK_EXT_conservative_rasterization" ) || ( name == "VK_EXT_depth_clip_enable" ) ||
( name == "VK_EXT_hdr_metadata" ) || ( name == "VK_KHR_imageless_framebuffer" ) || ( name == "VK_KHR_create_renderpass2" ) ||
( name == "VK_KHR_shared_presentable_image" ) || ( name == "VK_KHR_external_fence" ) ||
#if defined( VK_USE_PLATFORM_WIN32_KHR )
( name == "VK_KHR_external_fence_win32" ) ||
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
( name == "VK_KHR_external_fence_fd" ) || ( name == "VK_KHR_performance_query" ) || ( name == "VK_KHR_maintenance2" ) ||
( name == "VK_KHR_variable_pointers" ) || ( name == "VK_EXT_external_memory_dma_buf" ) || ( name == "VK_EXT_queue_family_foreign" ) ||
( name == "VK_KHR_dedicated_allocation" ) ||
#if defined( VK_USE_PLATFORM_ANDROID_KHR )
( name == "VK_ANDROID_external_memory_android_hardware_buffer" ) ||
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
( name == "VK_EXT_sampler_filter_minmax" ) || ( name == "VK_KHR_storage_buffer_storage_class" ) || ( name == "VK_AMD_gpu_shader_int16" ) ||
( name == "VK_AMD_mixed_attachment_samples" ) || ( name == "VK_AMD_shader_fragment_mask" ) || ( name == "VK_EXT_inline_uniform_block" ) ||
( name == "VK_EXT_shader_stencil_export" ) || ( name == "VK_EXT_sample_locations" ) || ( name == "VK_KHR_relaxed_block_layout" ) ||
( name == "VK_KHR_get_memory_requirements2" ) || ( name == "VK_KHR_image_format_list" ) || ( name == "VK_EXT_blend_operation_advanced" ) ||
( name == "VK_NV_fragment_coverage_to_color" ) || ( name == "VK_KHR_acceleration_structure" ) || ( name == "VK_KHR_ray_tracing_pipeline" ) ||
( name == "VK_KHR_ray_query" ) || ( name == "VK_NV_framebuffer_mixed_samples" ) || ( name == "VK_NV_fill_rectangle" ) ||
( name == "VK_NV_shader_sm_builtins" ) || ( name == "VK_EXT_post_depth_coverage" ) || ( name == "VK_KHR_sampler_ycbcr_conversion" ) ||
( name == "VK_KHR_bind_memory2" ) || ( name == "VK_EXT_image_drm_format_modifier" ) || ( name == "VK_EXT_validation_cache" ) ||
( name == "VK_EXT_descriptor_indexing" ) || ( name == "VK_EXT_shader_viewport_index_layer" ) ||
#if defined( VK_ENABLE_BETA_EXTENSIONS )
( name == "VK_KHR_portability_subset" ) ||
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
( name == "VK_NV_shading_rate_image" ) || ( name == "VK_NV_ray_tracing" ) || ( name == "VK_NV_representative_fragment_test" ) ||
( name == "VK_KHR_maintenance3" ) || ( name == "VK_KHR_draw_indirect_count" ) || ( name == "VK_EXT_filter_cubic" ) ||
( name == "VK_QCOM_render_pass_shader_resolve" ) || ( name == "VK_EXT_global_priority" ) || ( name == "VK_KHR_shader_subgroup_extended_types" ) ||
( name == "VK_KHR_8bit_storage" ) || ( name == "VK_EXT_external_memory_host" ) || ( name == "VK_AMD_buffer_marker" ) ||
( name == "VK_KHR_shader_atomic_int64" ) || ( name == "VK_KHR_shader_clock" ) || ( name == "VK_AMD_pipeline_compiler_control" ) ||
( name == "VK_EXT_calibrated_timestamps" ) || ( name == "VK_AMD_shader_core_properties" ) || ( name == "VK_KHR_video_decode_h265" ) ||
( name == "VK_KHR_global_priority" ) || ( name == "VK_AMD_memory_overallocation_behavior" ) || ( name == "VK_EXT_vertex_attribute_divisor" ) ||
#if defined( VK_USE_PLATFORM_GGP )
( name == "VK_GGP_frame_token" ) ||
#endif /*VK_USE_PLATFORM_GGP*/
( name == "VK_EXT_pipeline_creation_feedback" ) || ( name == "VK_KHR_driver_properties" ) || ( name == "VK_KHR_shader_float_controls" ) ||
( name == "VK_NV_shader_subgroup_partitioned" ) || ( name == "VK_KHR_depth_stencil_resolve" ) || ( name == "VK_KHR_swapchain_mutable_format" ) ||
( name == "VK_NV_compute_shader_derivatives" ) || ( name == "VK_NV_mesh_shader" ) || ( name == "VK_NV_fragment_shader_barycentric" ) ||
( name == "VK_NV_shader_image_footprint" ) || ( name == "VK_NV_scissor_exclusive" ) || ( name == "VK_NV_device_diagnostic_checkpoints" ) ||
( name == "VK_KHR_timeline_semaphore" ) || ( name == "VK_INTEL_shader_integer_functions2" ) || ( name == "VK_INTEL_performance_query" ) ||
( name == "VK_KHR_vulkan_memory_model" ) || ( name == "VK_EXT_pci_bus_info" ) || ( name == "VK_AMD_display_native_hdr" ) ||
( name == "VK_KHR_shader_terminate_invocation" ) || ( name == "VK_EXT_fragment_density_map" ) || ( name == "VK_EXT_scalar_block_layout" ) ||
( name == "VK_GOOGLE_hlsl_functionality1" ) || ( name == "VK_GOOGLE_decorate_string" ) || ( name == "VK_EXT_subgroup_size_control" ) ||
( name == "VK_KHR_fragment_shading_rate" ) || ( name == "VK_AMD_shader_core_properties2" ) || ( name == "VK_AMD_device_coherent_memory" ) ||
( name == "VK_EXT_shader_image_atomic_int64" ) || ( name == "VK_KHR_spirv_1_4" ) || ( name == "VK_EXT_memory_budget" ) ||
( name == "VK_EXT_memory_priority" ) || ( name == "VK_NV_dedicated_allocation_image_aliasing" ) ||
( name == "VK_KHR_separate_depth_stencil_layouts" ) || ( name == "VK_EXT_buffer_device_address" ) || ( name == "VK_EXT_tooling_info" ) ||
( name == "VK_EXT_separate_stencil_usage" ) || ( name == "VK_KHR_present_wait" ) || ( name == "VK_NV_cooperative_matrix" ) ||
( name == "VK_NV_coverage_reduction_mode" ) || ( name == "VK_EXT_fragment_shader_interlock" ) || ( name == "VK_EXT_ycbcr_image_arrays" ) ||
( name == "VK_KHR_uniform_buffer_standard_layout" ) || ( name == "VK_EXT_provoking_vertex" ) ||
#if defined( VK_USE_PLATFORM_WIN32_KHR )
( name == "VK_EXT_full_screen_exclusive" ) ||
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
( name == "VK_KHR_buffer_device_address" ) || ( name == "VK_EXT_line_rasterization" ) || ( name == "VK_EXT_shader_atomic_float" ) ||
( name == "VK_EXT_host_query_reset" ) || ( name == "VK_EXT_index_type_uint8" ) || ( name == "VK_EXT_extended_dynamic_state" ) ||
( name == "VK_KHR_deferred_host_operations" ) || ( name == "VK_KHR_pipeline_executable_properties" ) || ( name == "VK_KHR_map_memory2" ) ||
( name == "VK_EXT_shader_atomic_float2" ) || ( name == "VK_EXT_swapchain_maintenance1" ) ||
( name == "VK_EXT_shader_demote_to_helper_invocation" ) || ( name == "VK_NV_device_generated_commands" ) ||
( name == "VK_NV_inherited_viewport_scissor" ) || ( name == "VK_KHR_shader_integer_dot_product" ) || ( name == "VK_EXT_texel_buffer_alignment" ) ||
( name == "VK_QCOM_render_pass_transform" ) || ( name == "VK_EXT_device_memory_report" ) || ( name == "VK_EXT_robustness2" ) ||
( name == "VK_EXT_custom_border_color" ) || ( name == "VK_GOOGLE_user_type" ) || ( name == "VK_KHR_pipeline_library" ) ||
( name == "VK_NV_present_barrier" ) || ( name == "VK_KHR_shader_non_semantic_info" ) || ( name == "VK_KHR_present_id" ) ||
( name == "VK_EXT_private_data" ) || ( name == "VK_EXT_pipeline_creation_cache_control" ) ||
#if defined( VK_ENABLE_BETA_EXTENSIONS )
( name == "VK_KHR_video_encode_queue" ) ||
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
( name == "VK_NV_device_diagnostics_config" ) || ( name == "VK_QCOM_render_pass_store_ops" ) || ( name == "VK_NV_low_latency" ) ||
#if defined( VK_USE_PLATFORM_METAL_EXT )
( name == "VK_EXT_metal_objects" ) ||
#endif /*VK_USE_PLATFORM_METAL_EXT*/
( name == "VK_KHR_synchronization2" ) || ( name == "VK_EXT_descriptor_buffer" ) || ( name == "VK_EXT_graphics_pipeline_library" ) ||
( name == "VK_AMD_shader_early_and_late_fragment_tests" ) || ( name == "VK_KHR_fragment_shader_barycentric" ) ||
( name == "VK_KHR_shader_subgroup_uniform_control_flow" ) || ( name == "VK_KHR_zero_initialize_workgroup_memory" ) ||
( name == "VK_NV_fragment_shading_rate_enums" ) || ( name == "VK_NV_ray_tracing_motion_blur" ) || ( name == "VK_EXT_mesh_shader" ) ||
( name == "VK_EXT_ycbcr_2plane_444_formats" ) || ( name == "VK_EXT_fragment_density_map2" ) || ( name == "VK_QCOM_rotated_copy_commands" ) ||
( name == "VK_EXT_image_robustness" ) || ( name == "VK_KHR_workgroup_memory_explicit_layout" ) || ( name == "VK_KHR_copy_commands2" ) ||
( name == "VK_EXT_image_compression_control" ) || ( name == "VK_EXT_attachment_feedback_loop_layout" ) || ( name == "VK_EXT_4444_formats" ) ||
( name == "VK_EXT_device_fault" ) || ( name == "VK_ARM_rasterization_order_attachment_access" ) || ( name == "VK_EXT_rgba10x6_formats" ) ||
#if defined( VK_USE_PLATFORM_WIN32_KHR )
( name == "VK_NV_acquire_winrt_display" ) ||
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
( name == "VK_VALVE_mutable_descriptor_type" ) || ( name == "VK_EXT_vertex_input_dynamic_state" ) || ( name == "VK_EXT_physical_device_drm" ) ||
( name == "VK_EXT_device_address_binding_report" ) || ( name == "VK_EXT_depth_clip_control" ) ||
( name == "VK_EXT_primitive_topology_list_restart" ) || ( name == "VK_KHR_format_feature_flags2" ) ||
#if defined( VK_USE_PLATFORM_FUCHSIA )
( name == "VK_FUCHSIA_external_memory" ) || ( name == "VK_FUCHSIA_external_semaphore" ) || ( name == "VK_FUCHSIA_buffer_collection" ) ||
#endif /*VK_USE_PLATFORM_FUCHSIA*/
( name == "VK_HUAWEI_subpass_shading" ) || ( name == "VK_HUAWEI_invocation_mask" ) || ( name == "VK_NV_external_memory_rdma" ) ||
( name == "VK_EXT_pipeline_properties" ) || ( name == "VK_EXT_multisampled_render_to_single_sampled" ) ||
( name == "VK_EXT_extended_dynamic_state2" ) || ( name == "VK_EXT_color_write_enable" ) || ( name == "VK_EXT_primitives_generated_query" ) ||
( name == "VK_KHR_ray_tracing_maintenance1" ) || ( name == "VK_EXT_global_priority_query" ) || ( name == "VK_EXT_image_view_min_lod" ) ||
( name == "VK_EXT_multi_draw" ) || ( name == "VK_EXT_image_2d_view_of_3d" ) || ( name == "VK_EXT_shader_tile_image" ) ||
( name == "VK_EXT_opacity_micromap" ) ||
#if defined( VK_ENABLE_BETA_EXTENSIONS )
( name == "VK_NV_displacement_micromap" ) ||
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
( name == "VK_EXT_load_store_op_none" ) || ( name == "VK_HUAWEI_cluster_culling_shader" ) || ( name == "VK_EXT_border_color_swizzle" ) ||
( name == "VK_EXT_pageable_device_local_memory" ) || ( name == "VK_KHR_maintenance4" ) || ( name == "VK_ARM_shader_core_properties" ) ||
( name == "VK_EXT_image_sliced_view_of_3d" ) || ( name == "VK_VALVE_descriptor_set_host_mapping" ) || ( name == "VK_EXT_depth_clamp_zero_one" ) ||
( name == "VK_EXT_non_seamless_cube_map" ) || ( name == "VK_QCOM_fragment_density_map_offset" ) || ( name == "VK_NV_copy_memory_indirect" ) ||
( name == "VK_NV_memory_decompression" ) || ( name == "VK_NV_linear_color_attachment" ) ||
( name == "VK_EXT_image_compression_control_swapchain" ) || ( name == "VK_QCOM_image_processing" ) || ( name == "VK_EXT_extended_dynamic_state3" ) ||
( name == "VK_EXT_subpass_merge_feedback" ) || ( name == "VK_EXT_shader_module_identifier" ) ||
( name == "VK_EXT_rasterization_order_attachment_access" ) || ( name == "VK_NV_optical_flow" ) || ( name == "VK_EXT_legacy_dithering" ) ||
( name == "VK_EXT_pipeline_protected_access" ) || ( name == "VK_EXT_shader_object" ) || ( name == "VK_QCOM_tile_properties" ) ||
( name == "VK_SEC_amigo_profiling" ) || ( name == "VK_QCOM_multiview_per_view_viewports" ) || ( name == "VK_NV_ray_tracing_invocation_reorder" ) ||
( name == "VK_EXT_mutable_descriptor_type" ) || ( name == "VK_ARM_shader_core_builtins" ) || ( name == "VK_EXT_pipeline_library_group_handles" ) ||
( name == "VK_QCOM_multiview_per_view_render_areas" );
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 bool isExtensionDeprecated( std::string const & name )
{
return ( name == "VK_EXT_debug_report" ) || ( name == "VK_NV_glsl_shader" ) || ( name == "VK_NV_dedicated_allocation" ) ||
( name == "VK_AMD_gpu_shader_half_float" ) || ( name == "VK_IMG_format_pvrtc" ) || ( name == "VK_NV_external_memory_capabilities" ) ||
( name == "VK_NV_external_memory" ) ||
#if defined( VK_USE_PLATFORM_WIN32_KHR )
( name == "VK_NV_external_memory_win32" ) ||
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
( name == "VK_EXT_validation_flags" ) || ( name == "VK_EXT_shader_subgroup_ballot" ) || ( name == "VK_EXT_shader_subgroup_vote" ) ||
#if defined( VK_USE_PLATFORM_IOS_MVK )
( name == "VK_MVK_ios_surface" ) ||
#endif /*VK_USE_PLATFORM_IOS_MVK*/
#if defined( VK_USE_PLATFORM_MACOS_MVK )
( name == "VK_MVK_macos_surface" ) ||
#endif /*VK_USE_PLATFORM_MACOS_MVK*/
( name == "VK_AMD_gpu_shader_int16" ) || ( name == "VK_EXT_buffer_device_address" );
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 bool isInstanceExtension( std::string const & name )
{
return ( name == "VK_KHR_surface" ) || ( name == "VK_KHR_display" ) ||
#if defined( VK_USE_PLATFORM_XLIB_KHR )
( name == "VK_KHR_xlib_surface" ) ||
#endif /*VK_USE_PLATFORM_XLIB_KHR*/
#if defined( VK_USE_PLATFORM_XCB_KHR )
( name == "VK_KHR_xcb_surface" ) ||
#endif /*VK_USE_PLATFORM_XCB_KHR*/
#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
( name == "VK_KHR_wayland_surface" ) ||
#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
#if defined( VK_USE_PLATFORM_ANDROID_KHR )
( name == "VK_KHR_android_surface" ) ||
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
#if defined( VK_USE_PLATFORM_WIN32_KHR )
( name == "VK_KHR_win32_surface" ) ||
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
( name == "VK_EXT_debug_report" ) ||
#if defined( VK_USE_PLATFORM_GGP )
( name == "VK_GGP_stream_descriptor_surface" ) ||
#endif /*VK_USE_PLATFORM_GGP*/
( name == "VK_NV_external_memory_capabilities" ) || ( name == "VK_KHR_get_physical_device_properties2" ) || ( name == "VK_EXT_validation_flags" ) ||
#if defined( VK_USE_PLATFORM_VI_NN )
( name == "VK_NN_vi_surface" ) ||
#endif /*VK_USE_PLATFORM_VI_NN*/
( name == "VK_KHR_device_group_creation" ) || ( name == "VK_KHR_external_memory_capabilities" ) ||
( name == "VK_KHR_external_semaphore_capabilities" ) || ( name == "VK_EXT_direct_mode_display" ) ||
#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT )
( name == "VK_EXT_acquire_xlib_display" ) ||
#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
( name == "VK_EXT_display_surface_counter" ) || ( name == "VK_EXT_swapchain_colorspace" ) || ( name == "VK_KHR_external_fence_capabilities" ) ||
( name == "VK_KHR_get_surface_capabilities2" ) || ( name == "VK_KHR_get_display_properties2" ) ||
#if defined( VK_USE_PLATFORM_IOS_MVK )
( name == "VK_MVK_ios_surface" ) ||
#endif /*VK_USE_PLATFORM_IOS_MVK*/
#if defined( VK_USE_PLATFORM_MACOS_MVK )
( name == "VK_MVK_macos_surface" ) ||
#endif /*VK_USE_PLATFORM_MACOS_MVK*/
( name == "VK_EXT_debug_utils" ) ||
#if defined( VK_USE_PLATFORM_FUCHSIA )
( name == "VK_FUCHSIA_imagepipe_surface" ) ||
#endif /*VK_USE_PLATFORM_FUCHSIA*/
#if defined( VK_USE_PLATFORM_METAL_EXT )
( name == "VK_EXT_metal_surface" ) ||
#endif /*VK_USE_PLATFORM_METAL_EXT*/
( name == "VK_KHR_surface_protected_capabilities" ) || ( name == "VK_EXT_validation_features" ) || ( name == "VK_EXT_headless_surface" ) ||
( name == "VK_EXT_surface_maintenance1" ) || ( name == "VK_EXT_acquire_drm_display" ) ||
#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
( name == "VK_EXT_directfb_surface" ) ||
#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
#if defined( VK_USE_PLATFORM_SCREEN_QNX )
( name == "VK_QNX_screen_surface" ) ||
#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
( name == "VK_KHR_portability_enumeration" ) || ( name == "VK_GOOGLE_surfaceless_query" ) || ( name == "VK_LUNARG_direct_driver_loading" );
}
} // namespace VULKAN_HPP_NAMESPACE
#endif
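// Illustrative usage sketch (not part of this commit): the inspection helpers above can be used
// to warn about deprecated extensions before enabling them. `requestedExtensions`
// (std::vector<std::string>) is an assumed placeholder.
for ( std::string const & ext : requestedExtensions )
{
  if ( vk::isExtensionDeprecated( ext ) )
  {
    std::string replacement = vk::getExtensionDeprecatedBy( ext );
    // log a warning here; an empty string means there is no direct replacement
  }
  bool known = vk::isInstanceExtension( ext ) || vk::isDeviceExtension( ext );
  // `known` tells whether the name is a registered instance or device extension
}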

View file

@@ -21516,6 +21516,317 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
//=== VK_EXT_shader_object ===
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createShadersEXT( uint32_t createInfoCount,
const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT * pCreateInfos,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
VULKAN_HPP_NAMESPACE::ShaderEXT * pShaders,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkCreateShadersEXT( m_device,
createInfoCount,
reinterpret_cast<const VkShaderCreateInfoEXT *>( pCreateInfos ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkShaderEXT *>( pShaders ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename ShaderEXTAllocator, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT, ShaderEXTAllocator>>::type
Device::createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT, ShaderEXTAllocator> shaders( createInfos.size() );
VkResult result =
d.vkCreateShadersEXT( m_device,
createInfos.size(),
reinterpret_cast<const VkShaderCreateInfoEXT *>( createInfos.data() ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkShaderEXT *>( shaders.data() ) );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXT" );
return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), shaders );
}
template <typename ShaderEXTAllocator,
typename Dispatch,
typename B0,
typename std::enable_if<std::is_same<typename B0::value_type, ShaderEXT>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT, ShaderEXTAllocator>>::type
Device::createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
ShaderEXTAllocator & shaderEXTAllocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT, ShaderEXTAllocator> shaders( createInfos.size(), shaderEXTAllocator );
VkResult result =
d.vkCreateShadersEXT( m_device,
createInfos.size(),
reinterpret_cast<const VkShaderCreateInfoEXT *>( createInfos.data() ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkShaderEXT *>( shaders.data() ) );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXT" );
return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), shaders );
}
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ShaderEXT>::type
Device::createShaderEXT( const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
VULKAN_HPP_NAMESPACE::ShaderEXT shader;
VkResult result =
d.vkCreateShadersEXT( m_device,
1,
reinterpret_cast<const VkShaderCreateInfoEXT *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkShaderEXT *>( &shader ) );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderEXT" );
return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), shader );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch, typename ShaderEXTAllocator>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>, ShaderEXTAllocator>>::type
Device::createShadersEXTUnique( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT> shaders( createInfos.size() );
VkResult result =
d.vkCreateShadersEXT( m_device,
createInfos.size(),
reinterpret_cast<const VkShaderCreateInfoEXT *>( createInfos.data() ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkShaderEXT *>( shaders.data() ) );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXTUnique" );
std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>, ShaderEXTAllocator> uniqueShaders;
uniqueShaders.reserve( createInfos.size() );
ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
for ( auto const & shader : shaders )
{
uniqueShaders.push_back( UniqueHandle<ShaderEXT, Dispatch>( shader, deleter ) );
}
return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniqueShaders ) );
}
template <typename Dispatch,
typename ShaderEXTAllocator,
typename B0,
typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<ShaderEXT, Dispatch>>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>, ShaderEXTAllocator>>::type
Device::createShadersEXTUnique( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
ShaderEXTAllocator & shaderEXTAllocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT> shaders( createInfos.size() );
VkResult result =
d.vkCreateShadersEXT( m_device,
createInfos.size(),
reinterpret_cast<const VkShaderCreateInfoEXT *>( createInfos.data() ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkShaderEXT *>( shaders.data() ) );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXTUnique" );
std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>, ShaderEXTAllocator> uniqueShaders( shaderEXTAllocator );
uniqueShaders.reserve( createInfos.size() );
ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
for ( auto const & shader : shaders )
{
uniqueShaders.push_back( UniqueHandle<ShaderEXT, Dispatch>( shader, deleter ) );
}
return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniqueShaders ) );
}
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>>::type
Device::createShaderEXTUnique( const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
VULKAN_HPP_NAMESPACE::ShaderEXT shader;
VkResult result =
d.vkCreateShadersEXT( m_device,
1,
reinterpret_cast<const VkShaderCreateInfoEXT *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkShaderEXT *>( &shader ) );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderEXTUnique" );
return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>( shader, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
}
# endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroyShaderEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkDestroyShaderEXT( m_device, static_cast<VkShaderEXT>( shader ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroyShaderEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkDestroyShaderEXT( m_device,
static_cast<VkShaderEXT>( shader ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderEXT shader,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkDestroyShaderEXT( m_device, static_cast<VkShaderEXT>( shader ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderEXT shader,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkDestroyShaderEXT( m_device,
static_cast<VkShaderEXT>( shader ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
Device::getShaderBinaryDataEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, size_t * pDataSize, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkGetShaderBinaryDataEXT( m_device, static_cast<VkShaderEXT>( shader ), pDataSize, pData ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Uint8_tAllocator, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
Device::getShaderBinaryDataEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
std::vector<uint8_t, Uint8_tAllocator> data;
size_t dataSize;
VkResult result;
do
{
result = d.vkGetShaderBinaryDataEXT( m_device, static_cast<VkShaderEXT>( shader ), &dataSize, nullptr );
if ( ( result == VK_SUCCESS ) && dataSize )
{
data.resize( dataSize );
result = d.vkGetShaderBinaryDataEXT( m_device, static_cast<VkShaderEXT>( shader ), &dataSize, reinterpret_cast<void *>( data.data() ) );
}
} while ( result == VK_INCOMPLETE );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderBinaryDataEXT" );
VULKAN_HPP_ASSERT( dataSize <= data.size() );
if ( dataSize < data.size() )
{
data.resize( dataSize );
}
return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
}
template <typename Uint8_tAllocator,
typename Dispatch,
typename B1,
typename std::enable_if<std::is_same<typename B1::value_type, uint8_t>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
Device::getShaderBinaryDataEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
std::vector<uint8_t, Uint8_tAllocator> data( uint8_tAllocator );
size_t dataSize;
VkResult result;
do
{
result = d.vkGetShaderBinaryDataEXT( m_device, static_cast<VkShaderEXT>( shader ), &dataSize, nullptr );
if ( ( result == VK_SUCCESS ) && dataSize )
{
data.resize( dataSize );
result = d.vkGetShaderBinaryDataEXT( m_device, static_cast<VkShaderEXT>( shader ), &dataSize, reinterpret_cast<void *>( data.data() ) );
}
} while ( result == VK_INCOMPLETE );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderBinaryDataEXT" );
VULKAN_HPP_ASSERT( dataSize <= data.size() );
if ( dataSize < data.size() )
{
data.resize( dataSize );
}
return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
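// Usage sketch (illustrative comment, not part of the generated header): the enhanced overloads
// above wrap the usual size-query / VK_INCOMPLETE retry loop. Assuming a valid Device `device`
// and ShaderEXT `shader` (hypothetical names), with exceptions enabled a caller would write:
//
//   std::vector<uint8_t> binary = device.getShaderBinaryDataEXT( shader );
//
// The returned bytes can later be passed back through a ShaderCreateInfoEXT with
// codeType = ShaderCodeTypeEXT::eBinary to recreate the shader without recompiling SPIR-V.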
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::bindShadersEXT( uint32_t stageCount,
const VULKAN_HPP_NAMESPACE::ShaderStageFlagBits * pStages,
const VULKAN_HPP_NAMESPACE::ShaderEXT * pShaders,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdBindShadersEXT(
m_commandBuffer, stageCount, reinterpret_cast<const VkShaderStageFlagBits *>( pStages ), reinterpret_cast<const VkShaderEXT *>( pShaders ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::bindShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderStageFlagBits> const & stages,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderEXT> const & shaders,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# ifdef VULKAN_HPP_NO_EXCEPTIONS
VULKAN_HPP_ASSERT( stages.size() == shaders.size() );
# else
if ( stages.size() != shaders.size() )
{
throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindShadersEXT: stages.size() != shaders.size()" );
}
# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
d.vkCmdBindShadersEXT( m_commandBuffer,
stages.size(),
reinterpret_cast<const VkShaderStageFlagBits *>( stages.data() ),
reinterpret_cast<const VkShaderEXT *>( shaders.data() ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
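// Usage sketch (illustrative comment, not part of the generated header): the ArrayProxy overload
// above expects exactly one shader per stage, with matching array sizes. Assuming hypothetical
// handles `vertShader` and `fragShader` created through VK_EXT_shader_object:
//
//   std::array<ShaderStageFlagBits, 2> stages  = { ShaderStageFlagBits::eVertex, ShaderStageFlagBits::eFragment };
//   std::array<ShaderEXT, 2>           shaders = { vertShader, fragShader };
//   commandBuffer.bindShadersEXT( stages, shaders );
//
// Passing a null ShaderEXT for a stage unbinds whatever shader was previously bound to it.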
//=== VK_QCOM_tile_properties ===
template <typename Dispatch>

View file

@ -432,6 +432,7 @@ namespace VULKAN_HPP_NAMESPACE
using PhysicalDeviceSubgroupSizeControlPropertiesEXT = PhysicalDeviceSubgroupSizeControlProperties;
struct PipelineShaderStageRequiredSubgroupSizeCreateInfo;
using PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT = PipelineShaderStageRequiredSubgroupSizeCreateInfo;
using ShaderRequiredSubgroupSizeCreateInfoEXT = PipelineShaderStageRequiredSubgroupSizeCreateInfo;
struct PhysicalDeviceInlineUniformBlockFeatures;
using PhysicalDeviceInlineUniformBlockFeaturesEXT = PhysicalDeviceInlineUniformBlockFeatures;
struct PhysicalDeviceInlineUniformBlockProperties;
@ -1432,6 +1433,10 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_EXT_image_2d_view_of_3d ===
struct PhysicalDeviceImage2DViewOf3DFeaturesEXT;
//=== VK_EXT_shader_tile_image ===
struct PhysicalDeviceShaderTileImageFeaturesEXT;
struct PhysicalDeviceShaderTileImagePropertiesEXT;
//=== VK_EXT_opacity_micromap ===
struct MicromapBuildInfoEXT;
struct MicromapUsageEXT;
@ -1552,6 +1557,11 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_EXT_pipeline_protected_access ===
struct PhysicalDevicePipelineProtectedAccessFeaturesEXT;
//=== VK_EXT_shader_object ===
struct PhysicalDeviceShaderObjectFeaturesEXT;
struct PhysicalDeviceShaderObjectPropertiesEXT;
struct ShaderCreateInfoEXT;
//=== VK_QCOM_tile_properties ===
struct PhysicalDeviceTilePropertiesFeaturesQCOM;
struct TilePropertiesQCOM;
@ -1677,6 +1687,9 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_NV_optical_flow ===
class OpticalFlowSessionNV;
//=== VK_EXT_shader_object ===
class ShaderEXT;
#ifndef VULKAN_HPP_NO_SMART_HANDLE
//======================
//=== UNIQUE HANDLEs ===
@ -2012,6 +2025,15 @@ namespace VULKAN_HPP_NAMESPACE
using deleter = ObjectDestroy<Device, Dispatch>;
};
using UniqueOpticalFlowSessionNV = UniqueHandle<OpticalFlowSessionNV, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
//=== VK_EXT_shader_object ===
template <typename Dispatch>
class UniqueHandleTraits<ShaderEXT, Dispatch>
{
public:
using deleter = ObjectDestroy<Device, Dispatch>;
};
using UniqueShaderEXT = UniqueHandle<ShaderEXT, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
//===============
@ -3211,6 +3233,85 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
};
class ShaderEXT
{
public:
using CType = VkShaderEXT;
using NativeType = VkShaderEXT;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eShaderEXT;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
public:
VULKAN_HPP_CONSTEXPR ShaderEXT() = default;
VULKAN_HPP_CONSTEXPR ShaderEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
VULKAN_HPP_TYPESAFE_EXPLICIT ShaderEXT( VkShaderEXT shaderEXT ) VULKAN_HPP_NOEXCEPT : m_shaderEXT( shaderEXT ) {}
#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
ShaderEXT & operator=( VkShaderEXT shaderEXT ) VULKAN_HPP_NOEXCEPT
{
m_shaderEXT = shaderEXT;
return *this;
}
#endif
ShaderEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
{
m_shaderEXT = {};
return *this;
}
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
auto operator<=>( ShaderEXT const & ) const = default;
#else
bool operator==( ShaderEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return m_shaderEXT == rhs.m_shaderEXT;
}
bool operator!=( ShaderEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return m_shaderEXT != rhs.m_shaderEXT;
}
bool operator<( ShaderEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return m_shaderEXT < rhs.m_shaderEXT;
}
#endif
VULKAN_HPP_TYPESAFE_EXPLICIT operator VkShaderEXT() const VULKAN_HPP_NOEXCEPT
{
return m_shaderEXT;
}
explicit operator bool() const VULKAN_HPP_NOEXCEPT
{
return m_shaderEXT != VK_NULL_HANDLE;
}
bool operator!() const VULKAN_HPP_NOEXCEPT
{
return m_shaderEXT == VK_NULL_HANDLE;
}
private:
VkShaderEXT m_shaderEXT = {};
};
template <>
struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eShaderEXT>
{
using Type = VULKAN_HPP_NAMESPACE::ShaderEXT;
};
template <>
struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::ShaderEXT>
{
static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
};
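// Usage sketch (illustrative comment, not part of the generated header): like the other
// non-dispatchable handle wrappers, ShaderEXT is a small value type that wraps a VkShaderEXT
// without owning it. Assuming a hypothetical handle `shader`:
//
//   if ( shader )                                              // explicit bool: non-null check
//   {
//     VkShaderEXT native = static_cast<VkShaderEXT>( shader ); // interop with the C API
//   }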
class Image
{
public:
@ -5978,6 +6079,20 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
//=== VK_EXT_shader_object ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void bindShadersEXT( uint32_t stageCount,
const VULKAN_HPP_NAMESPACE::ShaderStageFlagBits * pStages,
const VULKAN_HPP_NAMESPACE::ShaderEXT * pShaders,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void bindShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderStageFlagBits> const & stages,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderEXT> const & shaders,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
operator VkCommandBuffer() const VULKAN_HPP_NOEXCEPT
{
return m_commandBuffer;
@ -12476,6 +12591,97 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_EXT_shader_object ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result createShadersEXT( uint32_t createInfoCount,
const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT * pCreateInfos,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
VULKAN_HPP_NAMESPACE::ShaderEXT * pShaders,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename ShaderEXTAllocator = std::allocator<VULKAN_HPP_NAMESPACE::ShaderEXT>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT, ShaderEXTAllocator>>::type
createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename ShaderEXTAllocator = std::allocator<VULKAN_HPP_NAMESPACE::ShaderEXT>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
typename B0 = ShaderEXTAllocator,
typename std::enable_if<std::is_same<typename B0::value_type, ShaderEXT>::value, int>::type = 0>
VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT, ShaderEXTAllocator>>::type
createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
ShaderEXTAllocator & shaderEXTAllocator,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::ShaderEXT>::type
createShaderEXT( const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
typename ShaderEXTAllocator = std::allocator<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>>>
VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>, ShaderEXTAllocator>>::type
createShadersEXTUnique( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
typename ShaderEXTAllocator = std::allocator<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>>,
typename B0 = ShaderEXTAllocator,
typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<ShaderEXT, Dispatch>>::value, int>::type = 0>
VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>, ShaderEXTAllocator>>::type
createShadersEXTUnique( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
ShaderEXTAllocator & shaderEXTAllocator,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>>::type
createShaderEXTUnique( const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
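// Usage sketch (illustrative comment, not part of the generated header): with exceptions enabled,
// the enhanced overloads above return the created handles directly. Assuming hypothetical
// ShaderCreateInfoEXT objects `vertCreateInfo` and `fragCreateInfo`:
//
//   std::vector<ShaderEXT> shaders = device.createShadersEXT( { vertCreateInfo, fragCreateInfo } );
//   UniqueShaderEXT        unique  = device.createShaderEXTUnique( vertCreateInfo );
//
// Plain handles must eventually be released via destroyShaderEXT()/destroy(); the unique handle
// does so automatically in its destructor.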
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroyShaderEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroyShaderEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroy( VULKAN_HPP_NAMESPACE::ShaderEXT shader,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroy( VULKAN_HPP_NAMESPACE::ShaderEXT shader,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result getShaderBinaryDataEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader,
size_t * pDataSize,
void * pData,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Uint8_tAllocator = std::allocator<uint8_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
getShaderBinaryDataEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename Uint8_tAllocator = std::allocator<uint8_t>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
typename B1 = Uint8_tAllocator,
typename std::enable_if<std::is_same<typename B1::value_type, uint8_t>::value, int>::type = 0>
VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type getShaderBinaryDataEXT(
VULKAN_HPP_NAMESPACE::ShaderEXT shader, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
//=== VK_QCOM_tile_properties ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>

View file

@ -492,6 +492,17 @@ namespace std
}
};
//=== VK_EXT_shader_object ===
template <>
struct hash<VULKAN_HPP_NAMESPACE::ShaderEXT>
{
std::size_t operator()( VULKAN_HPP_NAMESPACE::ShaderEXT const & shaderEXT ) const VULKAN_HPP_NOEXCEPT
{
return std::hash<VkShaderEXT>{}( static_cast<VkShaderEXT>( shaderEXT ) );
}
};
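// Usage sketch (illustrative comment, not part of the generated header): the specialization above
// lets ShaderEXT act as a key in unordered containers, e.g. a hypothetical binary cache:
//
//   std::unordered_map<VULKAN_HPP_NAMESPACE::ShaderEXT, std::vector<uint8_t>> binaryCache;
//   binaryCache[shader] = device.getShaderBinaryDataEXT( shader );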
#if 14 <= VULKAN_HPP_CPP_VERSION
//======================================
//=== HASH structures for structures ===
@ -10074,6 +10085,38 @@ namespace std
}
};
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderObjectFeaturesEXT>
{
std::size_t
operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderObjectFeaturesEXT const & physicalDeviceShaderObjectFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderObjectFeaturesEXT.sType );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderObjectFeaturesEXT.pNext );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderObjectFeaturesEXT.shaderObject );
return seed;
}
};
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderObjectPropertiesEXT>
{
std::size_t
operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderObjectPropertiesEXT const & physicalDeviceShaderObjectPropertiesEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderObjectPropertiesEXT.sType );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderObjectPropertiesEXT.pNext );
for ( size_t i = 0; i < VK_UUID_SIZE; ++i )
{
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderObjectPropertiesEXT.shaderBinaryUUID[i] );
}
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderObjectPropertiesEXT.shaderBinaryVersion );
return seed;
}
};
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsFeaturesNV>
{
@ -10145,6 +10188,38 @@ namespace std
}
};
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImageFeaturesEXT>
{
std::size_t
operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImageFeaturesEXT const & physicalDeviceShaderTileImageFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTileImageFeaturesEXT.sType );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTileImageFeaturesEXT.pNext );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTileImageFeaturesEXT.shaderTileImageColorReadAccess );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTileImageFeaturesEXT.shaderTileImageDepthReadAccess );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTileImageFeaturesEXT.shaderTileImageStencilReadAccess );
return seed;
}
};
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImagePropertiesEXT>
{
std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImagePropertiesEXT const & physicalDeviceShaderTileImagePropertiesEXT ) const
VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTileImagePropertiesEXT.sType );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTileImagePropertiesEXT.pNext );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTileImagePropertiesEXT.shaderTileImageCoherentReadAccelerated );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTileImagePropertiesEXT.shaderTileImageReadSampleFromPixelRateInvocation );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTileImagePropertiesEXT.shaderTileImageReadFromHelperInvocation );
return seed;
}
};
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImageFeaturesNV>
{
@ -12680,6 +12755,33 @@ namespace std
}
};
template <>
struct hash<VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT>
{
std::size_t operator()( VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT const & shaderCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.sType );
VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.pNext );
VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.flags );
VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.stage );
VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.nextStage );
VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.codeType );
VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.codeSize );
VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.pCode );
for ( const char * p = shaderCreateInfoEXT.pName; *p != '\0'; ++p )
{
VULKAN_HPP_HASH_COMBINE( seed, *p );
}
VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.setLayoutCount );
VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.pSetLayouts );
VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.pushConstantRangeCount );
VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.pPushConstantRanges );
VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.pSpecializationInfo );
return seed;
}
};
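// Note (illustrative comment, not part of the generated header): the specialization above hashes
// the characters of pName but only the pointer values of pCode, pSetLayouts, pPushConstantRanges
// and pSpecializationInfo, so it is a fast fingerprint rather than a deep content hash:
//
//   std::size_t fingerprint = std::hash<VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT>{}( createInfo );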
template <>
struct hash<VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo>
{

View file

@ -1593,6 +1593,12 @@ namespace VULKAN_HPP_NAMESPACE
vkBindOpticalFlowSessionImageNV = PFN_vkBindOpticalFlowSessionImageNV( vkGetDeviceProcAddr( device, "vkBindOpticalFlowSessionImageNV" ) );
vkCmdOpticalFlowExecuteNV = PFN_vkCmdOpticalFlowExecuteNV( vkGetDeviceProcAddr( device, "vkCmdOpticalFlowExecuteNV" ) );
//=== VK_EXT_shader_object ===
vkCreateShadersEXT = PFN_vkCreateShadersEXT( vkGetDeviceProcAddr( device, "vkCreateShadersEXT" ) );
vkDestroyShaderEXT = PFN_vkDestroyShaderEXT( vkGetDeviceProcAddr( device, "vkDestroyShaderEXT" ) );
vkGetShaderBinaryDataEXT = PFN_vkGetShaderBinaryDataEXT( vkGetDeviceProcAddr( device, "vkGetShaderBinaryDataEXT" ) );
vkCmdBindShadersEXT = PFN_vkCmdBindShadersEXT( vkGetDeviceProcAddr( device, "vkCmdBindShadersEXT" ) );
//=== VK_QCOM_tile_properties ===
vkGetFramebufferTilePropertiesQCOM = PFN_vkGetFramebufferTilePropertiesQCOM( vkGetDeviceProcAddr( device, "vkGetFramebufferTilePropertiesQCOM" ) );
vkGetDynamicRenderingTilePropertiesQCOM =
@ -2384,6 +2390,12 @@ namespace VULKAN_HPP_NAMESPACE
PFN_vkBindOpticalFlowSessionImageNV vkBindOpticalFlowSessionImageNV = 0;
PFN_vkCmdOpticalFlowExecuteNV vkCmdOpticalFlowExecuteNV = 0;
//=== VK_EXT_shader_object ===
PFN_vkCreateShadersEXT vkCreateShadersEXT = 0;
PFN_vkDestroyShaderEXT vkDestroyShaderEXT = 0;
PFN_vkGetShaderBinaryDataEXT vkGetShaderBinaryDataEXT = 0;
PFN_vkCmdBindShadersEXT vkCmdBindShadersEXT = 0;
//=== VK_QCOM_tile_properties ===
PFN_vkGetFramebufferTilePropertiesQCOM vkGetFramebufferTilePropertiesQCOM = 0;
PFN_vkGetDynamicRenderingTilePropertiesQCOM vkGetDynamicRenderingTilePropertiesQCOM = 0;
@ -2480,6 +2492,9 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_NV_optical_flow ===
class OpticalFlowSessionNV;
//=== VK_EXT_shader_object ===
class ShaderEXT;
//====================
//=== RAII HANDLES ===
//====================
@ -4128,6 +4143,16 @@ namespace VULKAN_HPP_NAMESPACE
createOpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
//=== VK_EXT_shader_object ===
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_RAII_NAMESPACE::ShaderEXT>
createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::ShaderEXT
createShaderEXT( VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
//=== VK_QCOM_tile_properties ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::TilePropertiesQCOM
@ -5855,6 +5880,11 @@ namespace VULKAN_HPP_NAMESPACE
void opticalFlowExecuteNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,
const VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV & executeInfo ) const VULKAN_HPP_NOEXCEPT;
//=== VK_EXT_shader_object ===
void bindShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderStageFlagBits> const & stages,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderEXT> const & shaders ) const;
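// Usage sketch (illustrative comment, not part of the generated header), with hypothetical RAII
// handles `vertShader` and `fragShader`; the two proxies are expected to have the same size:
//
//   commandBuffer.bindShadersEXT( { ShaderStageFlagBits::eVertex, ShaderStageFlagBits::eFragment },
//                                 { *vertShader, *fragShader } );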
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::CommandPool m_commandPool = {};
@ -10142,6 +10172,169 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
};
class ShaderEXT
{
public:
using CType = VkShaderEXT;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eShaderEXT;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
public:
ShaderEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( *device )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
m_constructorSuccessCode =
static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateShadersEXT( static_cast<VkDevice>( *device ),
1,
reinterpret_cast<const VkShaderCreateInfoEXT *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
reinterpret_cast<VkShaderEXT *>( &m_shader ) ) );
if ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
throwResultException( m_constructorSuccessCode, "vkCreateShadersEXT" );
}
}
ShaderEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VkShaderEXT shader,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( *device )
, m_shader( shader )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
}
ShaderEXT( std::nullptr_t ) {}
~ShaderEXT()
{
clear();
}
ShaderEXT() = delete;
ShaderEXT( ShaderEXT const & ) = delete;
ShaderEXT( ShaderEXT && rhs ) VULKAN_HPP_NOEXCEPT
: m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
, m_shader( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_shader, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
{
}
ShaderEXT & operator=( ShaderEXT const & ) = delete;
ShaderEXT & operator =( ShaderEXT && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
clear();
m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
m_shader = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_shader, {} );
m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
}
return *this;
}
VULKAN_HPP_NAMESPACE::ShaderEXT const & operator*() const VULKAN_HPP_NOEXCEPT
{
return m_shader;
}
void clear() VULKAN_HPP_NOEXCEPT
{
if ( m_shader )
{
getDispatcher()->vkDestroyShaderEXT(
static_cast<VkDevice>( m_device ), static_cast<VkShaderEXT>( m_shader ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
}
m_device = nullptr;
m_shader = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
}
VULKAN_HPP_NAMESPACE::ShaderEXT release()
{
m_device = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_shader, nullptr );
}
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
}
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
{
VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
return m_dispatcher;
}
void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ShaderEXT & rhs ) VULKAN_HPP_NOEXCEPT
{
std::swap( m_device, rhs.m_device );
std::swap( m_shader, rhs.m_shader );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
//=== VK_EXT_shader_object ===
VULKAN_HPP_NODISCARD std::vector<uint8_t> getBinaryData() const;
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::ShaderEXT m_shader = {};
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
};
class ShaderEXTs : public std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ShaderEXT>
{
public:
ShaderEXTs( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * dispatcher = device.getDispatcher();
std::vector<VkShaderEXT> shaders( createInfos.size() );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( dispatcher->vkCreateShadersEXT(
static_cast<VkDevice>( *device ),
createInfos.size(),
reinterpret_cast<const VkShaderCreateInfoEXT *>( createInfos.data() ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
shaders.data() ) );
if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
this->reserve( createInfos.size() );
for ( auto const & shaderEXT : shaders )
{
this->emplace_back( device, shaderEXT, allocator );
}
}
else
{
throwResultException( result, "vkCreateShadersEXT" );
}
}
ShaderEXTs( std::nullptr_t ) {}
ShaderEXTs() = delete;
ShaderEXTs( ShaderEXTs const & ) = delete;
ShaderEXTs( ShaderEXTs && rhs ) = default;
ShaderEXTs & operator=( ShaderEXTs const & ) = delete;
ShaderEXTs & operator=( ShaderEXTs && rhs ) = default;
};
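// Usage sketch (illustrative comment, not part of the generated header): the RAII path creates and
// destroys the handles automatically. Assuming a hypothetical VULKAN_HPP_RAII_NAMESPACE::Device
// `device` and create-info objects `vertCreateInfo` and `fragCreateInfo`:
//
//   std::vector<VULKAN_HPP_RAII_NAMESPACE::ShaderEXT> shaders =
//     device.createShadersEXT( { vertCreateInfo, fragCreateInfo } );
//   std::vector<uint8_t> binary = shaders.front().getBinaryData();
//
// Each element calls vkDestroyShaderEXT in its destructor; release() hands ownership of the raw
// handle back to the caller when the application wants to manage destruction itself.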
class ShaderModule
{
public:
@ -13453,14 +13646,16 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCullMode && "Function <vkCmdSetCullMode> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCullMode &&
"Function <vkCmdSetCullMode> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetCullMode( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCullModeFlags>( cullMode ) );
}
VULKAN_HPP_INLINE void CommandBuffer::setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace frontFace ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetFrontFace && "Function <vkCmdSetFrontFace> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetFrontFace &&
"Function <vkCmdSetFrontFace> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetFrontFace( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkFrontFace>( frontFace ) );
}
@ -13468,7 +13663,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopology( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPrimitiveTopology &&
"Function <vkCmdSetPrimitiveTopology> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
"Function <vkCmdSetPrimitiveTopology> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetPrimitiveTopology( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPrimitiveTopology>( primitiveTopology ) );
}
@ -13477,7 +13672,7 @@ namespace VULKAN_HPP_NAMESPACE
CommandBuffer::setViewportWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetViewportWithCount &&
"Function <vkCmdSetViewportWithCount> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
"Function <vkCmdSetViewportWithCount> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetViewportWithCount(
static_cast<VkCommandBuffer>( m_commandBuffer ), viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) );
@ -13487,7 +13682,7 @@ namespace VULKAN_HPP_NAMESPACE
CommandBuffer::setScissorWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetScissorWithCount &&
"Function <vkCmdSetScissorWithCount> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
"Function <vkCmdSetScissorWithCount> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetScissorWithCount(
static_cast<VkCommandBuffer>( m_commandBuffer ), scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) );
@ -13500,7 +13695,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindVertexBuffers2 &&
"Function <vkCmdBindVertexBuffers2> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
"Function <vkCmdBindVertexBuffers2> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
if ( buffers.size() != offsets.size() )
{
throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != offsets.size()" );
@ -13526,7 +13721,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthTestEnable &&
"Function <vkCmdSetDepthTestEnable> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
"Function <vkCmdSetDepthTestEnable> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetDepthTestEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthTestEnable ) );
}
@ -13534,7 +13729,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthWriteEnable &&
"Function <vkCmdSetDepthWriteEnable> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
"Function <vkCmdSetDepthWriteEnable> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetDepthWriteEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthWriteEnable ) );
}
@ -13542,7 +13737,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOp( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthCompareOp &&
"Function <vkCmdSetDepthCompareOp> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
"Function <vkCmdSetDepthCompareOp> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetDepthCompareOp( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCompareOp>( depthCompareOp ) );
}
@ -13550,7 +13745,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthBoundsTestEnable &&
"Function <vkCmdSetDepthBoundsTestEnable> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
"Function <vkCmdSetDepthBoundsTestEnable> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetDepthBoundsTestEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthBoundsTestEnable ) );
}
@ -13558,7 +13753,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnable( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetStencilTestEnable &&
"Function <vkCmdSetStencilTestEnable> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
"Function <vkCmdSetStencilTestEnable> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetStencilTestEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( stencilTestEnable ) );
}
@ -13569,7 +13764,8 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::StencilOp depthFailOp,
VULKAN_HPP_NAMESPACE::CompareOp compareOp ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetStencilOp && "Function <vkCmdSetStencilOp> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetStencilOp &&
"Function <vkCmdSetStencilOp> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetStencilOp( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkStencilFaceFlags>( faceMask ),
@ -13582,7 +13778,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setRasterizerDiscardEnable( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRasterizerDiscardEnable &&
"Function <vkCmdSetRasterizerDiscardEnable> requires <VK_EXT_extended_dynamic_state2> or <VK_VERSION_1_3>" );
"Function <vkCmdSetRasterizerDiscardEnable> requires <VK_EXT_extended_dynamic_state2> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetRasterizerDiscardEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( rasterizerDiscardEnable ) );
}
@ -13590,7 +13786,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthBiasEnable &&
"Function <vkCmdSetDepthBiasEnable> requires <VK_EXT_extended_dynamic_state2> or <VK_VERSION_1_3>" );
"Function <vkCmdSetDepthBiasEnable> requires <VK_EXT_extended_dynamic_state2> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetDepthBiasEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthBiasEnable ) );
}
@ -13598,7 +13794,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveRestartEnable( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPrimitiveRestartEnable &&
"Function <vkCmdSetPrimitiveRestartEnable> requires <VK_EXT_extended_dynamic_state2> or <VK_VERSION_1_3>" );
"Function <vkCmdSetPrimitiveRestartEnable> requires <VK_EXT_extended_dynamic_state2> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetPrimitiveRestartEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( primitiveRestartEnable ) );
}
@ -17634,7 +17830,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCullModeEXT &&
"Function <vkCmdSetCullModeEXT> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
"Function <vkCmdSetCullModeEXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetCullModeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCullModeFlags>( cullMode ) );
}
@ -17642,7 +17838,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetFrontFaceEXT &&
"Function <vkCmdSetFrontFaceEXT> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
"Function <vkCmdSetFrontFaceEXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetFrontFaceEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkFrontFace>( frontFace ) );
}
@ -17650,7 +17846,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPrimitiveTopologyEXT &&
"Function <vkCmdSetPrimitiveTopologyEXT> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
"Function <vkCmdSetPrimitiveTopologyEXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetPrimitiveTopologyEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPrimitiveTopology>( primitiveTopology ) );
}
@ -17659,7 +17855,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetViewportWithCountEXT &&
"Function <vkCmdSetViewportWithCountEXT> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
"Function <vkCmdSetViewportWithCountEXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetViewportWithCountEXT(
static_cast<VkCommandBuffer>( m_commandBuffer ), viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) );
@ -17669,7 +17865,7 @@ namespace VULKAN_HPP_NAMESPACE
CommandBuffer::setScissorWithCountEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetScissorWithCountEXT &&
"Function <vkCmdSetScissorWithCountEXT> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
"Function <vkCmdSetScissorWithCountEXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetScissorWithCountEXT(
static_cast<VkCommandBuffer>( m_commandBuffer ), scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) );
@ -17683,7 +17879,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindVertexBuffers2EXT &&
"Function <vkCmdBindVertexBuffers2EXT> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
"Function <vkCmdBindVertexBuffers2EXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
if ( buffers.size() != offsets.size() )
{
throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != offsets.size()" );
@ -17709,7 +17905,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthTestEnableEXT &&
"Function <vkCmdSetDepthTestEnableEXT> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
"Function <vkCmdSetDepthTestEnableEXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetDepthTestEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthTestEnable ) );
}
@ -17717,7 +17913,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthWriteEnableEXT &&
"Function <vkCmdSetDepthWriteEnableEXT> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
"Function <vkCmdSetDepthWriteEnableEXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetDepthWriteEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthWriteEnable ) );
}
@ -17725,7 +17921,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthCompareOpEXT &&
"Function <vkCmdSetDepthCompareOpEXT> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
"Function <vkCmdSetDepthCompareOpEXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetDepthCompareOpEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCompareOp>( depthCompareOp ) );
}
@ -17733,7 +17929,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthBoundsTestEnableEXT &&
"Function <vkCmdSetDepthBoundsTestEnableEXT> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
"Function <vkCmdSetDepthBoundsTestEnableEXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetDepthBoundsTestEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthBoundsTestEnable ) );
}
@ -17741,7 +17937,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetStencilTestEnableEXT &&
"Function <vkCmdSetStencilTestEnableEXT> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
"Function <vkCmdSetStencilTestEnableEXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetStencilTestEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( stencilTestEnable ) );
}
@ -17753,7 +17949,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::CompareOp compareOp ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetStencilOpEXT &&
"Function <vkCmdSetStencilOpEXT> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
"Function <vkCmdSetStencilOpEXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetStencilOpEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkStencilFaceFlags>( faceMask ),
@ -18572,7 +18768,8 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT> const & vertexAttributeDescriptions ) const
VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetVertexInputEXT && "Function <vkCmdSetVertexInputEXT> requires <VK_EXT_vertex_input_dynamic_state>" );
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetVertexInputEXT &&
"Function <vkCmdSetVertexInputEXT> requires <VK_EXT_shader_object> or <VK_EXT_vertex_input_dynamic_state>" );
getDispatcher()->vkCmdSetVertexInputEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
vertexBindingDescriptions.size(),
@ -18767,15 +18964,16 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setPatchControlPointsEXT( uint32_t patchControlPoints ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPatchControlPointsEXT &&
"Function <vkCmdSetPatchControlPointsEXT> requires <VK_EXT_extended_dynamic_state2>" );
"Function <vkCmdSetPatchControlPointsEXT> requires <VK_EXT_extended_dynamic_state2> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetPatchControlPointsEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), patchControlPoints );
}
VULKAN_HPP_INLINE void CommandBuffer::setRasterizerDiscardEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRasterizerDiscardEnableEXT &&
"Function <vkCmdSetRasterizerDiscardEnableEXT> requires <VK_EXT_extended_dynamic_state2> or <VK_VERSION_1_3>" );
VULKAN_HPP_ASSERT(
getDispatcher()->vkCmdSetRasterizerDiscardEnableEXT &&
"Function <vkCmdSetRasterizerDiscardEnableEXT> requires <VK_EXT_extended_dynamic_state2> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetRasterizerDiscardEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( rasterizerDiscardEnable ) );
}
@ -18783,22 +18981,24 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthBiasEnableEXT &&
"Function <vkCmdSetDepthBiasEnableEXT> requires <VK_EXT_extended_dynamic_state2> or <VK_VERSION_1_3>" );
"Function <vkCmdSetDepthBiasEnableEXT> requires <VK_EXT_extended_dynamic_state2> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetDepthBiasEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthBiasEnable ) );
}
VULKAN_HPP_INLINE void CommandBuffer::setLogicOpEXT( VULKAN_HPP_NAMESPACE::LogicOp logicOp ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLogicOpEXT && "Function <vkCmdSetLogicOpEXT> requires <VK_EXT_extended_dynamic_state2>" );
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLogicOpEXT &&
"Function <vkCmdSetLogicOpEXT> requires <VK_EXT_extended_dynamic_state2> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetLogicOpEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkLogicOp>( logicOp ) );
}
VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveRestartEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPrimitiveRestartEnableEXT &&
"Function <vkCmdSetPrimitiveRestartEnableEXT> requires <VK_EXT_extended_dynamic_state2> or <VK_VERSION_1_3>" );
VULKAN_HPP_ASSERT(
getDispatcher()->vkCmdSetPrimitiveRestartEnableEXT &&
"Function <vkCmdSetPrimitiveRestartEnableEXT> requires <VK_EXT_extended_dynamic_state2> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetPrimitiveRestartEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( primitiveRestartEnable ) );
}
@ -19286,7 +19486,7 @@ namespace VULKAN_HPP_NAMESPACE
CommandBuffer::setTessellationDomainOriginEXT( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetTessellationDomainOriginEXT &&
"Function <vkCmdSetTessellationDomainOriginEXT> requires <VK_EXT_extended_dynamic_state3>" );
"Function <vkCmdSetTessellationDomainOriginEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetTessellationDomainOriginEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkTessellationDomainOrigin>( domainOrigin ) );
@ -19294,14 +19494,16 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setDepthClampEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthClampEnableEXT && "Function <vkCmdSetDepthClampEnableEXT> requires <VK_EXT_extended_dynamic_state3>" );
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthClampEnableEXT &&
"Function <vkCmdSetDepthClampEnableEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetDepthClampEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthClampEnable ) );
}
VULKAN_HPP_INLINE void CommandBuffer::setPolygonModeEXT( VULKAN_HPP_NAMESPACE::PolygonMode polygonMode ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPolygonModeEXT && "Function <vkCmdSetPolygonModeEXT> requires <VK_EXT_extended_dynamic_state3>" );
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPolygonModeEXT &&
"Function <vkCmdSetPolygonModeEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetPolygonModeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPolygonMode>( polygonMode ) );
}
@ -19309,7 +19511,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setRasterizationSamplesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRasterizationSamplesEXT &&
"Function <vkCmdSetRasterizationSamplesEXT> requires <VK_EXT_extended_dynamic_state3>" );
"Function <vkCmdSetRasterizationSamplesEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetRasterizationSamplesEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkSampleCountFlagBits>( rasterizationSamples ) );
@ -19318,7 +19520,8 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setSampleMaskEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SampleMask> const & sampleMask ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetSampleMaskEXT && "Function <vkCmdSetSampleMaskEXT> requires <VK_EXT_extended_dynamic_state3>" );
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetSampleMaskEXT &&
"Function <vkCmdSetSampleMaskEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
if ( sampleMask.size() != ( static_cast<uint32_t>( samples ) + 31 ) / 32 )
{
throw LogicError( VULKAN_HPP_NAMESPACE_STRING
@ -19333,21 +19536,23 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setAlphaToCoverageEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetAlphaToCoverageEnableEXT &&
"Function <vkCmdSetAlphaToCoverageEnableEXT> requires <VK_EXT_extended_dynamic_state3>" );
"Function <vkCmdSetAlphaToCoverageEnableEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetAlphaToCoverageEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( alphaToCoverageEnable ) );
}
VULKAN_HPP_INLINE void CommandBuffer::setAlphaToOneEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetAlphaToOneEnableEXT && "Function <vkCmdSetAlphaToOneEnableEXT> requires <VK_EXT_extended_dynamic_state3>" );
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetAlphaToOneEnableEXT &&
"Function <vkCmdSetAlphaToOneEnableEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetAlphaToOneEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( alphaToOneEnable ) );
}
VULKAN_HPP_INLINE void CommandBuffer::setLogicOpEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLogicOpEnableEXT && "Function <vkCmdSetLogicOpEnableEXT> requires <VK_EXT_extended_dynamic_state3>" );
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLogicOpEnableEXT &&
"Function <vkCmdSetLogicOpEnableEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetLogicOpEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( logicOpEnable ) );
}
@ -19355,7 +19560,8 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setColorBlendEnableEXT(
uint32_t firstAttachment, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & colorBlendEnables ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetColorBlendEnableEXT && "Function <vkCmdSetColorBlendEnableEXT> requires <VK_EXT_extended_dynamic_state3>" );
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetColorBlendEnableEXT &&
"Function <vkCmdSetColorBlendEnableEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetColorBlendEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
firstAttachment,
@ -19368,7 +19574,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT> const & colorBlendEquations ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetColorBlendEquationEXT &&
"Function <vkCmdSetColorBlendEquationEXT> requires <VK_EXT_extended_dynamic_state3>" );
"Function <vkCmdSetColorBlendEquationEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetColorBlendEquationEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
firstAttachment,
@ -19380,7 +19586,8 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t firstAttachment,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorComponentFlags> const & colorWriteMasks ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetColorWriteMaskEXT && "Function <vkCmdSetColorWriteMaskEXT> requires <VK_EXT_extended_dynamic_state3>" );
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetColorWriteMaskEXT &&
"Function <vkCmdSetColorWriteMaskEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetColorWriteMaskEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
firstAttachment,
@ -19391,7 +19598,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setRasterizationStreamEXT( uint32_t rasterizationStream ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRasterizationStreamEXT &&
"Function <vkCmdSetRasterizationStreamEXT> requires <VK_EXT_extended_dynamic_state3>" );
"Function <vkCmdSetRasterizationStreamEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetRasterizationStreamEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), rasterizationStream );
}
@ -19400,7 +19607,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetConservativeRasterizationModeEXT &&
"Function <vkCmdSetConservativeRasterizationModeEXT> requires <VK_EXT_extended_dynamic_state3>" );
"Function <vkCmdSetConservativeRasterizationModeEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetConservativeRasterizationModeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkConservativeRasterizationModeEXT>( conservativeRasterizationMode ) );
@ -19409,14 +19616,15 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setExtraPrimitiveOverestimationSizeEXT( float extraPrimitiveOverestimationSize ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetExtraPrimitiveOverestimationSizeEXT &&
"Function <vkCmdSetExtraPrimitiveOverestimationSizeEXT> requires <VK_EXT_extended_dynamic_state3>" );
"Function <vkCmdSetExtraPrimitiveOverestimationSizeEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetExtraPrimitiveOverestimationSizeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), extraPrimitiveOverestimationSize );
}
VULKAN_HPP_INLINE void CommandBuffer::setDepthClipEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthClipEnableEXT &&
"Function <vkCmdSetDepthClipEnableEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetDepthClipEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthClipEnable ) );
}
@ -19424,7 +19632,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetSampleLocationsEnableEXT &&
"Function <vkCmdSetSampleLocationsEnableEXT> requires <VK_EXT_extended_dynamic_state3>" );
"Function <vkCmdSetSampleLocationsEnableEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetSampleLocationsEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( sampleLocationsEnable ) );
}
@ -19434,7 +19642,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT> const & colorBlendAdvanced ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetColorBlendAdvancedEXT &&
"Function <vkCmdSetColorBlendAdvancedEXT> requires <VK_EXT_extended_dynamic_state3>" );
"Function <vkCmdSetColorBlendAdvancedEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetColorBlendAdvancedEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
firstAttachment,
@ -19446,7 +19654,7 @@ namespace VULKAN_HPP_NAMESPACE
CommandBuffer::setProvokingVertexModeEXT( VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetProvokingVertexModeEXT &&
"Function <vkCmdSetProvokingVertexModeEXT> requires <VK_EXT_extended_dynamic_state3>" );
"Function <vkCmdSetProvokingVertexModeEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetProvokingVertexModeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkProvokingVertexModeEXT>( provokingVertexMode ) );
@ -19456,7 +19664,7 @@ namespace VULKAN_HPP_NAMESPACE
CommandBuffer::setLineRasterizationModeEXT( VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLineRasterizationModeEXT &&
"Function <vkCmdSetLineRasterizationModeEXT> requires <VK_EXT_extended_dynamic_state3>" );
"Function <vkCmdSetLineRasterizationModeEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetLineRasterizationModeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkLineRasterizationModeEXT>( lineRasterizationMode ) );
@ -19464,7 +19672,8 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setLineStippleEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLineStippleEnableEXT &&
"Function <vkCmdSetLineStippleEnableEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetLineStippleEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( stippledLineEnable ) );
}
@ -19472,7 +19681,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setDepthClipNegativeOneToOneEXT( VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthClipNegativeOneToOneEXT &&
"Function <vkCmdSetDepthClipNegativeOneToOneEXT> requires <VK_EXT_extended_dynamic_state3>" );
"Function <vkCmdSetDepthClipNegativeOneToOneEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetDepthClipNegativeOneToOneEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( negativeOneToOne ) );
}
@ -19480,7 +19689,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingEnableNV( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetViewportWScalingEnableNV &&
"Function <vkCmdSetViewportWScalingEnableNV> requires <VK_EXT_extended_dynamic_state3>" );
"Function <vkCmdSetViewportWScalingEnableNV> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetViewportWScalingEnableNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( viewportWScalingEnable ) );
}
@ -19489,7 +19698,8 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t firstViewport,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV> const & viewportSwizzles ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetViewportSwizzleNV &&
"Function <vkCmdSetViewportSwizzleNV> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetViewportSwizzleNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
firstViewport,
@ -19500,7 +19710,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setCoverageToColorEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoverageToColorEnableNV &&
"Function <vkCmdSetCoverageToColorEnableNV> requires <VK_EXT_extended_dynamic_state3>" );
"Function <vkCmdSetCoverageToColorEnableNV> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetCoverageToColorEnableNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( coverageToColorEnable ) );
}
@ -19508,7 +19718,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setCoverageToColorLocationNV( uint32_t coverageToColorLocation ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoverageToColorLocationNV &&
"Function <vkCmdSetCoverageToColorLocationNV> requires <VK_EXT_extended_dynamic_state3>" );
"Function <vkCmdSetCoverageToColorLocationNV> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetCoverageToColorLocationNV( static_cast<VkCommandBuffer>( m_commandBuffer ), coverageToColorLocation );
}
@ -19517,7 +19727,7 @@ namespace VULKAN_HPP_NAMESPACE
CommandBuffer::setCoverageModulationModeNV( VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoverageModulationModeNV &&
"Function <vkCmdSetCoverageModulationModeNV> requires <VK_EXT_extended_dynamic_state3>" );
"Function <vkCmdSetCoverageModulationModeNV> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetCoverageModulationModeNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkCoverageModulationModeNV>( coverageModulationMode ) );
@ -19527,7 +19737,7 @@ namespace VULKAN_HPP_NAMESPACE
CommandBuffer::setCoverageModulationTableEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoverageModulationTableEnableNV &&
"Function <vkCmdSetCoverageModulationTableEnableNV> requires <VK_EXT_extended_dynamic_state3>" );
"Function <vkCmdSetCoverageModulationTableEnableNV> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetCoverageModulationTableEnableNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkBool32>( coverageModulationTableEnable ) );
@ -19537,7 +19747,7 @@ namespace VULKAN_HPP_NAMESPACE
CommandBuffer::setCoverageModulationTableNV( VULKAN_HPP_NAMESPACE::ArrayProxy<const float> const & coverageModulationTable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoverageModulationTableNV &&
"Function <vkCmdSetCoverageModulationTableNV> requires <VK_EXT_extended_dynamic_state3>" );
"Function <vkCmdSetCoverageModulationTableNV> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetCoverageModulationTableNV(
static_cast<VkCommandBuffer>( m_commandBuffer ), coverageModulationTable.size(), coverageModulationTable.data() );
@ -19546,7 +19756,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setShadingRateImageEnableNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetShadingRateImageEnableNV &&
"Function <vkCmdSetShadingRateImageEnableNV> requires <VK_EXT_extended_dynamic_state3>" );
"Function <vkCmdSetShadingRateImageEnableNV> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetShadingRateImageEnableNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( shadingRateImageEnable ) );
}
@ -19555,7 +19765,7 @@ namespace VULKAN_HPP_NAMESPACE
CommandBuffer::setRepresentativeFragmentTestEnableNV( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRepresentativeFragmentTestEnableNV &&
"Function <vkCmdSetRepresentativeFragmentTestEnableNV> requires <VK_EXT_extended_dynamic_state3>" );
"Function <vkCmdSetRepresentativeFragmentTestEnableNV> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetRepresentativeFragmentTestEnableNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkBool32>( representativeFragmentTestEnable ) );
@ -19565,7 +19775,7 @@ namespace VULKAN_HPP_NAMESPACE
CommandBuffer::setCoverageReductionModeNV( VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoverageReductionModeNV &&
"Function <vkCmdSetCoverageReductionModeNV> requires <VK_EXT_extended_dynamic_state3>" );
"Function <vkCmdSetCoverageReductionModeNV> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetCoverageReductionModeNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkCoverageReductionModeNV>( coverageReductionMode ) );
@ -19667,6 +19877,63 @@ namespace VULKAN_HPP_NAMESPACE
reinterpret_cast<const VkOpticalFlowExecuteInfoNV *>( &executeInfo ) );
}
//=== VK_EXT_shader_object ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_RAII_NAMESPACE::ShaderEXT>
Device::createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::ShaderEXTs( *this, createInfos, allocator );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::ShaderEXT
Device::createShaderEXT( VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::ShaderEXT( *this, createInfo, allocator );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<uint8_t> ShaderEXT::getBinaryData() const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetShaderBinaryDataEXT && "Function <vkGetShaderBinaryDataEXT> requires <VK_EXT_shader_object>" );
std::vector<uint8_t> data;
size_t dataSize;
VkResult result;
do
{
result = getDispatcher()->vkGetShaderBinaryDataEXT( static_cast<VkDevice>( m_device ), static_cast<VkShaderEXT>( m_shader ), &dataSize, nullptr );
if ( ( result == VK_SUCCESS ) && dataSize )
{
data.resize( dataSize );
result = getDispatcher()->vkGetShaderBinaryDataEXT(
static_cast<VkDevice>( m_device ), static_cast<VkShaderEXT>( m_shader ), &dataSize, reinterpret_cast<void *>( data.data() ) );
}
} while ( result == VK_INCOMPLETE );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::ShaderEXT::getBinaryData" );
VULKAN_HPP_ASSERT( dataSize <= data.size() );
if ( dataSize < data.size() )
{
data.resize( dataSize );
}
return data;
}
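// Illustrative usage sketch, not part of the generated header: recreating a shader object from a blob
// previously returned by getBinaryData() above. Whether a cached blob is still loadable should be gated
// on the shaderBinaryUUID / shaderBinaryVersion reported in PhysicalDeviceShaderObjectPropertiesEXT.
// The helper name and the default vk:: / vk::raii:: namespace aliases are assumptions of this sketch.
vk::raii::ShaderEXT loadCachedVertexShader( vk::raii::Device const & device, std::vector<uint8_t> const & cachedBinary )
{
  vk::ShaderCreateInfoEXT createInfo{};
  createInfo.setStage( vk::ShaderStageFlagBits::eVertex )
    .setCodeType( vk::ShaderCodeTypeEXT::eBinary )  // a device-specific binary blob instead of SPIR-V
    .setCode<uint8_t>( cachedBinary )               // fills codeSize and pCode from the array proxy
    .setPName( "main" );
  return device.createShaderEXT( createInfo );  // a stale blob fails with eErrorIncompatibleShaderBinaryEXT
}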
VULKAN_HPP_INLINE void CommandBuffer::bindShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderStageFlagBits> const & stages,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderEXT> const & shaders ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindShadersEXT && "Function <vkCmdBindShadersEXT> requires <VK_EXT_shader_object>" );
if ( stages.size() != shaders.size() )
{
throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindShadersEXT: stages.size() != shaders.size()" );
}
getDispatcher()->vkCmdBindShadersEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
stages.size(),
reinterpret_cast<const VkShaderStageFlagBits *>( stages.data() ),
reinterpret_cast<const VkShaderEXT *>( shaders.data() ) );
}
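// Illustrative usage sketch (not part of the generated header) tying the new RAII entry points together:
// compile a vertex shader object from SPIR-V, grab its cacheable binary, and bind it for the vertex stage.
// `device`, `commandBuffer` and `spirv` are assumed to be set up elsewhere with VK_EXT_shader_object
// enabled; all names here are illustrative only.
vk::raii::ShaderEXT createAndBindVertexShader( vk::raii::Device const &        device,
                                               vk::raii::CommandBuffer const & commandBuffer,
                                               std::vector<uint32_t> const &   spirv,
                                               std::vector<uint8_t> &          cacheableBinary )
{
  vk::ShaderCreateInfoEXT createInfo{};
  createInfo.setStage( vk::ShaderStageFlagBits::eVertex )
    .setNextStage( vk::ShaderStageFlagBits::eFragment )  // stages that may follow this one
    .setCodeType( vk::ShaderCodeTypeEXT::eSpirv )
    .setCode<uint32_t>( spirv )                          // fills codeSize and pCode
    .setPName( "main" );

  vk::raii::ShaderEXT shader = device.createShaderEXT( createInfo );  // throws a vk::SystemError on failure
  cacheableBinary = shader.getBinaryData();                           // can be stored and reloaded via eBinary
  commandBuffer.bindShadersEXT( vk::ShaderStageFlagBits::eVertex, *shader );  // stage count must equal shader count
  return shader;  // the shader must outlive execution of the recorded command buffer
}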
//=== VK_QCOM_tile_properties ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM> Framebuffer::getTilePropertiesQCOM() const
@ -5780,6 +5780,22 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Physical
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceImage2DViewOf3DFeaturesEXT>::value,
"PhysicalDeviceImage2DViewOf3DFeaturesEXT is not nothrow_move_constructible!" );
//=== VK_EXT_shader_tile_image ===
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImageFeaturesEXT ) == sizeof( VkPhysicalDeviceShaderTileImageFeaturesEXT ),
"struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImageFeaturesEXT>::value,
"struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImageFeaturesEXT>::value,
"PhysicalDeviceShaderTileImageFeaturesEXT is not nothrow_move_constructible!" );
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImagePropertiesEXT ) == sizeof( VkPhysicalDeviceShaderTileImagePropertiesEXT ),
"struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImagePropertiesEXT>::value,
"struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImagePropertiesEXT>::value,
"PhysicalDeviceShaderTileImagePropertiesEXT is not nothrow_move_constructible!" );
//=== VK_EXT_opacity_micromap ===
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT ) == sizeof( VkMicromapBuildInfoEXT ), "struct and wrapper have different size!" );
@ -6321,6 +6337,30 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Physical
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineProtectedAccessFeaturesEXT>::value,
"PhysicalDevicePipelineProtectedAccessFeaturesEXT is not nothrow_move_constructible!" );
//=== VK_EXT_shader_object ===
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ShaderEXT ) == sizeof( VkShaderEXT ), "handle and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ShaderEXT>::value, "ShaderEXT is not nothrow_move_constructible!" );
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderObjectFeaturesEXT ) == sizeof( VkPhysicalDeviceShaderObjectFeaturesEXT ),
"struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderObjectFeaturesEXT>::value,
"struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderObjectFeaturesEXT>::value,
"PhysicalDeviceShaderObjectFeaturesEXT is not nothrow_move_constructible!" );
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderObjectPropertiesEXT ) == sizeof( VkPhysicalDeviceShaderObjectPropertiesEXT ),
"struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderObjectPropertiesEXT>::value,
"struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderObjectPropertiesEXT>::value,
"PhysicalDeviceShaderObjectPropertiesEXT is not nothrow_move_constructible!" );
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT ) == sizeof( VkShaderCreateInfoEXT ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT>::value,
"ShaderCreateInfoEXT is not nothrow_move_constructible!" );
//=== VK_QCOM_tile_properties ===
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTilePropertiesFeaturesQCOM ) == sizeof( VkPhysicalDeviceTilePropertiesFeaturesQCOM ),
@ -73573,6 +73573,190 @@ namespace VULKAN_HPP_NAMESPACE
using Type = PhysicalDeviceShaderModuleIdentifierPropertiesEXT;
};
struct PhysicalDeviceShaderObjectFeaturesEXT
{
using NativeType = VkPhysicalDeviceShaderObjectFeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderObjectFeaturesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderObjectFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 shaderObject_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
: pNext( pNext_ )
, shaderObject( shaderObject_ )
{
}
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderObjectFeaturesEXT( PhysicalDeviceShaderObjectFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderObjectFeaturesEXT( VkPhysicalDeviceShaderObjectFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceShaderObjectFeaturesEXT( *reinterpret_cast<PhysicalDeviceShaderObjectFeaturesEXT const *>( &rhs ) )
{
}
PhysicalDeviceShaderObjectFeaturesEXT & operator=( PhysicalDeviceShaderObjectFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
PhysicalDeviceShaderObjectFeaturesEXT & operator=( VkPhysicalDeviceShaderObjectFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderObjectFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderObjectFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderObjectFeaturesEXT & setShaderObject( VULKAN_HPP_NAMESPACE::Bool32 shaderObject_ ) VULKAN_HPP_NOEXCEPT
{
shaderObject = shaderObject_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
operator VkPhysicalDeviceShaderObjectFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderObjectFeaturesEXT *>( this );
}
operator VkPhysicalDeviceShaderObjectFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderObjectFeaturesEXT *>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, shaderObject );
}
#endif
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
auto operator<=>( PhysicalDeviceShaderObjectFeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceShaderObjectFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderObject == rhs.shaderObject );
# endif
}
bool operator!=( PhysicalDeviceShaderObjectFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderObjectFeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderObject = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderObjectFeaturesEXT>
{
using Type = PhysicalDeviceShaderObjectFeaturesEXT;
};
struct PhysicalDeviceShaderObjectPropertiesEXT
{
using NativeType = VkPhysicalDeviceShaderObjectPropertiesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderObjectPropertiesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderObjectPropertiesEXT( std::array<uint8_t, VK_UUID_SIZE> const & shaderBinaryUUID_ = {},
uint32_t shaderBinaryVersion_ = {},
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
: pNext( pNext_ )
, shaderBinaryUUID( shaderBinaryUUID_ )
, shaderBinaryVersion( shaderBinaryVersion_ )
{
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderObjectPropertiesEXT( PhysicalDeviceShaderObjectPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderObjectPropertiesEXT( VkPhysicalDeviceShaderObjectPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceShaderObjectPropertiesEXT( *reinterpret_cast<PhysicalDeviceShaderObjectPropertiesEXT const *>( &rhs ) )
{
}
PhysicalDeviceShaderObjectPropertiesEXT & operator=( PhysicalDeviceShaderObjectPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
PhysicalDeviceShaderObjectPropertiesEXT & operator=( VkPhysicalDeviceShaderObjectPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderObjectPropertiesEXT const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceShaderObjectPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderObjectPropertiesEXT *>( this );
}
operator VkPhysicalDeviceShaderObjectPropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderObjectPropertiesEXT *>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
std::
tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> const &, uint32_t const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, shaderBinaryUUID, shaderBinaryVersion );
}
#endif
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
auto operator<=>( PhysicalDeviceShaderObjectPropertiesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceShaderObjectPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderBinaryUUID == rhs.shaderBinaryUUID ) &&
( shaderBinaryVersion == rhs.shaderBinaryVersion );
# endif
}
bool operator!=( PhysicalDeviceShaderObjectPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderObjectPropertiesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> shaderBinaryUUID = {};
uint32_t shaderBinaryVersion = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderObjectPropertiesEXT>
{
using Type = PhysicalDeviceShaderObjectPropertiesEXT;
};
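// Short usage sketch (not part of the generated header): both structs above are typically read back
// through a StructureChain on the pNext chain of PhysicalDeviceFeatures2 / PhysicalDeviceProperties2
// (Vulkan 1.1 or VK_KHR_get_physical_device_properties2). The function names, the `physicalDevice`
// argument and the vk:: / vk::raii:: aliases are assumptions of this sketch.
bool supportsShaderObjects( vk::raii::PhysicalDevice const & physicalDevice )
{
  auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2, vk::PhysicalDeviceShaderObjectFeaturesEXT>();
  return chain.get<vk::PhysicalDeviceShaderObjectFeaturesEXT>().shaderObject != 0;
}

uint32_t queryShaderBinaryVersion( vk::raii::PhysicalDevice const & physicalDevice )
{
  auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2, vk::PhysicalDeviceShaderObjectPropertiesEXT>();
  // shaderBinaryUUID / shaderBinaryVersion identify which cached ShaderEXT binaries are still compatible.
  return chain.get<vk::PhysicalDeviceShaderObjectPropertiesEXT>().shaderBinaryVersion;
}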
struct PhysicalDeviceShaderSMBuiltinsFeaturesNV
{
using NativeType = VkPhysicalDeviceShaderSMBuiltinsFeaturesNV;
@ -74058,6 +74242,226 @@ namespace VULKAN_HPP_NAMESPACE
};
using PhysicalDeviceShaderTerminateInvocationFeaturesKHR = PhysicalDeviceShaderTerminateInvocationFeatures;
struct PhysicalDeviceShaderTileImageFeaturesEXT
{
using NativeType = VkPhysicalDeviceShaderTileImageFeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderTileImageFeaturesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderTileImageFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageColorReadAccess_ = {},
VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageDepthReadAccess_ = {},
VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageStencilReadAccess_ = {},
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
: pNext( pNext_ )
, shaderTileImageColorReadAccess( shaderTileImageColorReadAccess_ )
, shaderTileImageDepthReadAccess( shaderTileImageDepthReadAccess_ )
, shaderTileImageStencilReadAccess( shaderTileImageStencilReadAccess_ )
{
}
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderTileImageFeaturesEXT( PhysicalDeviceShaderTileImageFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderTileImageFeaturesEXT( VkPhysicalDeviceShaderTileImageFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceShaderTileImageFeaturesEXT( *reinterpret_cast<PhysicalDeviceShaderTileImageFeaturesEXT const *>( &rhs ) )
{
}
PhysicalDeviceShaderTileImageFeaturesEXT & operator=( PhysicalDeviceShaderTileImageFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
PhysicalDeviceShaderTileImageFeaturesEXT & operator=( VkPhysicalDeviceShaderTileImageFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImageFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderTileImageFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderTileImageFeaturesEXT &
setShaderTileImageColorReadAccess( VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageColorReadAccess_ ) VULKAN_HPP_NOEXCEPT
{
shaderTileImageColorReadAccess = shaderTileImageColorReadAccess_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderTileImageFeaturesEXT &
setShaderTileImageDepthReadAccess( VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageDepthReadAccess_ ) VULKAN_HPP_NOEXCEPT
{
shaderTileImageDepthReadAccess = shaderTileImageDepthReadAccess_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderTileImageFeaturesEXT &
setShaderTileImageStencilReadAccess( VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageStencilReadAccess_ ) VULKAN_HPP_NOEXCEPT
{
shaderTileImageStencilReadAccess = shaderTileImageStencilReadAccess_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
operator VkPhysicalDeviceShaderTileImageFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderTileImageFeaturesEXT *>( this );
}
operator VkPhysicalDeviceShaderTileImageFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderTileImageFeaturesEXT *>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
void * const &,
VULKAN_HPP_NAMESPACE::Bool32 const &,
VULKAN_HPP_NAMESPACE::Bool32 const &,
VULKAN_HPP_NAMESPACE::Bool32 const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, shaderTileImageColorReadAccess, shaderTileImageDepthReadAccess, shaderTileImageStencilReadAccess );
}
#endif
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
auto operator<=>( PhysicalDeviceShaderTileImageFeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceShaderTileImageFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderTileImageColorReadAccess == rhs.shaderTileImageColorReadAccess ) &&
( shaderTileImageDepthReadAccess == rhs.shaderTileImageDepthReadAccess ) &&
( shaderTileImageStencilReadAccess == rhs.shaderTileImageStencilReadAccess );
# endif
}
bool operator!=( PhysicalDeviceShaderTileImageFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderTileImageFeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageColorReadAccess = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageDepthReadAccess = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageStencilReadAccess = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderTileImageFeaturesEXT>
{
using Type = PhysicalDeviceShaderTileImageFeaturesEXT;
};
struct PhysicalDeviceShaderTileImagePropertiesEXT
{
using NativeType = VkPhysicalDeviceShaderTileImagePropertiesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderTileImagePropertiesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderTileImagePropertiesEXT( VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageCoherentReadAccelerated_ = {},
VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageReadSampleFromPixelRateInvocation_ = {},
VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageReadFromHelperInvocation_ = {},
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
: pNext( pNext_ )
, shaderTileImageCoherentReadAccelerated( shaderTileImageCoherentReadAccelerated_ )
, shaderTileImageReadSampleFromPixelRateInvocation( shaderTileImageReadSampleFromPixelRateInvocation_ )
, shaderTileImageReadFromHelperInvocation( shaderTileImageReadFromHelperInvocation_ )
{
}
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderTileImagePropertiesEXT( PhysicalDeviceShaderTileImagePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderTileImagePropertiesEXT( VkPhysicalDeviceShaderTileImagePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceShaderTileImagePropertiesEXT( *reinterpret_cast<PhysicalDeviceShaderTileImagePropertiesEXT const *>( &rhs ) )
{
}
PhysicalDeviceShaderTileImagePropertiesEXT & operator=( PhysicalDeviceShaderTileImagePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
PhysicalDeviceShaderTileImagePropertiesEXT & operator=( VkPhysicalDeviceShaderTileImagePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImagePropertiesEXT const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceShaderTileImagePropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderTileImagePropertiesEXT *>( this );
}
operator VkPhysicalDeviceShaderTileImagePropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderTileImagePropertiesEXT *>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
void * const &,
VULKAN_HPP_NAMESPACE::Bool32 const &,
VULKAN_HPP_NAMESPACE::Bool32 const &,
VULKAN_HPP_NAMESPACE::Bool32 const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie(
sType, pNext, shaderTileImageCoherentReadAccelerated, shaderTileImageReadSampleFromPixelRateInvocation, shaderTileImageReadFromHelperInvocation );
}
#endif
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
auto operator<=>( PhysicalDeviceShaderTileImagePropertiesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceShaderTileImagePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderTileImageCoherentReadAccelerated == rhs.shaderTileImageCoherentReadAccelerated ) &&
( shaderTileImageReadSampleFromPixelRateInvocation == rhs.shaderTileImageReadSampleFromPixelRateInvocation ) &&
( shaderTileImageReadFromHelperInvocation == rhs.shaderTileImageReadFromHelperInvocation );
# endif
}
bool operator!=( PhysicalDeviceShaderTileImagePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderTileImagePropertiesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageCoherentReadAccelerated = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageReadSampleFromPixelRateInvocation = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageReadFromHelperInvocation = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderTileImagePropertiesEXT>
{
using Type = PhysicalDeviceShaderTileImagePropertiesEXT;
};
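// Short usage sketch (not part of the generated header): like other feature structs, the tile image
// features can be chained into DeviceCreateInfo to enable the feature at device creation. The queue
// create info, the single-extension list and the helper name are assumptions of this sketch.
vk::raii::Device createDeviceWithTileImageReads( vk::raii::PhysicalDevice const &  physicalDevice,
                                                 vk::DeviceQueueCreateInfo const & queueCreateInfo )
{
  vk::PhysicalDeviceShaderTileImageFeaturesEXT tileImageFeatures{};
  tileImageFeatures.setShaderTileImageColorReadAccess( VK_TRUE );

  char const * extensionNames[] = { VK_EXT_SHADER_TILE_IMAGE_EXTENSION_NAME };
  vk::DeviceCreateInfo createInfo{};
  createInfo.setQueueCreateInfoCount( 1 )
    .setPQueueCreateInfos( &queueCreateInfo )
    .setEnabledExtensionCount( 1 )
    .setPpEnabledExtensionNames( extensionNames )
    .setPNext( &tileImageFeatures );  // the feature struct rides on the pNext chain of DeviceCreateInfo
  return vk::raii::Device( physicalDevice, createInfo );
}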
struct PhysicalDeviceShadingRateImageFeaturesNV
{
using NativeType = VkPhysicalDeviceShadingRateImageFeaturesNV;
@ -83559,6 +83963,7 @@ namespace VULKAN_HPP_NAMESPACE
using Type = PipelineShaderStageRequiredSubgroupSizeCreateInfo;
};
using PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT = PipelineShaderStageRequiredSubgroupSizeCreateInfo;
using ShaderRequiredSubgroupSizeCreateInfoEXT = PipelineShaderStageRequiredSubgroupSizeCreateInfo;
struct PipelineTessellationDomainOriginStateCreateInfo
{
@ -94204,6 +94609,321 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t data = {};
};
struct ShaderCreateInfoEXT
{
using NativeType = VkShaderCreateInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eShaderCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ShaderCreateInfoEXT( VULKAN_HPP_NAMESPACE::ShaderCreateFlagsEXT flags_ = {},
VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_ = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex,
VULKAN_HPP_NAMESPACE::ShaderStageFlags nextStage_ = {},
VULKAN_HPP_NAMESPACE::ShaderCodeTypeEXT codeType_ = VULKAN_HPP_NAMESPACE::ShaderCodeTypeEXT::eBinary,
size_t codeSize_ = {},
const void * pCode_ = {},
const char * pName_ = {},
uint32_t setLayoutCount_ = {},
const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ = {},
uint32_t pushConstantRangeCount_ = {},
const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges_ = {},
const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo_ = {},
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
: pNext( pNext_ )
, flags( flags_ )
, stage( stage_ )
, nextStage( nextStage_ )
, codeType( codeType_ )
, codeSize( codeSize_ )
, pCode( pCode_ )
, pName( pName_ )
, setLayoutCount( setLayoutCount_ )
, pSetLayouts( pSetLayouts_ )
, pushConstantRangeCount( pushConstantRangeCount_ )
, pPushConstantRanges( pPushConstantRanges_ )
, pSpecializationInfo( pSpecializationInfo_ )
{
}
VULKAN_HPP_CONSTEXPR ShaderCreateInfoEXT( ShaderCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ShaderCreateInfoEXT( VkShaderCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : ShaderCreateInfoEXT( *reinterpret_cast<ShaderCreateInfoEXT const *>( &rhs ) )
{
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
template <typename T>
ShaderCreateInfoEXT( VULKAN_HPP_NAMESPACE::ShaderCreateFlagsEXT flags_,
VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_,
VULKAN_HPP_NAMESPACE::ShaderStageFlags nextStage_,
VULKAN_HPP_NAMESPACE::ShaderCodeTypeEXT codeType_,
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & code_,
const char * pName_ = {},
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayout> const & setLayouts_ = {},
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PushConstantRange> const & pushConstantRanges_ = {},
const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo_ = {},
const void * pNext_ = nullptr )
: pNext( pNext_ )
, flags( flags_ )
, stage( stage_ )
, nextStage( nextStage_ )
, codeType( codeType_ )
, codeSize( code_.size() * sizeof( T ) )
, pCode( code_.data() )
, pName( pName_ )
, setLayoutCount( static_cast<uint32_t>( setLayouts_.size() ) )
, pSetLayouts( setLayouts_.data() )
, pushConstantRangeCount( static_cast<uint32_t>( pushConstantRanges_.size() ) )
, pPushConstantRanges( pushConstantRanges_.data() )
, pSpecializationInfo( pSpecializationInfo_ )
{
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
ShaderCreateInfoEXT & operator=( ShaderCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
ShaderCreateInfoEXT & operator=( VkShaderCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::ShaderCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setStage( VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_ ) VULKAN_HPP_NOEXCEPT
{
stage = stage_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setNextStage( VULKAN_HPP_NAMESPACE::ShaderStageFlags nextStage_ ) VULKAN_HPP_NOEXCEPT
{
nextStage = nextStage_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setCodeType( VULKAN_HPP_NAMESPACE::ShaderCodeTypeEXT codeType_ ) VULKAN_HPP_NOEXCEPT
{
codeType = codeType_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setCodeSize( size_t codeSize_ ) VULKAN_HPP_NOEXCEPT
{
codeSize = codeSize_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setPCode( const void * pCode_ ) VULKAN_HPP_NOEXCEPT
{
pCode = pCode_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
template <typename T>
ShaderCreateInfoEXT & setCode( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & code_ ) VULKAN_HPP_NOEXCEPT
{
codeSize = code_.size() * sizeof( T );
pCode = code_.data();
return *this;
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setPName( const char * pName_ ) VULKAN_HPP_NOEXCEPT
{
pName = pName_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setSetLayoutCount( uint32_t setLayoutCount_ ) VULKAN_HPP_NOEXCEPT
{
setLayoutCount = setLayoutCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setPSetLayouts( const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ ) VULKAN_HPP_NOEXCEPT
{
pSetLayouts = pSetLayouts_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
ShaderCreateInfoEXT &
setSetLayouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayout> const & setLayouts_ ) VULKAN_HPP_NOEXCEPT
{
setLayoutCount = static_cast<uint32_t>( setLayouts_.size() );
pSetLayouts = setLayouts_.data();
return *this;
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setPushConstantRangeCount( uint32_t pushConstantRangeCount_ ) VULKAN_HPP_NOEXCEPT
{
pushConstantRangeCount = pushConstantRangeCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT &
setPPushConstantRanges( const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges_ ) VULKAN_HPP_NOEXCEPT
{
pPushConstantRanges = pPushConstantRanges_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
ShaderCreateInfoEXT & setPushConstantRanges(
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PushConstantRange> const & pushConstantRanges_ ) VULKAN_HPP_NOEXCEPT
{
pushConstantRangeCount = static_cast<uint32_t>( pushConstantRanges_.size() );
pPushConstantRanges = pushConstantRanges_.data();
return *this;
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT &
setPSpecializationInfo( const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo_ ) VULKAN_HPP_NOEXCEPT
{
pSpecializationInfo = pSpecializationInfo_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
operator VkShaderCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkShaderCreateInfoEXT *>( this );
}
operator VkShaderCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkShaderCreateInfoEXT *>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
const void * const &,
VULKAN_HPP_NAMESPACE::ShaderCreateFlagsEXT const &,
VULKAN_HPP_NAMESPACE::ShaderStageFlagBits const &,
VULKAN_HPP_NAMESPACE::ShaderStageFlags const &,
VULKAN_HPP_NAMESPACE::ShaderCodeTypeEXT const &,
size_t const &,
const void * const &,
const char * const &,
uint32_t const &,
const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * const &,
uint32_t const &,
const VULKAN_HPP_NAMESPACE::PushConstantRange * const &,
const VULKAN_HPP_NAMESPACE::SpecializationInfo * const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType,
pNext,
flags,
stage,
nextStage,
codeType,
codeSize,
pCode,
pName,
setLayoutCount,
pSetLayouts,
pushConstantRangeCount,
pPushConstantRanges,
pSpecializationInfo );
}
#endif
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
std::strong_ordering operator<=>( ShaderCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
return cmp;
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
return cmp;
if ( auto cmp = flags <=> rhs.flags; cmp != 0 )
return cmp;
if ( auto cmp = stage <=> rhs.stage; cmp != 0 )
return cmp;
if ( auto cmp = nextStage <=> rhs.nextStage; cmp != 0 )
return cmp;
if ( auto cmp = codeType <=> rhs.codeType; cmp != 0 )
return cmp;
if ( auto cmp = codeSize <=> rhs.codeSize; cmp != 0 )
return cmp;
if ( auto cmp = pCode <=> rhs.pCode; cmp != 0 )
return cmp;
if ( pName != rhs.pName )
if ( auto cmp = strcmp( pName, rhs.pName ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
if ( auto cmp = setLayoutCount <=> rhs.setLayoutCount; cmp != 0 )
return cmp;
if ( auto cmp = pSetLayouts <=> rhs.pSetLayouts; cmp != 0 )
return cmp;
if ( auto cmp = pushConstantRangeCount <=> rhs.pushConstantRangeCount; cmp != 0 )
return cmp;
if ( auto cmp = pPushConstantRanges <=> rhs.pPushConstantRanges; cmp != 0 )
return cmp;
if ( auto cmp = pSpecializationInfo <=> rhs.pSpecializationInfo; cmp != 0 )
return cmp;
return std::strong_ordering::equivalent;
}
#endif
bool operator==( ShaderCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( stage == rhs.stage ) && ( nextStage == rhs.nextStage ) &&
( codeType == rhs.codeType ) && ( codeSize == rhs.codeSize ) && ( pCode == rhs.pCode ) &&
( ( pName == rhs.pName ) || ( strcmp( pName, rhs.pName ) == 0 ) ) && ( setLayoutCount == rhs.setLayoutCount ) &&
( pSetLayouts == rhs.pSetLayouts ) && ( pushConstantRangeCount == rhs.pushConstantRangeCount ) &&
( pPushConstantRanges == rhs.pPushConstantRanges ) && ( pSpecializationInfo == rhs.pSpecializationInfo );
}
bool operator!=( ShaderCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eShaderCreateInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ShaderCreateFlagsEXT flags = {};
VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex;
VULKAN_HPP_NAMESPACE::ShaderStageFlags nextStage = {};
VULKAN_HPP_NAMESPACE::ShaderCodeTypeEXT codeType = VULKAN_HPP_NAMESPACE::ShaderCodeTypeEXT::eBinary;
size_t codeSize = {};
const void * pCode = {};
const char * pName = {};
uint32_t setLayoutCount = {};
const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts = {};
uint32_t pushConstantRangeCount = {};
const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges = {};
const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo = {};
};
template <>
struct CppType<StructureType, StructureType::eShaderCreateInfoEXT>
{
using Type = ShaderCreateInfoEXT;
};
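// Illustrative only, not part of the generated header: filling the struct above for a compute shader.
// `spirv` and `setLayout` are assumed to outlive the returned structure, since codeSize / pCode and
// pSetLayouts point into the caller's data; the helper name is an assumption of this sketch.
vk::ShaderCreateInfoEXT makeComputeShaderInfo( std::vector<uint32_t> const & spirv, vk::DescriptorSetLayout const & setLayout )
{
  return vk::ShaderCreateInfoEXT( {},                                 // flags
                                  vk::ShaderStageFlagBits::eCompute,  // stage
                                  {},                                 // nextStage: nothing follows a compute shader
                                  vk::ShaderCodeTypeEXT::eSpirv,      // codeType
                                  spirv.size() * sizeof( uint32_t ),  // codeSize is given in bytes
                                  spirv.data(),                       // pCode
                                  "main",                             // pName: the entry point
                                  1,                                  // setLayoutCount
                                  &setLayout );                       // pSetLayouts
}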
struct ShaderModuleCreateInfo
{
using NativeType = VkShaderModuleCreateInfo;
@ -3205,6 +3205,32 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr( 0, result.size() - 3 ) + " }";
}
//=== VK_EXT_shader_object ===
VULKAN_HPP_INLINE std::string to_string( ShaderCreateFlagsEXT value )
{
if ( !value )
return "{}";
std::string result;
if ( value & ShaderCreateFlagBitsEXT::eLinkStage )
result += "LinkStage | ";
if ( value & ShaderCreateFlagBitsEXT::eAllowVaryingSubgroupSize )
result += "AllowVaryingSubgroupSize | ";
if ( value & ShaderCreateFlagBitsEXT::eRequireFullSubgroups )
result += "RequireFullSubgroups | ";
if ( value & ShaderCreateFlagBitsEXT::eNoTaskShader )
result += "NoTaskShader | ";
if ( value & ShaderCreateFlagBitsEXT::eDispatchBase )
result += "DispatchBase | ";
if ( value & ShaderCreateFlagBitsEXT::eFragmentShadingRateAttachment )
result += "FragmentShadingRateAttachment | ";
if ( value & ShaderCreateFlagBitsEXT::eFragmentDensityMapAttachment )
result += "FragmentDensityMapAttachment | ";
return "{ " + result.substr( 0, result.size() - 3 ) + " }";
}
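// A quick illustration (not part of the header) of the formatting produced by the bitmask overload above,
// for one example combination of flags:
//   vk::to_string( vk::ShaderCreateFlagBitsEXT::eLinkStage | vk::ShaderCreateFlagBitsEXT::eRequireFullSubgroups )
// is expected to yield "{ LinkStage | RequireFullSubgroups }".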
//=======================
//=== ENUMs to_string ===
//=======================
@ -3276,6 +3302,7 @@ namespace VULKAN_HPP_NAMESPACE
case Result::eErrorInvalidVideoStdParametersKHR: return "ErrorInvalidVideoStdParametersKHR";
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
case Result::eErrorCompressionExhaustedEXT: return "ErrorCompressionExhaustedEXT";
case Result::eErrorIncompatibleShaderBinaryEXT: return "ErrorIncompatibleShaderBinaryEXT";
default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
}
}
@ -4018,6 +4045,8 @@ namespace VULKAN_HPP_NAMESPACE
case StructureType::ePhysicalDeviceMultiDrawFeaturesEXT: return "PhysicalDeviceMultiDrawFeaturesEXT";
case StructureType::ePhysicalDeviceMultiDrawPropertiesEXT: return "PhysicalDeviceMultiDrawPropertiesEXT";
case StructureType::ePhysicalDeviceImage2DViewOf3DFeaturesEXT: return "PhysicalDeviceImage2DViewOf3DFeaturesEXT";
case StructureType::ePhysicalDeviceShaderTileImageFeaturesEXT: return "PhysicalDeviceShaderTileImageFeaturesEXT";
case StructureType::ePhysicalDeviceShaderTileImagePropertiesEXT: return "PhysicalDeviceShaderTileImagePropertiesEXT";
case StructureType::eMicromapBuildInfoEXT: return "MicromapBuildInfoEXT";
case StructureType::eMicromapVersionInfoEXT: return "MicromapVersionInfoEXT";
case StructureType::eCopyMicromapInfoEXT: return "CopyMicromapInfoEXT";
@ -4080,6 +4109,9 @@ namespace VULKAN_HPP_NAMESPACE
case StructureType::eOpticalFlowSessionCreatePrivateDataInfoNV: return "OpticalFlowSessionCreatePrivateDataInfoNV";
case StructureType::ePhysicalDeviceLegacyDitheringFeaturesEXT: return "PhysicalDeviceLegacyDitheringFeaturesEXT";
case StructureType::ePhysicalDevicePipelineProtectedAccessFeaturesEXT: return "PhysicalDevicePipelineProtectedAccessFeaturesEXT";
case StructureType::ePhysicalDeviceShaderObjectFeaturesEXT: return "PhysicalDeviceShaderObjectFeaturesEXT";
case StructureType::ePhysicalDeviceShaderObjectPropertiesEXT: return "PhysicalDeviceShaderObjectPropertiesEXT";
case StructureType::eShaderCreateInfoEXT: return "ShaderCreateInfoEXT";
case StructureType::ePhysicalDeviceTilePropertiesFeaturesQCOM: return "PhysicalDeviceTilePropertiesFeaturesQCOM";
case StructureType::eTilePropertiesQCOM: return "TilePropertiesQCOM";
case StructureType::ePhysicalDeviceAmigoProfilingFeaturesSEC: return "PhysicalDeviceAmigoProfilingFeaturesSEC";
@ -4161,6 +4193,7 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_USE_PLATFORM_FUCHSIA*/
case ObjectType::eMicromapEXT: return "MicromapEXT";
case ObjectType::eOpticalFlowSessionNV: return "OpticalFlowSessionNV";
case ObjectType::eShaderEXT: return "ShaderEXT";
default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
}
}
@ -8312,6 +8345,33 @@ namespace VULKAN_HPP_NAMESPACE
}
}
//=== VK_EXT_shader_object ===
VULKAN_HPP_INLINE std::string to_string( ShaderCreateFlagBitsEXT value )
{
switch ( value )
{
case ShaderCreateFlagBitsEXT::eLinkStage: return "LinkStage";
case ShaderCreateFlagBitsEXT::eAllowVaryingSubgroupSize: return "AllowVaryingSubgroupSize";
case ShaderCreateFlagBitsEXT::eRequireFullSubgroups: return "RequireFullSubgroups";
case ShaderCreateFlagBitsEXT::eNoTaskShader: return "NoTaskShader";
case ShaderCreateFlagBitsEXT::eDispatchBase: return "DispatchBase";
case ShaderCreateFlagBitsEXT::eFragmentShadingRateAttachment: return "FragmentShadingRateAttachment";
case ShaderCreateFlagBitsEXT::eFragmentDensityMapAttachment: return "FragmentDensityMapAttachment";
default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
}
}
VULKAN_HPP_INLINE std::string to_string( ShaderCodeTypeEXT value )
{
switch ( value )
{
case ShaderCodeTypeEXT::eBinary: return "Binary";
case ShaderCodeTypeEXT::eSpirv: return "Spirv";
default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
}
}
//=== VK_NV_ray_tracing_invocation_reorder ===
VULKAN_HPP_INLINE std::string to_string( RayTracingInvocationReorderModeNV value )
@ -364,10 +364,12 @@ def makeGenOpts(args):
'VK_KHR_video_encode_queue',
'VK_EXT_video_encode_h264',
'VK_EXT_video_encode_h265',
'VK_NV_displacement_micromap',
]
betaSuppressExtensions = [
'VK_KHR_video_queue',
'VK_EXT_opacity_micromap',
]
platforms = [
File diff suppressed because one or more lines are too long
@ -173,7 +173,7 @@ branch of the member gitlab server.
#define <name>VKSC_API_VERSION_1_0</name> <type>VK_MAKE_API_VERSION</type>(VKSC_API_VARIANT, 1, 0, 0)// Patch version should always be set to 0</type>
<type api="vulkan" category="define">// Version of this file
#define <name>VK_HEADER_VERSION</name> 246</type>
<type api="vulkan" category="define" requires="VK_HEADER_VERSION">// Complete version of this file
#define <name>VK_HEADER_VERSION_COMPLETE</name> <type>VK_MAKE_API_VERSION</type>(0, 1, 3, VK_HEADER_VERSION)</type>
<type api="vulkansc" category="define">// Version of this file
@ -474,6 +474,7 @@ typedef void* <name>MTLSharedEvent_id</name>;
<type requires="VkOpticalFlowExecuteFlagBitsNV" category="bitmask">typedef <type>VkFlags</type> <name>VkOpticalFlowExecuteFlagsNV</name>;</type>
<type requires="VkPresentScalingFlagBitsEXT" category="bitmask">typedef <type>VkFlags</type> <name>VkPresentScalingFlagsEXT</name>;</type>
<type requires="VkPresentGravityFlagBitsEXT" category="bitmask">typedef <type>VkFlags</type> <name>VkPresentGravityFlagsEXT</name>;</type>
<type requires="VkShaderCreateFlagBitsEXT" category="bitmask">typedef <type>VkFlags</type> <name>VkShaderCreateFlagsEXT</name>;</type>
<comment>Video Core extension</comment>
<type requires="VkVideoCodecOperationFlagBitsKHR" category="bitmask">typedef <type>VkFlags</type> <name>VkVideoCodecOperationFlagsKHR</name>;</type>
@ -555,6 +556,7 @@ typedef void* <name>MTLSharedEvent_id</name>;
<type category="handle" parent="VkDevice" objtypeenum="VK_OBJECT_TYPE_CU_FUNCTION_NVX"><type>VK_DEFINE_NON_DISPATCHABLE_HANDLE</type>(<name>VkCuFunctionNVX</name>)</type>
<type category="handle" parent="VkDevice" objtypeenum="VK_OBJECT_TYPE_OPTICAL_FLOW_SESSION_NV"><type>VK_DEFINE_NON_DISPATCHABLE_HANDLE</type>(<name>VkOpticalFlowSessionNV</name>)</type>
<type category="handle" parent="VkDevice" objtypeenum="VK_OBJECT_TYPE_MICROMAP_EXT"><type>VK_DEFINE_NON_DISPATCHABLE_HANDLE</type>(<name>VkMicromapEXT</name>)</type>
<type category="handle" parent="VkDevice" objtypeenum="VK_OBJECT_TYPE_SHADER_EXT"><type>VK_DEFINE_NON_DISPATCHABLE_HANDLE</type>(<name>VkShaderEXT</name>)</type>
<comment>WSI extensions</comment>
<type category="handle" parent="VkPhysicalDevice" objtypeenum="VK_OBJECT_TYPE_DISPLAY_KHR"><type>VK_DEFINE_NON_DISPATCHABLE_HANDLE</type>(<name>VkDisplayKHR</name>)</type>
@ -768,6 +770,8 @@ typedef void* <name>MTLSharedEvent_id</name>;
<type name="VkMemoryDecompressionMethodFlagBitsNV" category="enum"/>
<type name="VkDirectDriverLoadingModeLUNARG" category="enum"/>
<type name="VkDisplacementMicromapFormatNV" category="enum"/>
<type name="VkShaderCreateFlagBitsEXT" category="enum"/>
<type name="VkShaderCodeTypeEXT" category="enum"/>
<comment>WSI extensions</comment>
<type name="VkColorSpaceKHR" category="enum"/>
@ -5239,12 +5243,13 @@ typedef void* <name>MTLSharedEvent_id</name>;
<member limittype="bitmask"><type>VkShaderStageFlags</type> <name>requiredSubgroupSizeStages</name><comment>The shader stages that support specifying a subgroup size</comment></member>
</type>
<type category="struct" name="VkPhysicalDeviceSubgroupSizeControlPropertiesEXT" alias="VkPhysicalDeviceSubgroupSizeControlProperties"/>
<type category="struct" name="VkPipelineShaderStageRequiredSubgroupSizeCreateInfo" returnedonly="true" structextends="VkPipelineShaderStageCreateInfo">
<type category="struct" name="VkPipelineShaderStageRequiredSubgroupSizeCreateInfo" returnedonly="true" structextends="VkPipelineShaderStageCreateInfo,VkShaderCreateInfoEXT">
<member values="VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO"><type>VkStructureType</type> <name>sType</name></member>
<member optional="true"><type>void</type>* <name>pNext</name></member>
<member><type>uint32_t</type> <name>requiredSubgroupSize</name></member>
</type>
<type category="struct" name="VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT" alias="VkPipelineShaderStageRequiredSubgroupSizeCreateInfo"/>
<type category="struct" name="VkShaderRequiredSubgroupSizeCreateInfoEXT" alias="VkPipelineShaderStageRequiredSubgroupSizeCreateInfo"/>
<type category="struct" name="VkSubpassShadingPipelineCreateInfoHUAWEI" returnedonly="true" structextends="VkComputePipelineCreateInfo">
<member values="VK_STRUCTURE_TYPE_SUBPASS_SHADING_PIPELINE_CREATE_INFO_HUAWEI"><type>VkStructureType</type> <name>sType</name></member>
<member optional="true"><type>void</type>* <name>pNext</name></member>
@ -7680,10 +7685,10 @@ typedef void* <name>MTLSharedEvent_id</name>;
<type category="struct" name="VkAccelerationStructureTrianglesDisplacementMicromapNV" structextends="VkAccelerationStructureGeometryTrianglesDataKHR">
<member values="VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_TRIANGLES_DISPLACEMENT_MICROMAP_NV"><type>VkStructureType</type> <name>sType</name></member>
<member optional="true"><type>void</type>* <name>pNext</name></member>
<member><type>VkFormat</type> <name>displacementBiasAndScaleFormat</name></member>
<member><type>VkFormat</type> <name>displacementVectorFormat</name></member>
<member noautovalidity="true"><type>VkDeviceOrHostAddressConstKHR</type> <name>displacementBiasAndScaleBuffer</name></member>
<member><type>VkDeviceSize</type> <name>displacementBiasAndScaleStride</name></member>
<member noautovalidity="true"><type>VkDeviceOrHostAddressConstKHR</type> <name>displacementVectorBuffer</name></member>
@@ -7693,13 +7698,13 @@ typedef void* <name>MTLSharedEvent_id</name>;
<member><type>VkIndexType</type> <name>indexType</name></member>
<member noautovalidity="true"><type>VkDeviceOrHostAddressConstKHR</type> <name>indexBuffer</name></member>
<member><type>VkDeviceSize</type> <name>indexStride</name></member>
<member><type>uint32_t</type> <name>baseTriangle</name></member>
<member optional="true"><type>uint32_t</type> <name>usageCountsCount</name></member>
<member len="usageCountsCount" optional="true">const <type>VkMicromapUsageEXT</type>* <name>pUsageCounts</name></member>
<member len="usageCountsCount,1" optional="true,false">const <type>VkMicromapUsageEXT</type>* const* <name>ppUsageCounts</name></member>
<member><type>VkMicromapEXT</type> <name>micromap</name></member>
</type>
<type category="struct" name="VkPipelinePropertiesIdentifierEXT">
@@ -8126,6 +8131,47 @@ typedef void* <name>MTLSharedEvent_id</name>;
<member optional="true"><type>VkMemoryUnmapFlagsKHR</type> <name>flags</name></member>
<member externsync="true"><type>VkDeviceMemory</type> <name>memory</name></member>
</type>
<type category="struct" name="VkPhysicalDeviceShaderObjectFeaturesEXT" structextends="VkPhysicalDeviceFeatures2,VkDeviceCreateInfo">
<member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_OBJECT_FEATURES_EXT"><type>VkStructureType</type> <name>sType</name></member>
<member optional="true" noautovalidity="true"><type>void</type>* <name>pNext</name></member>
<member><type>VkBool32</type> <name>shaderObject</name></member>
</type>
<type category="struct" name="VkPhysicalDeviceShaderObjectPropertiesEXT" returnedonly="true" structextends="VkPhysicalDeviceProperties2">
<member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_OBJECT_PROPERTIES_EXT"><type>VkStructureType</type> <name>sType</name></member>
<member optional="true" noautovalidity="true"><type>void</type>* <name>pNext</name></member>
<member limittype="noauto"><type>uint8_t</type> <name>shaderBinaryUUID</name>[<enum>VK_UUID_SIZE</enum>]</member>
<member limittype="noauto"><type>uint32_t</type> <name>shaderBinaryVersion</name></member>
</type>
<type category="struct" name="VkShaderCreateInfoEXT">
<member values="VK_STRUCTURE_TYPE_SHADER_CREATE_INFO_EXT"><type>VkStructureType</type> <name>sType</name></member>
<member optional="true">const <type>void</type>* <name>pNext</name></member>
<member optional="true"><type>VkShaderCreateFlagsEXT</type> <name>flags</name></member>
<member><type>VkShaderStageFlagBits</type> <name>stage</name></member>
<member optional="true"><type>VkShaderStageFlags</type> <name>nextStage</name></member>
<member><type>VkShaderCodeTypeEXT</type> <name>codeType</name></member>
<member><type>size_t</type> <name>codeSize</name></member>
<member len="codeSize">const <type>void</type>* <name>pCode</name></member>
<member optional="true" len="null-terminated">const <type>char</type>* <name>pName</name></member>
<member optional="true"><type>uint32_t</type> <name>setLayoutCount</name></member>
<member optional="true" len="setLayoutCount">const <type>VkDescriptorSetLayout</type>* <name>pSetLayouts</name></member>
<member optional="true"><type>uint32_t</type> <name>pushConstantRangeCount</name></member>
<member optional="true" len="pushConstantRangeCount">const <type>VkPushConstantRange</type>* <name>pPushConstantRanges</name></member>
<member optional="true">const <type>VkSpecializationInfo</type>* <name>pSpecializationInfo</name></member>
</type>
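A minimal C sketch of filling this structure for a SPIR-V vertex shader that may be followed by a fragment stage; the SPIR-V blob, descriptor set layout, and push constant range are assumptions supplied by the caller, and headers new enough to contain these definitions are assumed:

#include <vulkan/vulkan.h>

/* Hypothetical helper: describes a SPIR-V vertex shader that may be followed
 * by a fragment stage. All inputs are placeholders provided by the caller. */
static VkShaderCreateInfoEXT makeVertexShaderInfo(const void *spirv, size_t spirvSize,
                                                  const VkDescriptorSetLayout *setLayout,
                                                  const VkPushConstantRange *pushRange)
{
    VkShaderCreateInfoEXT info = {
        .sType     = VK_STRUCTURE_TYPE_SHADER_CREATE_INFO_EXT,
        .pNext     = NULL,
        .flags     = 0,
        .stage     = VK_SHADER_STAGE_VERTEX_BIT,
        .nextStage = VK_SHADER_STAGE_FRAGMENT_BIT,   /* stages allowed to follow */
        .codeType  = VK_SHADER_CODE_TYPE_SPIRV_EXT,
        .codeSize  = spirvSize,                      /* size of pCode in bytes */
        .pCode     = spirv,
        .pName     = "main",                         /* shader entry point name */
        .setLayoutCount         = 1,
        .pSetLayouts            = setLayout,
        .pushConstantRangeCount = 1,
        .pPushConstantRanges    = pushRange,
        .pSpecializationInfo    = NULL,
    };
    return info;
}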
<type category="struct" name="VkPhysicalDeviceShaderTileImageFeaturesEXT" structextends="VkPhysicalDeviceFeatures2,VkDeviceCreateInfo">
<member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TILE_IMAGE_FEATURES_EXT"><type>VkStructureType</type> <name>sType</name></member>
<member optional="true"><type>void</type>* <name>pNext</name></member>
<member><type>VkBool32</type> <name>shaderTileImageColorReadAccess</name></member>
<member><type>VkBool32</type> <name>shaderTileImageDepthReadAccess</name></member>
<member><type>VkBool32</type> <name>shaderTileImageStencilReadAccess</name></member>
</type>
<type category="struct" name="VkPhysicalDeviceShaderTileImagePropertiesEXT" structextends="VkPhysicalDeviceProperties2" returnedonly="true">
<member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TILE_IMAGE_PROPERTIES_EXT"><type>VkStructureType</type> <name>sType</name></member>
<member optional="true"><type>void</type>* <name>pNext</name></member>
<member limittype="noauto"><type>VkBool32</type> <name>shaderTileImageCoherentReadAccelerated</name></member>
<member limittype="noauto"><type>VkBool32</type> <name>shaderTileImageReadSampleFromPixelRateInvocation</name></member>
<member limittype="noauto"><type>VkBool32</type> <name>shaderTileImageReadFromHelperInvocation</name></member>
</type>
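Both new feature structures chain into VkPhysicalDeviceFeatures2, so support can be queried in a single call. A minimal C sketch, assuming a Vulkan 1.1+ instance (or VK_KHR_get_physical_device_properties2) and up-to-date headers:

#include <vulkan/vulkan.h>
#include <stdbool.h>

/* Queries the shader object and shader tile image features in one pNext chain. */
static bool queryShaderObjectAndTileImage(VkPhysicalDevice physicalDevice,
                                          VkPhysicalDeviceShaderObjectFeaturesEXT *shaderObject,
                                          VkPhysicalDeviceShaderTileImageFeaturesEXT *tileImage)
{
    *tileImage = (VkPhysicalDeviceShaderTileImageFeaturesEXT){
        .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TILE_IMAGE_FEATURES_EXT,
    };
    *shaderObject = (VkPhysicalDeviceShaderObjectFeaturesEXT){
        .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_OBJECT_FEATURES_EXT,
        .pNext = tileImage,
    };
    VkPhysicalDeviceFeatures2 features2 = {
        .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2,
        .pNext = shaderObject,
    };
    vkGetPhysicalDeviceFeatures2(physicalDevice, &features2);
    return shaderObject->shaderObject == VK_TRUE;
}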
</types>
@@ -10156,6 +10202,19 @@ typedef void* <name>MTLSharedEvent_id</name>;
<enum value="2" name="VK_DISPLACEMENT_MICROMAP_FORMAT_256_TRIANGLES_128_BYTES_NV"/>
<enum value="3" name="VK_DISPLACEMENT_MICROMAP_FORMAT_1024_TRIANGLES_128_BYTES_NV"/>
</enums>
<enums name="VkShaderCreateFlagBitsEXT" type="bitmask">
<enum bitpos="0" name="VK_SHADER_CREATE_LINK_STAGE_BIT_EXT"/>
<enum bitpos="1" name="VK_SHADER_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT"/>
<enum bitpos="2" name="VK_SHADER_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT"/>
<enum bitpos="3" name="VK_SHADER_CREATE_NO_TASK_SHADER_BIT_EXT"/>
<enum bitpos="4" name="VK_SHADER_CREATE_DISPATCH_BASE_BIT_EXT"/>
<enum bitpos="5" name="VK_SHADER_CREATE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_EXT"/>
<enum bitpos="6" name="VK_SHADER_CREATE_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT"/>
</enums>
<enums name="VkShaderCodeTypeEXT" type="enum">
<enum value="0" name="VK_SHADER_CODE_TYPE_BINARY_EXT"/>
<enum value="1" name="VK_SHADER_CODE_TYPE_SPIRV_EXT"/>
</enums>
<commands comment="Vulkan command definitions">
<command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY,VK_ERROR_INITIALIZATION_FAILED,VK_ERROR_LAYER_NOT_PRESENT,VK_ERROR_EXTENSION_NOT_PRESENT,VK_ERROR_INCOMPATIBLE_DRIVER">
@@ -14000,6 +14059,34 @@ typedef void* <name>MTLSharedEvent_id</name>;
<param><type>VkDevice</type> <name>device</name></param>
<param>const <type>VkMemoryUnmapInfoKHR</type>* <name>pMemoryUnmapInfo</name></param>
</command>
<command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY,VK_ERROR_INITIALIZATION_FAILED,VK_ERROR_INCOMPATIBLE_SHADER_BINARY_EXT">
<proto><type>VkResult</type> <name>vkCreateShadersEXT</name></proto>
<param><type>VkDevice</type> <name>device</name></param>
<param><type>uint32_t</type> <name>createInfoCount</name></param>
<param len="createInfoCount">const <type>VkShaderCreateInfoEXT</type>* <name>pCreateInfos</name></param>
<param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
<param len="createInfoCount"><type>VkShaderEXT</type>* <name>pShaders</name></param>
</command>
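A minimal C sketch of the call itself, creating a linked vertex/fragment pair. The two create infos are assumed to be filled as in the VkShaderCreateInfoEXT sketch above, with VK_SHADER_CREATE_LINK_STAGE_BIT_EXT set in both flags fields, and the entry point is assumed to have been resolved (e.g. via vkGetDeviceProcAddr):

#include <vulkan/vulkan.h>
#include <stdio.h>

/* infos[0] and infos[1] describe the vertex and fragment stages and are
 * assumed to be prepared by the caller with the link flag set in both. */
static VkResult createLinkedShaders(VkDevice device,
                                    const VkShaderCreateInfoEXT infos[2],
                                    VkShaderEXT shaders[2])
{
    VkResult result = vkCreateShadersEXT(device, 2, infos, NULL, shaders);
    if (result == VK_ERROR_INCOMPATIBLE_SHADER_BINARY_EXT)
    {
        /* A previously saved binary was rejected by this driver/version; an
         * application would typically fall back to recreating from SPIR-V. */
        fprintf(stderr, "shader binary incompatible, recreate from SPIR-V\n");
    }
    return result;
}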
<command>
<proto><type>void</type> <name>vkDestroyShaderEXT</name></proto>
<param><type>VkDevice</type> <name>device</name></param>
<param externsync="true"><type>VkShaderEXT</type> <name>shader</name></param>
<param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
</command>
<command successcodes="VK_SUCCESS,VK_INCOMPLETE" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
<proto><type>VkResult</type> <name>vkGetShaderBinaryDataEXT</name></proto>
<param><type>VkDevice</type> <name>device</name></param>
<param><type>VkShaderEXT</type> <name>shader</name></param>
<param optional="false,true"><type>size_t</type>* <name>pDataSize</name></param>
<param optional="true" len="pDataSize"><type>void</type>* <name>pData</name></param>
</command>
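The size/data pair follows the usual two-call idiom: query the size with pData = NULL, then retrieve the blob. A minimal C sketch; the returned data could later be fed back through VkShaderCreateInfoEXT with VK_SHADER_CODE_TYPE_BINARY_EXT, typically after comparing shaderBinaryUUID and shaderBinaryVersion:

#include <vulkan/vulkan.h>
#include <stdlib.h>

/* Retrieves the opaque binary for a shader object; the caller owns and frees
 * the returned buffer. Returns NULL on failure. */
static void *getShaderBinary(VkDevice device, VkShaderEXT shader, size_t *outSize)
{
    size_t size = 0;
    if (vkGetShaderBinaryDataEXT(device, shader, &size, NULL) != VK_SUCCESS)
        return NULL;

    void *data = malloc(size);
    if (data == NULL)
        return NULL;

    if (vkGetShaderBinaryDataEXT(device, shader, &size, data) != VK_SUCCESS)
    {
        free(data);
        return NULL;
    }
    *outSize = size;
    return data;
}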
<command queues="graphics,compute" renderpass="both" cmdbufferlevel="primary,secondary" tasks="state">
<proto><type>void</type> <name>vkCmdBindShadersEXT</name></proto>
<param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
<param><type>uint32_t</type> <name>stageCount</name></param>
<param len="stageCount">const <type>VkShaderStageFlagBits</type>* <name>pStages</name></param>
<param optional="true,true" len="stageCount">const <type>VkShaderEXT</type>* <name>pShaders</name></param>
</command>
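A minimal C sketch of binding a vertex/fragment pair before drawing. The unused graphics stages are listed with VK_NULL_HANDLE elements (permitted by the optional pShaders elements above), the intent being to leave those stages unbound:

#include <vulkan/vulkan.h>

/* Binds a vertex/fragment pair and lists the remaining graphics stages with
 * null handles. */
static void bindGraphicsShaders(VkCommandBuffer cmd, VkShaderEXT vert, VkShaderEXT frag)
{
    const VkShaderStageFlagBits stages[] = {
        VK_SHADER_STAGE_VERTEX_BIT,
        VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT,
        VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT,
        VK_SHADER_STAGE_GEOMETRY_BIT,
        VK_SHADER_STAGE_FRAGMENT_BIT,
    };
    const VkShaderEXT shaders[] = {
        vert,
        VK_NULL_HANDLE, /* no tessellation control */
        VK_NULL_HANDLE, /* no tessellation evaluation */
        VK_NULL_HANDLE, /* no geometry */
        frag,
    };
    vkCmdBindShadersEXT(cmd, 5, stages, shaders);
}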
</commands>
<feature api="vulkan,vulkansc" name="VK_VERSION_1_0" number="1.0" comment="Vulkan core API interface definitions">
@@ -20756,10 +20843,14 @@ typedef void* <name>MTLSharedEvent_id</name>;
<enum bitpos="0" extends="VkInstanceCreateFlagBits" name="VK_INSTANCE_CREATE_ENUMERATE_PORTABILITY_BIT_KHR"/>
</require>
</extension>
<extension name="VK_KHR_extension_396" number="396" author="EXT" contact="Jan-Harald Fredriksen @janharaldfredriksen-arm" supported="disabled">
<extension name="VK_EXT_shader_tile_image" number="396" type="device" author="EXT" depends="VK_VERSION_1_3" contact="Jan-Harald Fredriksen @janharaldfredriksen-arm" supported="vulkan">
<require>
<enum value="0" name="VK_KHR_EXTENSION_396_SPEC_VERSION"/>
<enum value="&quot;VK_KHR_extension_396&quot;" name="VK_KHR_EXTENSION_396_EXTENSION_NAME"/>
<enum value="1" name="VK_EXT_SHADER_TILE_IMAGE_SPEC_VERSION"/>
<enum value="&quot;VK_EXT_shader_tile_image&quot;" name="VK_EXT_SHADER_TILE_IMAGE_EXTENSION_NAME"/>
<enum offset="0" extends="VkStructureType" name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TILE_IMAGE_FEATURES_EXT"/>
<enum offset="1" extends="VkStructureType" name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TILE_IMAGE_PROPERTIES_EXT"/>
<type name="VkPhysicalDeviceShaderTileImageFeaturesEXT"/>
<type name="VkPhysicalDeviceShaderTileImagePropertiesEXT"/>
</require>
</extension>
<extension name="VK_EXT_opacity_micromap" number="397" type="device" depends="VK_KHR_acceleration_structure+VK_KHR_synchronization2" author="EXT" contact="Christoph Kubisch @pixeljetstream, Eric Werness" supported="vulkan">
@@ -21617,13 +21708,83 @@ typedef void* <name>MTLSharedEvent_id</name>;
<enum value="&quot;VK_EXT_extension_482&quot;" name="VK_EXT_EXTENSION_482_EXTENSION_NAME"/>
</require>
</extension>
<extension name="VK_EXT_extension_483" number="483" author="EXT" contact="Piers Daniell" supported="disabled">
<extension name="VK_EXT_shader_object" number="483" depends="(VK_KHR_get_physical_device_properties2,VK_VERSION_1_1)+(VK_KHR_dynamic_rendering,VK_VERSION_1_3)" type="device" author="EXT" contact="Daniel Story @daniel-story" supported="vulkan">
<require>
<enum value="0" name="VK_EXT_EXTENSION_483_SPEC_VERSION"/>
<enum value="&quot;VK_EXT_extension_483&quot;" name="VK_EXT_EXTENSION_483_EXTENSION_NAME"/>
<enum bitpos="15" extends="VkShaderStageFlagBits" name="VK_SHADER_STAGE_EXT_483_RESERVE_15"/>
<enum bitpos="16" extends="VkShaderStageFlagBits" name="VK_SHADER_STAGE_EXT_483_RESERVE_16"/>
<enum bitpos="17" extends="VkShaderStageFlagBits" name="VK_SHADER_STAGE_EXT_483_RESERVE_17"/>
<enum value="1" name="VK_EXT_SHADER_OBJECT_SPEC_VERSION"/>
<enum value="&quot;VK_EXT_shader_object&quot;" name="VK_EXT_SHADER_OBJECT_EXTENSION_NAME"/>
<enum offset="0" extends="VkStructureType" name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_OBJECT_FEATURES_EXT"/>
<enum offset="1" extends="VkStructureType" name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_OBJECT_PROPERTIES_EXT"/>
<enum offset="2" extends="VkStructureType" name="VK_STRUCTURE_TYPE_SHADER_CREATE_INFO_EXT"/>
<enum extnumber="353" offset="1" extends="VkStructureType" name="VK_STRUCTURE_TYPE_VERTEX_INPUT_BINDING_DESCRIPTION_2_EXT"/>
<enum extnumber="353" offset="2" extends="VkStructureType" name="VK_STRUCTURE_TYPE_VERTEX_INPUT_ATTRIBUTE_DESCRIPTION_2_EXT"/>
<enum extends="VkStructureType" name="VK_STRUCTURE_TYPE_SHADER_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT" alias="VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO"/>
<enum offset="0" extends="VkObjectType" name="VK_OBJECT_TYPE_SHADER_EXT"/>
<enum offset="0" extends="VkResult" name="VK_ERROR_INCOMPATIBLE_SHADER_BINARY_EXT"/>
<type name="VkShaderEXT"/>
<type name="VkShaderCreateFlagBitsEXT"/>
<type name="VkShaderCreateFlagsEXT"/>
<type name="VkShaderCodeTypeEXT"/>
<type name="VkPhysicalDeviceShaderObjectFeaturesEXT"/>
<type name="VkPhysicalDeviceShaderObjectPropertiesEXT"/>
<type name="VkShaderCreateInfoEXT"/>
<type name="VkShaderRequiredSubgroupSizeCreateInfoEXT"/>
<type name="VkVertexInputBindingDescription2EXT"/>
<type name="VkVertexInputAttributeDescription2EXT"/>
<type name="VkColorBlendEquationEXT"/>
<type name="VkColorBlendAdvancedEXT"/>
<command name="vkCreateShadersEXT"/>
<command name="vkDestroyShaderEXT"/>
<command name="vkGetShaderBinaryDataEXT"/>
<command name="vkCmdBindShadersEXT"/>
<command name="vkCmdSetCullModeEXT"/>
<command name="vkCmdSetFrontFaceEXT"/>
<command name="vkCmdSetPrimitiveTopologyEXT"/>
<command name="vkCmdSetViewportWithCountEXT"/>
<command name="vkCmdSetScissorWithCountEXT"/>
<command name="vkCmdBindVertexBuffers2EXT"/>
<command name="vkCmdSetDepthTestEnableEXT"/>
<command name="vkCmdSetDepthWriteEnableEXT"/>
<command name="vkCmdSetDepthCompareOpEXT"/>
<command name="vkCmdSetDepthBoundsTestEnableEXT"/>
<command name="vkCmdSetStencilTestEnableEXT"/>
<command name="vkCmdSetStencilOpEXT"/>
<command name="vkCmdSetVertexInputEXT"/>
<command name="vkCmdSetPatchControlPointsEXT"/>
<command name="vkCmdSetRasterizerDiscardEnableEXT"/>
<command name="vkCmdSetDepthBiasEnableEXT"/>
<command name="vkCmdSetLogicOpEXT"/>
<command name="vkCmdSetPrimitiveRestartEnableEXT"/>
<command name="vkCmdSetTessellationDomainOriginEXT"/>
<command name="vkCmdSetDepthClampEnableEXT"/>
<command name="vkCmdSetPolygonModeEXT"/>
<command name="vkCmdSetRasterizationSamplesEXT"/>
<command name="vkCmdSetSampleMaskEXT"/>
<command name="vkCmdSetAlphaToCoverageEnableEXT"/>
<command name="vkCmdSetAlphaToOneEnableEXT"/>
<command name="vkCmdSetLogicOpEnableEXT"/>
<command name="vkCmdSetColorBlendEnableEXT"/>
<command name="vkCmdSetColorBlendEquationEXT"/>
<command name="vkCmdSetColorWriteMaskEXT"/>
<command name="vkCmdSetRasterizationStreamEXT"/>
<command name="vkCmdSetConservativeRasterizationModeEXT"/>
<command name="vkCmdSetExtraPrimitiveOverestimationSizeEXT"/>
<command name="vkCmdSetDepthClipEnableEXT"/>
<command name="vkCmdSetSampleLocationsEnableEXT"/>
<command name="vkCmdSetColorBlendAdvancedEXT"/>
<command name="vkCmdSetProvokingVertexModeEXT"/>
<command name="vkCmdSetLineRasterizationModeEXT"/>
<command name="vkCmdSetLineStippleEnableEXT"/>
<command name="vkCmdSetDepthClipNegativeOneToOneEXT"/>
<command name="vkCmdSetViewportWScalingEnableNV"/>
<command name="vkCmdSetViewportSwizzleNV"/>
<command name="vkCmdSetCoverageToColorEnableNV"/>
<command name="vkCmdSetCoverageToColorLocationNV"/>
<command name="vkCmdSetCoverageModulationModeNV"/>
<command name="vkCmdSetCoverageModulationTableEnableNV"/>
<command name="vkCmdSetCoverageModulationTableNV"/>
<command name="vkCmdSetShadingRateImageEnableNV"/>
<command name="vkCmdSetRepresentativeFragmentTestEnableNV"/>
<command name="vkCmdSetCoverageReductionModeNV"/>
</require>
</extension>
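Because shader objects are used without pipelines, the state covered by the commands listed in this extension is set dynamically before drawing. A partial C sketch of typical setup inside a dynamic render pass; the full set of required state depends on the bound stages and enabled features, so this is illustrative rather than exhaustive, and the extent values are placeholders:

#include <vulkan/vulkan.h>

/* Illustrative subset of the dynamic state an application might set before
 * vkCmdDraw* when drawing with shader objects. */
static void setBaselineDynamicState(VkCommandBuffer cmd, uint32_t width, uint32_t height)
{
    const VkViewport viewport = { 0.0f, 0.0f, (float)width, (float)height, 0.0f, 1.0f };
    const VkRect2D   scissor  = { { 0, 0 }, { width, height } };
    const VkSampleMask sampleMask = 0xFFFFFFFF;
    const VkBool32 blendEnable = VK_FALSE;
    const VkColorComponentFlags writeMask =
        VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT |
        VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT;

    vkCmdSetViewportWithCountEXT(cmd, 1, &viewport);
    vkCmdSetScissorWithCountEXT(cmd, 1, &scissor);
    vkCmdSetRasterizerDiscardEnableEXT(cmd, VK_FALSE);
    vkCmdSetPrimitiveTopologyEXT(cmd, VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST);
    vkCmdSetPrimitiveRestartEnableEXT(cmd, VK_FALSE);
    vkCmdSetVertexInputEXT(cmd, 0, NULL, 0, NULL); /* no vertex buffers bound */
    vkCmdSetCullModeEXT(cmd, VK_CULL_MODE_NONE);
    vkCmdSetFrontFaceEXT(cmd, VK_FRONT_FACE_COUNTER_CLOCKWISE);
    vkCmdSetPolygonModeEXT(cmd, VK_POLYGON_MODE_FILL);
    vkCmdSetDepthTestEnableEXT(cmd, VK_FALSE);
    vkCmdSetDepthWriteEnableEXT(cmd, VK_FALSE);
    vkCmdSetDepthBiasEnableEXT(cmd, VK_FALSE);
    vkCmdSetStencilTestEnableEXT(cmd, VK_FALSE);
    vkCmdSetRasterizationSamplesEXT(cmd, VK_SAMPLE_COUNT_1_BIT);
    vkCmdSetSampleMaskEXT(cmd, VK_SAMPLE_COUNT_1_BIT, &sampleMask);
    vkCmdSetAlphaToCoverageEnableEXT(cmd, VK_FALSE);
    vkCmdSetColorBlendEnableEXT(cmd, 0, 1, &blendEnable);
    vkCmdSetColorWriteMaskEXT(cmd, 0, 1, &writeMask);
}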
<extension name="VK_EXT_extension_484" number="484" author="KHR" contact="Chris Glover @cdglove" supported="disabled">
@@ -23529,6 +23690,9 @@ typedef void* <name>MTLSharedEvent_id</name>;
<spirvextension name="SPV_EXT_mesh_shader">
<enable extension="VK_EXT_mesh_shader"/>
</spirvextension>
<spirvextension name="SPV_EXT_shader_tile_image">
<enable extension="VK_EXT_shader_tile_image"/>
</spirvextension>
</spirvextensions>
<spirvcapabilities comment="SPIR-V Capabilities allowed in Vulkan and what is required to use it">
<spirvcapability name="Matrix">
@@ -24003,5 +24167,14 @@ typedef void* <name>MTLSharedEvent_id</name>;
<spirvcapability name="ClusterCullingShadingHUAWEI">
<enable struct="VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI" feature="clustercullingShader" requires="VK_HUAWEI_cluster_culling_shader"/>
</spirvcapability>
<spirvcapability name="TileImageColorReadAccessEXT">
<enable struct="VkPhysicalDeviceShaderTileImageFeaturesEXT" feature="shaderTileImageColorReadAccess" requires="VK_EXT_shader_tile_image"/>
</spirvcapability>
<spirvcapability name="TileImageDepthReadAccessEXT">
<enable struct="VkPhysicalDeviceShaderTileImageFeaturesEXT" feature="shaderTileImageDepthReadAccess" requires="VK_EXT_shader_tile_image"/>
</spirvcapability>
<spirvcapability name="TileImageStencilReadAccessEXT">
<enable struct="VkPhysicalDeviceShaderTileImageFeaturesEXT" feature="shaderTileImageStencilReadAccess" requires="VK_EXT_shader_tile_image"/>
</spirvcapability>
</spirvcapabilities>
</registry>