Update for Vulkan-Docs 1.3.269

Jon Leech committed 2023-10-20 03:59:17 -07:00
parent f4bfcd8852
commit 374f9fd975
16 changed files with 4618 additions and 1134 deletions


@@ -23,10 +23,8 @@ They can also use headers from, or packaged from, this repository.
In most cases, developers should only need the headers, not the scripts and
other material in this repository.
-In particular if you are packaging the headers for inclusion in Linux
-distributions or similar uses, we advise that you do not include the
-scripts, and direct potential users of the scripts to their canonical
-sources in the [Vulkan Specification
+If you need to run the scripts, please use them from their canonical source
+in the [Vulkan Specification
repository](https://github.com/KhronosGroup/Vulkan-Docs).
## Contributing


@@ -784,6 +784,10 @@ export namespace VULKAN_HPP_NAMESPACE
using VULKAN_HPP_NAMESPACE::DisplacementMicromapFormatNV;
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
//=== VK_ARM_scheduling_controls ===
using VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsFlagBitsARM;
using VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsFlagsARM;
//=== VK_NV_memory_decompression ===
using VULKAN_HPP_NAMESPACE::MemoryDecompressionMethodFlagBitsNV;
using VULKAN_HPP_NAMESPACE::MemoryDecompressionMethodFlagsNV;
@@ -2260,6 +2264,15 @@ export namespace VULKAN_HPP_NAMESPACE
using VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigCreateInfoNV;
using VULKAN_HPP_NAMESPACE::PhysicalDeviceDiagnosticsConfigFeaturesNV;
#if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_NV_cuda_kernel_launch ===
using VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV;
using VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV;
using VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV;
using VULKAN_HPP_NAMESPACE::PhysicalDeviceCudaKernelLaunchFeaturesNV;
using VULKAN_HPP_NAMESPACE::PhysicalDeviceCudaKernelLaunchPropertiesNV;
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
//=== VK_NV_low_latency ===
using VULKAN_HPP_NAMESPACE::QueryLowLatencySupportNV;
@@ -2517,6 +2530,11 @@ export namespace VULKAN_HPP_NAMESPACE
//=== VK_ARM_shader_core_properties ===
using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesARM;
//=== VK_ARM_scheduling_controls ===
using VULKAN_HPP_NAMESPACE::DeviceQueueShaderCoreControlCreateInfoARM;
using VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsFeaturesARM;
using VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsPropertiesARM;
//=== VK_EXT_image_sliced_view_of_3d ===
using VULKAN_HPP_NAMESPACE::ImageViewSlicedCreateInfoEXT;
using VULKAN_HPP_NAMESPACE::PhysicalDeviceImageSlicedViewOf3DFeaturesEXT;
@@ -2816,6 +2834,12 @@ export namespace VULKAN_HPP_NAMESPACE
//=== VK_NV_device_generated_commands ===
using VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV;
#if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_NV_cuda_kernel_launch ===
using VULKAN_HPP_NAMESPACE::CudaFunctionNV;
using VULKAN_HPP_NAMESPACE::CudaModuleNV;
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_buffer_collection ===
using VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA;
@@ -2874,6 +2898,9 @@ export namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_swapchain ===
using VULKAN_HPP_NAMESPACE::UniqueSwapchainKHR;
//=== VK_KHR_display ===
using VULKAN_HPP_NAMESPACE::UniqueDisplayKHR;
//=== VK_EXT_debug_report ===
using VULKAN_HPP_NAMESPACE::UniqueDebugReportCallbackEXT;
@@ -2897,12 +2924,21 @@ export namespace VULKAN_HPP_NAMESPACE
//=== VK_NV_ray_tracing ===
using VULKAN_HPP_NAMESPACE::UniqueAccelerationStructureNV;
//=== VK_INTEL_performance_query ===
using VULKAN_HPP_NAMESPACE::UniquePerformanceConfigurationINTEL;
//=== VK_KHR_deferred_host_operations ===
using VULKAN_HPP_NAMESPACE::UniqueDeferredOperationKHR;
//=== VK_NV_device_generated_commands ===
using VULKAN_HPP_NAMESPACE::UniqueIndirectCommandsLayoutNV;
# if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_NV_cuda_kernel_launch ===
using VULKAN_HPP_NAMESPACE::UniqueCudaFunctionNV;
using VULKAN_HPP_NAMESPACE::UniqueCudaModuleNV;
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
# if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_buffer_collection ===
using VULKAN_HPP_NAMESPACE::UniqueBufferCollectionFUCHSIA;
@@ -3001,6 +3037,12 @@ export namespace VULKAN_HPP_NAMESPACE
//=== VK_NV_device_generated_commands ===
using VULKAN_HPP_NAMESPACE::SharedIndirectCommandsLayoutNV;
# if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_NV_cuda_kernel_launch ===
using VULKAN_HPP_NAMESPACE::SharedCudaFunctionNV;
using VULKAN_HPP_NAMESPACE::SharedCudaModuleNV;
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
# if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_buffer_collection ===
using VULKAN_HPP_NAMESPACE::SharedBufferCollectionFUCHSIA;
@@ -3175,6 +3217,12 @@ export namespace VULKAN_HPP_NAMESPACE
//=== VK_NV_device_generated_commands ===
using VULKAN_HPP_RAII_NAMESPACE::IndirectCommandsLayoutNV;
# if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_NV_cuda_kernel_launch ===
using VULKAN_HPP_RAII_NAMESPACE::CudaFunctionNV;
using VULKAN_HPP_RAII_NAMESPACE::CudaModuleNV;
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
# if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_buffer_collection ===
using VULKAN_HPP_RAII_NAMESPACE::BufferCollectionFUCHSIA;


@@ -55,7 +55,7 @@ extern "C" __declspec( dllimport ) FARPROC __stdcall GetProcAddress( HINSTANCE h
# include <span>
#endif
-static_assert( VK_HEADER_VERSION == 268, "Wrong VK_HEADER_VERSION!" );
+static_assert( VK_HEADER_VERSION == 269, "Wrong VK_HEADER_VERSION!" );
// <tuple> includes <sys/sysmacros.h> through some other header
// this results in major(x) being resolved to gnu_dev_major(x)
@@ -94,6 +94,20 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_CONSTEXPR ArrayWrapper1D( std::array<T, N> const & data ) VULKAN_HPP_NOEXCEPT : std::array<T, N>( data ) {}
template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0>
VULKAN_HPP_CONSTEXPR_14 ArrayWrapper1D( std::string const & data ) VULKAN_HPP_NOEXCEPT
{
copy( data.data(), data.length() );
}
#if 17 <= VULKAN_HPP_CPP_VERSION
template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0>
VULKAN_HPP_CONSTEXPR_14 ArrayWrapper1D( std::string_view data ) VULKAN_HPP_NOEXCEPT
{
copy( data.data(), data.length() );
}
#endif
#if ( VK_USE_64_BIT_PTR_DEFINES == 0 )
// on 32 bit compiles, needs overloads on index type int to resolve ambiguities
VULKAN_HPP_CONSTEXPR T const & operator[]( int index ) const VULKAN_HPP_NOEXCEPT
@@ -120,14 +134,14 @@ namespace VULKAN_HPP_NAMESPACE
template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0>
operator std::string() const
{
-return std::string( this->data() );
+return std::string( this->data(), N );
}
#if 17 <= VULKAN_HPP_CPP_VERSION
template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0>
operator std::string_view() const
{
-return std::string_view( this->data() );
+return std::string_view( this->data(), N );
}
#endif
@@ -174,6 +188,20 @@ namespace VULKAN_HPP_NAMESPACE
{
return *static_cast<std::array<char, N> const *>( this ) != *static_cast<std::array<char, N> const *>( &rhs );
}
private:
VULKAN_HPP_CONSTEXPR_14 void copy( char const * data, size_t len ) VULKAN_HPP_NOEXCEPT
{
size_t n = std::min( N, len );
for ( size_t i = 0; i < n; ++i )
{
( *this )[i] = data[i];
}
for ( size_t i = n; i < N; ++i )
{
( *this )[i] = 0;
}
}
};
// specialization of relational operators between std::string and arrays of chars
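The new `std::string` / `std::string_view` constructors funnel through the `copy` helper above, so at most N characters are copied and any remainder is zero-filled, while the conversion operators now return all N characters of the wrapped array. A minimal sketch of what this enables (the function and variable names are illustrative, not part of the header):

```cpp
#include <string>
#include <string_view>
#include <vulkan/vulkan.hpp>

void arrayWrapper1DSketch()
{
  // Construct a fixed-size char wrapper from a std::string: truncates to N
  // characters if needed and zero-pads the tail, per the copy() helper.
  vk::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> name( std::string( "VK_NV_cuda_kernel_launch" ) );

  // The conversion operators now use ( data(), N ), so the full N-character
  // buffer (including any trailing padding) is carried over.
  std::string      asString = name;
  std::string_view asView   = name;
  (void)asString;
  (void)asView;
}
```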
@@ -4749,6 +4777,46 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
# if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_NV_cuda_kernel_launch ===
VkResult vkCreateCudaModuleNV( VkDevice device,
const VkCudaModuleCreateInfoNV * pCreateInfo,
const VkAllocationCallbacks * pAllocator,
VkCudaModuleNV * pModule ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCreateCudaModuleNV( device, pCreateInfo, pAllocator, pModule );
}
VkResult vkGetCudaModuleCacheNV( VkDevice device, VkCudaModuleNV module, size_t * pCacheSize, void * pCacheData ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetCudaModuleCacheNV( device, module, pCacheSize, pCacheData );
}
VkResult vkCreateCudaFunctionNV( VkDevice device,
const VkCudaFunctionCreateInfoNV * pCreateInfo,
const VkAllocationCallbacks * pAllocator,
VkCudaFunctionNV * pFunction ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCreateCudaFunctionNV( device, pCreateInfo, pAllocator, pFunction );
}
void vkDestroyCudaModuleNV( VkDevice device, VkCudaModuleNV module, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
{
return ::vkDestroyCudaModuleNV( device, module, pAllocator );
}
void vkDestroyCudaFunctionNV( VkDevice device, VkCudaFunctionNV function, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
{
return ::vkDestroyCudaFunctionNV( device, function, pAllocator );
}
void vkCmdCudaLaunchKernelNV( VkCommandBuffer commandBuffer, const VkCudaLaunchInfoNV * pLaunchInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdCudaLaunchKernelNV( commandBuffer, pLaunchInfo );
}
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
# if defined( VK_USE_PLATFORM_METAL_EXT )
//=== VK_EXT_metal_objects ===
@@ -10965,6 +11033,34 @@ namespace VULKAN_HPP_NAMESPACE
};
};
# if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_NV_cuda_kernel_launch ===
template <>
struct StructExtends<PhysicalDeviceCudaKernelLaunchFeaturesNV, PhysicalDeviceFeatures2>
{
enum
{
value = true
};
};
template <>
struct StructExtends<PhysicalDeviceCudaKernelLaunchFeaturesNV, DeviceCreateInfo>
{
enum
{
value = true
};
};
template <>
struct StructExtends<PhysicalDeviceCudaKernelLaunchPropertiesNV, PhysicalDeviceProperties2>
{
enum
{
value = true
};
};
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
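These StructExtends specializations are what let the new CUDA structures participate in pNext chains that Vulkan-Hpp checks at compile time. A hedged sketch of enabling the feature at device creation (assumes VK_ENABLE_BETA_EXTENSIONS is defined and the extension is supported; the queue setup is left to the caller):

```cpp
#include <vulkan/vulkan.hpp>

#if defined( VK_ENABLE_BETA_EXTENSIONS )
vk::Device createDeviceWithCudaLaunch( vk::PhysicalDevice physicalDevice, const vk::DeviceQueueCreateInfo & queueInfo )
{
  // Request the feature; chaining into DeviceCreateInfo is valid because of
  // StructExtends<PhysicalDeviceCudaKernelLaunchFeaturesNV, DeviceCreateInfo>.
  vk::PhysicalDeviceCudaKernelLaunchFeaturesNV cudaFeatures{};
  cudaFeatures.cudaKernelLaunchFeatures = VK_TRUE;

  const char * extensions[] = { VK_NV_CUDA_KERNEL_LAUNCH_EXTENSION_NAME };

  vk::DeviceCreateInfo createInfo{};
  createInfo.pNext                   = &cudaFeatures;
  createInfo.queueCreateInfoCount    = 1;
  createInfo.pQueueCreateInfos       = &queueInfo;
  createInfo.enabledExtensionCount   = 1;
  createInfo.ppEnabledExtensionNames = extensions;

  return physicalDevice.createDevice( createInfo );
}
#endif
```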
//=== VK_NV_low_latency ===
template <>
struct StructExtends<QueryLowLatencySupportNV, SemaphoreCreateInfo>
@@ -12275,6 +12371,48 @@ namespace VULKAN_HPP_NAMESPACE
};
};
//=== VK_ARM_scheduling_controls ===
template <>
struct StructExtends<DeviceQueueShaderCoreControlCreateInfoARM, DeviceQueueCreateInfo>
{
enum
{
value = true
};
};
template <>
struct StructExtends<DeviceQueueShaderCoreControlCreateInfoARM, DeviceCreateInfo>
{
enum
{
value = true
};
};
template <>
struct StructExtends<PhysicalDeviceSchedulingControlsFeaturesARM, PhysicalDeviceFeatures2>
{
enum
{
value = true
};
};
template <>
struct StructExtends<PhysicalDeviceSchedulingControlsFeaturesARM, DeviceCreateInfo>
{
enum
{
value = true
};
};
template <>
struct StructExtends<PhysicalDeviceSchedulingControlsPropertiesARM, PhysicalDeviceProperties2>
{
enum
{
value = true
};
};
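Likewise, the VK_ARM_scheduling_controls specializations allow the ARM structures to be queried through a vk::StructureChain and chained into queue creation. A sketch under the assumption that the physical device advertises the extension (the core count and queue family index are illustrative):

```cpp
#include <vulkan/vulkan.hpp>

void querySchedulingControls( vk::PhysicalDevice physicalDevice )
{
  // Query the supported scheduling controls through the properties chain.
  auto chain =
    physicalDevice.getProperties2<vk::PhysicalDeviceProperties2, vk::PhysicalDeviceSchedulingControlsPropertiesARM>();
  vk::PhysicalDeviceSchedulingControlsFlagsARM controls =
    chain.get<vk::PhysicalDeviceSchedulingControlsPropertiesARM>().schedulingControlsFlags;

  // Request a specific shader core count for a queue; the
  // StructExtends<DeviceQueueShaderCoreControlCreateInfoARM, DeviceQueueCreateInfo>
  // specialization is what permits this pNext chain.
  vk::DeviceQueueShaderCoreControlCreateInfoARM coreControl{};
  coreControl.shaderCoreCount = 4;

  float priority = 1.0f;
  vk::DeviceQueueCreateInfo queueInfo{};
  queueInfo.pNext            = &coreControl;
  queueInfo.queueFamilyIndex = 0;
  queueInfo.queueCount       = 1;
  queueInfo.pQueuePriorities = &priority;

  (void)controls;
  (void)queueInfo;
}
```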
//=== VK_EXT_image_sliced_view_of_3d ===
template <>
struct StructExtends<PhysicalDeviceImageSlicedViewOf3DFeaturesEXT, PhysicalDeviceFeatures2>
@@ -14436,6 +14574,23 @@ namespace VULKAN_HPP_NAMESPACE
PFN_dummy vkCmdEncodeVideoKHR_placeholder = 0;
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_NV_cuda_kernel_launch ===
PFN_vkCreateCudaModuleNV vkCreateCudaModuleNV = 0;
PFN_vkGetCudaModuleCacheNV vkGetCudaModuleCacheNV = 0;
PFN_vkCreateCudaFunctionNV vkCreateCudaFunctionNV = 0;
PFN_vkDestroyCudaModuleNV vkDestroyCudaModuleNV = 0;
PFN_vkDestroyCudaFunctionNV vkDestroyCudaFunctionNV = 0;
PFN_vkCmdCudaLaunchKernelNV vkCmdCudaLaunchKernelNV = 0;
#else
PFN_dummy vkCreateCudaModuleNV_placeholder = 0;
PFN_dummy vkGetCudaModuleCacheNV_placeholder = 0;
PFN_dummy vkCreateCudaFunctionNV_placeholder = 0;
PFN_dummy vkDestroyCudaModuleNV_placeholder = 0;
PFN_dummy vkDestroyCudaFunctionNV_placeholder = 0;
PFN_dummy vkCmdCudaLaunchKernelNV_placeholder = 0;
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_USE_PLATFORM_METAL_EXT )
//=== VK_EXT_metal_objects ===
PFN_vkExportMetalObjectsEXT vkExportMetalObjectsEXT = 0;
@@ -15760,6 +15915,16 @@ namespace VULKAN_HPP_NAMESPACE
vkCmdEncodeVideoKHR = PFN_vkCmdEncodeVideoKHR( vkGetInstanceProcAddr( instance, "vkCmdEncodeVideoKHR" ) );
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_NV_cuda_kernel_launch ===
vkCreateCudaModuleNV = PFN_vkCreateCudaModuleNV( vkGetInstanceProcAddr( instance, "vkCreateCudaModuleNV" ) );
vkGetCudaModuleCacheNV = PFN_vkGetCudaModuleCacheNV( vkGetInstanceProcAddr( instance, "vkGetCudaModuleCacheNV" ) );
vkCreateCudaFunctionNV = PFN_vkCreateCudaFunctionNV( vkGetInstanceProcAddr( instance, "vkCreateCudaFunctionNV" ) );
vkDestroyCudaModuleNV = PFN_vkDestroyCudaModuleNV( vkGetInstanceProcAddr( instance, "vkDestroyCudaModuleNV" ) );
vkDestroyCudaFunctionNV = PFN_vkDestroyCudaFunctionNV( vkGetInstanceProcAddr( instance, "vkDestroyCudaFunctionNV" ) );
vkCmdCudaLaunchKernelNV = PFN_vkCmdCudaLaunchKernelNV( vkGetInstanceProcAddr( instance, "vkCmdCudaLaunchKernelNV" ) );
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_USE_PLATFORM_METAL_EXT )
//=== VK_EXT_metal_objects ===
vkExportMetalObjectsEXT = PFN_vkExportMetalObjectsEXT( vkGetInstanceProcAddr( instance, "vkExportMetalObjectsEXT" ) );
@@ -16801,6 +16966,16 @@ namespace VULKAN_HPP_NAMESPACE
vkCmdEncodeVideoKHR = PFN_vkCmdEncodeVideoKHR( vkGetDeviceProcAddr( device, "vkCmdEncodeVideoKHR" ) );
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_NV_cuda_kernel_launch ===
vkCreateCudaModuleNV = PFN_vkCreateCudaModuleNV( vkGetDeviceProcAddr( device, "vkCreateCudaModuleNV" ) );
vkGetCudaModuleCacheNV = PFN_vkGetCudaModuleCacheNV( vkGetDeviceProcAddr( device, "vkGetCudaModuleCacheNV" ) );
vkCreateCudaFunctionNV = PFN_vkCreateCudaFunctionNV( vkGetDeviceProcAddr( device, "vkCreateCudaFunctionNV" ) );
vkDestroyCudaModuleNV = PFN_vkDestroyCudaModuleNV( vkGetDeviceProcAddr( device, "vkDestroyCudaModuleNV" ) );
vkDestroyCudaFunctionNV = PFN_vkDestroyCudaFunctionNV( vkGetDeviceProcAddr( device, "vkDestroyCudaFunctionNV" ) );
vkCmdCudaLaunchKernelNV = PFN_vkCmdCudaLaunchKernelNV( vkGetDeviceProcAddr( device, "vkCmdCudaLaunchKernelNV" ) );
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
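The generated loaders above resolve the new entry points through vkGetInstanceProcAddr / vkGetDeviceProcAddr; applications that manage their own function pointers can do the same. A minimal sketch (assumes the device was created with the extension enabled; the helper name is illustrative):

```cpp
#include <vulkan/vulkan.h>

#if defined( VK_ENABLE_BETA_EXTENSIONS )
PFN_vkCmdCudaLaunchKernelNV loadCmdCudaLaunchKernelNV( VkDevice device )
{
  // Device-level loading, mirroring what the dispatcher init above does.
  return reinterpret_cast<PFN_vkCmdCudaLaunchKernelNV>( vkGetDeviceProcAddr( device, "vkCmdCudaLaunchKernelNV" ) );
}
#endif
```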
#if defined( VK_USE_PLATFORM_METAL_EXT )
//=== VK_EXT_metal_objects ===
vkExportMetalObjectsEXT = PFN_vkExportMetalObjectsEXT( vkGetDeviceProcAddr( device, "vkExportMetalObjectsEXT" ) );


@@ -69,7 +69,7 @@ extern "C" {
#define VK_API_VERSION_1_0 VK_MAKE_API_VERSION(0, 1, 0, 0)// Patch version should always be set to 0
// Version of this file
-#define VK_HEADER_VERSION 268
+#define VK_HEADER_VERSION 269
// Complete version of this file
#define VK_HEADER_VERSION_COMPLETE VK_MAKE_API_VERSION(0, 1, 3, VK_HEADER_VERSION)
@@ -902,6 +902,11 @@ typedef enum VkStructureType {
#endif
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DIAGNOSTICS_CONFIG_FEATURES_NV = 1000300000,
VK_STRUCTURE_TYPE_DEVICE_DIAGNOSTICS_CONFIG_CREATE_INFO_NV = 1000300001,
VK_STRUCTURE_TYPE_CUDA_MODULE_CREATE_INFO_NV = 1000307000,
VK_STRUCTURE_TYPE_CUDA_FUNCTION_CREATE_INFO_NV = 1000307001,
VK_STRUCTURE_TYPE_CUDA_LAUNCH_INFO_NV = 1000307002,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUDA_KERNEL_LAUNCH_FEATURES_NV = 1000307003,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUDA_KERNEL_LAUNCH_PROPERTIES_NV = 1000307004,
VK_STRUCTURE_TYPE_QUERY_LOW_LATENCY_SUPPORT_NV = 1000310000,
VK_STRUCTURE_TYPE_EXPORT_METAL_OBJECT_CREATE_INFO_EXT = 1000311000,
VK_STRUCTURE_TYPE_EXPORT_METAL_OBJECTS_INFO_EXT = 1000311001,
@@ -1035,6 +1040,9 @@ typedef enum VkStructureType {
VK_STRUCTURE_TYPE_SAMPLER_BORDER_COLOR_COMPONENT_MAPPING_CREATE_INFO_EXT = 1000411001,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PAGEABLE_DEVICE_LOCAL_MEMORY_FEATURES_EXT = 1000412000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_ARM = 1000415000,
VK_STRUCTURE_TYPE_DEVICE_QUEUE_SHADER_CORE_CONTROL_CREATE_INFO_ARM = 1000417000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCHEDULING_CONTROLS_FEATURES_ARM = 1000417001,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCHEDULING_CONTROLS_PROPERTIES_ARM = 1000417002,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_SLICED_VIEW_OF_3D_FEATURES_EXT = 1000418000,
VK_STRUCTURE_TYPE_IMAGE_VIEW_SLICED_CREATE_INFO_EXT = 1000418001,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_SET_HOST_MAPPING_FEATURES_VALVE = 1000420000,
@@ -1420,6 +1428,8 @@ typedef enum VkObjectType {
VK_OBJECT_TYPE_PERFORMANCE_CONFIGURATION_INTEL = 1000210000,
VK_OBJECT_TYPE_DEFERRED_OPERATION_KHR = 1000268000,
VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NV = 1000277000,
VK_OBJECT_TYPE_CUDA_MODULE_NV = 1000307000,
VK_OBJECT_TYPE_CUDA_FUNCTION_NV = 1000307001,
VK_OBJECT_TYPE_BUFFER_COLLECTION_FUCHSIA = 1000366000,
VK_OBJECT_TYPE_MICROMAP_EXT = 1000396000,
VK_OBJECT_TYPE_OPTICAL_FLOW_SESSION_NV = 1000464000,
@@ -10768,6 +10778,8 @@ typedef enum VkDebugReportObjectTypeEXT {
VK_DEBUG_REPORT_OBJECT_TYPE_CU_FUNCTION_NVX_EXT = 1000029001,
VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR_EXT = 1000150000,
VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT = 1000165000,
VK_DEBUG_REPORT_OBJECT_TYPE_CUDA_MODULE_NV = 1000307000,
VK_DEBUG_REPORT_OBJECT_TYPE_CUDA_FUNCTION_NV = 1000307001,
VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_COLLECTION_FUCHSIA_EXT = 1000366000,
VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT,
VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT,
@@ -14917,6 +14929,98 @@ typedef struct VkDeviceDiagnosticsConfigCreateInfoNV {
#define VK_QCOM_RENDER_PASS_STORE_OPS_EXTENSION_NAME "VK_QCOM_render_pass_store_ops"
// VK_NV_cuda_kernel_launch is a preprocessor guard. Do not pass it to API calls.
#define VK_NV_cuda_kernel_launch 1
VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkCudaModuleNV)
VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkCudaFunctionNV)
#define VK_NV_CUDA_KERNEL_LAUNCH_SPEC_VERSION 2
#define VK_NV_CUDA_KERNEL_LAUNCH_EXTENSION_NAME "VK_NV_cuda_kernel_launch"
typedef struct VkCudaModuleCreateInfoNV {
VkStructureType sType;
const void* pNext;
size_t dataSize;
const void* pData;
} VkCudaModuleCreateInfoNV;
typedef struct VkCudaFunctionCreateInfoNV {
VkStructureType sType;
const void* pNext;
VkCudaModuleNV module;
const char* pName;
} VkCudaFunctionCreateInfoNV;
typedef struct VkCudaLaunchInfoNV {
VkStructureType sType;
const void* pNext;
VkCudaFunctionNV function;
uint32_t gridDimX;
uint32_t gridDimY;
uint32_t gridDimZ;
uint32_t blockDimX;
uint32_t blockDimY;
uint32_t blockDimZ;
uint32_t sharedMemBytes;
size_t paramCount;
const void* const * pParams;
size_t extraCount;
const void* const * pExtras;
} VkCudaLaunchInfoNV;
typedef struct VkPhysicalDeviceCudaKernelLaunchFeaturesNV {
VkStructureType sType;
void* pNext;
VkBool32 cudaKernelLaunchFeatures;
} VkPhysicalDeviceCudaKernelLaunchFeaturesNV;
typedef struct VkPhysicalDeviceCudaKernelLaunchPropertiesNV {
VkStructureType sType;
void* pNext;
uint32_t computeCapabilityMinor;
uint32_t computeCapabilityMajor;
} VkPhysicalDeviceCudaKernelLaunchPropertiesNV;
typedef VkResult (VKAPI_PTR *PFN_vkCreateCudaModuleNV)(VkDevice device, const VkCudaModuleCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCudaModuleNV* pModule);
typedef VkResult (VKAPI_PTR *PFN_vkGetCudaModuleCacheNV)(VkDevice device, VkCudaModuleNV module, size_t* pCacheSize, void* pCacheData);
typedef VkResult (VKAPI_PTR *PFN_vkCreateCudaFunctionNV)(VkDevice device, const VkCudaFunctionCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCudaFunctionNV* pFunction);
typedef void (VKAPI_PTR *PFN_vkDestroyCudaModuleNV)(VkDevice device, VkCudaModuleNV module, const VkAllocationCallbacks* pAllocator);
typedef void (VKAPI_PTR *PFN_vkDestroyCudaFunctionNV)(VkDevice device, VkCudaFunctionNV function, const VkAllocationCallbacks* pAllocator);
typedef void (VKAPI_PTR *PFN_vkCmdCudaLaunchKernelNV)(VkCommandBuffer commandBuffer, const VkCudaLaunchInfoNV* pLaunchInfo);
#ifndef VK_NO_PROTOTYPES
VKAPI_ATTR VkResult VKAPI_CALL vkCreateCudaModuleNV(
VkDevice device,
const VkCudaModuleCreateInfoNV* pCreateInfo,
const VkAllocationCallbacks* pAllocator,
VkCudaModuleNV* pModule);
VKAPI_ATTR VkResult VKAPI_CALL vkGetCudaModuleCacheNV(
VkDevice device,
VkCudaModuleNV module,
size_t* pCacheSize,
void* pCacheData);
VKAPI_ATTR VkResult VKAPI_CALL vkCreateCudaFunctionNV(
VkDevice device,
const VkCudaFunctionCreateInfoNV* pCreateInfo,
const VkAllocationCallbacks* pAllocator,
VkCudaFunctionNV* pFunction);
VKAPI_ATTR void VKAPI_CALL vkDestroyCudaModuleNV(
VkDevice device,
VkCudaModuleNV module,
const VkAllocationCallbacks* pAllocator);
VKAPI_ATTR void VKAPI_CALL vkDestroyCudaFunctionNV(
VkDevice device,
VkCudaFunctionNV function,
const VkAllocationCallbacks* pAllocator);
VKAPI_ATTR void VKAPI_CALL vkCmdCudaLaunchKernelNV(
VkCommandBuffer commandBuffer,
const VkCudaLaunchInfoNV* pLaunchInfo);
#endif
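Taken together, the prototypes above cover the whole lifecycle: build a module from a CUDA binary blob, resolve a function by name, record a launch, and destroy the handles. A hedged C-style sketch (error handling omitted; the kernel name and dispatch sizes are illustrative, and the handles must outlive command-buffer execution in real code):

```cpp
#include <vulkan/vulkan.h>
#include <vector>

#if defined( VK_ENABLE_BETA_EXTENSIONS ) && !defined( VK_NO_PROTOTYPES )
void recordCudaLaunch( VkDevice device, VkCommandBuffer commandBuffer, const std::vector<uint8_t> & moduleData )
{
  // Create a CUDA module from an opaque binary blob.
  VkCudaModuleCreateInfoNV moduleInfo = {};
  moduleInfo.sType    = VK_STRUCTURE_TYPE_CUDA_MODULE_CREATE_INFO_NV;
  moduleInfo.dataSize = moduleData.size();
  moduleInfo.pData    = moduleData.data();
  VkCudaModuleNV cudaModule = VK_NULL_HANDLE;
  vkCreateCudaModuleNV( device, &moduleInfo, nullptr, &cudaModule );

  // Resolve a kernel entry point inside the module by name.
  VkCudaFunctionCreateInfoNV functionInfo = {};
  functionInfo.sType  = VK_STRUCTURE_TYPE_CUDA_FUNCTION_CREATE_INFO_NV;
  functionInfo.module = cudaModule;
  functionInfo.pName  = "myKernel";
  VkCudaFunctionNV cudaFunction = VK_NULL_HANDLE;
  vkCreateCudaFunctionNV( device, &functionInfo, nullptr, &cudaFunction );

  // Record the launch with grid / block dimensions.
  VkCudaLaunchInfoNV launchInfo = {};
  launchInfo.sType     = VK_STRUCTURE_TYPE_CUDA_LAUNCH_INFO_NV;
  launchInfo.function  = cudaFunction;
  launchInfo.gridDimX  = 1;
  launchInfo.gridDimY  = 1;
  launchInfo.gridDimZ  = 1;
  launchInfo.blockDimX = 64;
  launchInfo.blockDimY = 1;
  launchInfo.blockDimZ = 1;
  vkCmdCudaLaunchKernelNV( commandBuffer, &launchInfo );

  // Cleanup shown immediately for brevity; defer until the work has completed.
  vkDestroyCudaFunctionNV( device, cudaFunction, nullptr );
  vkDestroyCudaModuleNV( device, cudaModule, nullptr );
}
#endif
```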
// VK_NV_low_latency is a preprocessor guard. Do not pass it to API calls.
#define VK_NV_low_latency 1
#define VK_NV_LOW_LATENCY_SPEC_VERSION 1
@@ -16475,6 +16579,36 @@ typedef struct VkPhysicalDeviceShaderCorePropertiesARM {
// VK_ARM_scheduling_controls is a preprocessor guard. Do not pass it to API calls.
#define VK_ARM_scheduling_controls 1
#define VK_ARM_SCHEDULING_CONTROLS_SPEC_VERSION 1
#define VK_ARM_SCHEDULING_CONTROLS_EXTENSION_NAME "VK_ARM_scheduling_controls"
typedef VkFlags64 VkPhysicalDeviceSchedulingControlsFlagsARM;
typedef enum VkPhysicalDeviceSchedulingControlsFlagBitsARM {
VK_PHYSICAL_DEVICE_SCHEDULING_CONTROLS_SHADER_CORE_COUNT_ARM = 0x00000001,
VK_PHYSICAL_DEVICE_SCHEDULING_CONTROLS_FLAG_BITS_MAX_ENUM_ARM = 0x7FFFFFFF
} VkPhysicalDeviceSchedulingControlsFlagBitsARM;
typedef struct VkDeviceQueueShaderCoreControlCreateInfoARM {
VkStructureType sType;
void* pNext;
uint32_t shaderCoreCount;
} VkDeviceQueueShaderCoreControlCreateInfoARM;
typedef struct VkPhysicalDeviceSchedulingControlsFeaturesARM {
VkStructureType sType;
void* pNext;
VkBool32 schedulingControls;
} VkPhysicalDeviceSchedulingControlsFeaturesARM;
typedef struct VkPhysicalDeviceSchedulingControlsPropertiesARM {
VkStructureType sType;
void* pNext;
VkPhysicalDeviceSchedulingControlsFlagsARM schedulingControlsFlags;
} VkPhysicalDeviceSchedulingControlsPropertiesARM;
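For the C interface, the new structures are consumed through the usual pNext queries; the 64-bit flags field reports which controls (currently only the shader core count) can be driven. A hedged sketch:

```cpp
#include <vulkan/vulkan.h>

VkBool32 supportsShaderCoreCountControl( VkPhysicalDevice physicalDevice )
{
  // Chain the ARM properties into the standard properties2 query.
  VkPhysicalDeviceSchedulingControlsPropertiesARM schedulingProps = {};
  schedulingProps.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCHEDULING_CONTROLS_PROPERTIES_ARM;

  VkPhysicalDeviceProperties2 props2 = {};
  props2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
  props2.pNext = &schedulingProps;
  vkGetPhysicalDeviceProperties2( physicalDevice, &props2 );

  // The flags value is tested bit-wise, like any other Vulkan bitmask.
  return ( schedulingProps.schedulingControlsFlags & VK_PHYSICAL_DEVICE_SCHEDULING_CONTROLS_SHADER_CORE_COUNT_ARM ) ? VK_TRUE : VK_FALSE;
}
```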
// VK_EXT_image_sliced_view_of_3d is a preprocessor guard. Do not pass it to API calls.
#define VK_EXT_image_sliced_view_of_3d 1
#define VK_EXT_IMAGE_SLICED_VIEW_OF_3D_SPEC_VERSION 1


@@ -1089,6 +1089,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
ePhysicalDeviceDiagnosticsConfigFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DIAGNOSTICS_CONFIG_FEATURES_NV,
eDeviceDiagnosticsConfigCreateInfoNV = VK_STRUCTURE_TYPE_DEVICE_DIAGNOSTICS_CONFIG_CREATE_INFO_NV,
#if defined( VK_ENABLE_BETA_EXTENSIONS )
eCudaModuleCreateInfoNV = VK_STRUCTURE_TYPE_CUDA_MODULE_CREATE_INFO_NV,
eCudaFunctionCreateInfoNV = VK_STRUCTURE_TYPE_CUDA_FUNCTION_CREATE_INFO_NV,
eCudaLaunchInfoNV = VK_STRUCTURE_TYPE_CUDA_LAUNCH_INFO_NV,
ePhysicalDeviceCudaKernelLaunchFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUDA_KERNEL_LAUNCH_FEATURES_NV,
ePhysicalDeviceCudaKernelLaunchPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUDA_KERNEL_LAUNCH_PROPERTIES_NV,
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
eQueryLowLatencySupportNV = VK_STRUCTURE_TYPE_QUERY_LOW_LATENCY_SUPPORT_NV,
#if defined( VK_USE_PLATFORM_METAL_EXT )
eExportMetalObjectCreateInfoEXT = VK_STRUCTURE_TYPE_EXPORT_METAL_OBJECT_CREATE_INFO_EXT,
@@ -1260,6 +1267,9 @@ namespace VULKAN_HPP_NAMESPACE
eDeviceBufferMemoryRequirementsKHR = VK_STRUCTURE_TYPE_DEVICE_BUFFER_MEMORY_REQUIREMENTS_KHR,
eDeviceImageMemoryRequirementsKHR = VK_STRUCTURE_TYPE_DEVICE_IMAGE_MEMORY_REQUIREMENTS_KHR,
ePhysicalDeviceShaderCorePropertiesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_ARM,
eDeviceQueueShaderCoreControlCreateInfoARM = VK_STRUCTURE_TYPE_DEVICE_QUEUE_SHADER_CORE_CONTROL_CREATE_INFO_ARM,
ePhysicalDeviceSchedulingControlsFeaturesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCHEDULING_CONTROLS_FEATURES_ARM,
ePhysicalDeviceSchedulingControlsPropertiesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCHEDULING_CONTROLS_PROPERTIES_ARM,
ePhysicalDeviceImageSlicedViewOf3DFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_SLICED_VIEW_OF_3D_FEATURES_EXT,
eImageViewSlicedCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_VIEW_SLICED_CREATE_INFO_EXT,
ePhysicalDeviceDescriptorSetHostMappingFeaturesVALVE = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_SET_HOST_MAPPING_FEATURES_VALVE,
@@ -1430,6 +1440,10 @@ namespace VULKAN_HPP_NAMESPACE
eDeferredOperationKHR = VK_OBJECT_TYPE_DEFERRED_OPERATION_KHR,
eIndirectCommandsLayoutNV = VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NV,
ePrivateDataSlotEXT = VK_OBJECT_TYPE_PRIVATE_DATA_SLOT_EXT,
#if defined( VK_ENABLE_BETA_EXTENSIONS )
eCudaModuleNV = VK_OBJECT_TYPE_CUDA_MODULE_NV,
eCudaFunctionNV = VK_OBJECT_TYPE_CUDA_FUNCTION_NV,
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_USE_PLATFORM_FUCHSIA )
eBufferCollectionFUCHSIA = VK_OBJECT_TYPE_BUFFER_COLLECTION_FUCHSIA,
#endif /*VK_USE_PLATFORM_FUCHSIA*/
@@ -4660,6 +4674,10 @@ namespace VULKAN_HPP_NAMESPACE
eAccelerationStructureKHR = VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR_EXT,
eSamplerYcbcrConversionKHR = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR_EXT,
eAccelerationStructureNV = VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT,
#if defined( VK_ENABLE_BETA_EXTENSIONS )
eCudaModuleNV = VK_DEBUG_REPORT_OBJECT_TYPE_CUDA_MODULE_NV,
eCudaFunctionNV = VK_DEBUG_REPORT_OBJECT_TYPE_CUDA_FUNCTION_NV,
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_USE_PLATFORM_FUCHSIA )
eBufferCollectionFUCHSIA = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_COLLECTION_FUCHSIA_EXT
#endif /*VK_USE_PLATFORM_FUCHSIA*/
@@ -6753,6 +6771,22 @@ namespace VULKAN_HPP_NAMESPACE
};
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
//=== VK_ARM_scheduling_controls ===
enum class PhysicalDeviceSchedulingControlsFlagBitsARM : VkPhysicalDeviceSchedulingControlsFlagsARM
{
eShaderCoreCount = VK_PHYSICAL_DEVICE_SCHEDULING_CONTROLS_SHADER_CORE_COUNT_ARM
};
using PhysicalDeviceSchedulingControlsFlagsARM = Flags<PhysicalDeviceSchedulingControlsFlagBitsARM>;
template <>
struct FlagTraits<PhysicalDeviceSchedulingControlsFlagBitsARM>
{
static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
static VULKAN_HPP_CONST_OR_CONSTEXPR PhysicalDeviceSchedulingControlsFlagsARM allFlags = PhysicalDeviceSchedulingControlsFlagBitsARM::eShaderCoreCount;
};
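The FlagTraits specialization is what enables the usual bitwise operators on this 64-bit bitmask; a small sketch testing the only bit currently defined:

```cpp
#include <vulkan/vulkan.hpp>

bool hasShaderCoreCountControl( vk::PhysicalDeviceSchedulingControlsFlagsARM flags )
{
  return static_cast<bool>( flags & vk::PhysicalDeviceSchedulingControlsFlagBitsARM::eShaderCoreCount );
}
```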
//=== VK_NV_memory_decompression ===
enum class MemoryDecompressionMethodFlagBitsNV : VkMemoryDecompressionMethodFlagsNV
@@ -7289,6 +7323,12 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_NV_device_generated_commands ===
case VULKAN_HPP_NAMESPACE::ObjectType::eIndirectCommandsLayoutNV: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
#if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_NV_cuda_kernel_launch ===
case VULKAN_HPP_NAMESPACE::ObjectType::eCudaModuleNV: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
case VULKAN_HPP_NAMESPACE::ObjectType::eCudaFunctionNV: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_buffer_collection ===
case VULKAN_HPP_NAMESPACE::ObjectType::eBufferCollectionFUCHSIA: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBufferCollectionFUCHSIA;


@@ -298,6 +298,9 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
"VK_NV_device_diagnostics_config",
"VK_QCOM_render_pass_store_ops",
#if defined( VK_ENABLE_BETA_EXTENSIONS )
"VK_NV_cuda_kernel_launch",
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
"VK_NV_low_latency", "VK_NV_low_latency",
#if defined( VK_USE_PLATFORM_METAL_EXT ) #if defined( VK_USE_PLATFORM_METAL_EXT )
"VK_EXT_metal_objects", "VK_EXT_metal_objects",
@ -364,6 +367,7 @@ namespace VULKAN_HPP_NAMESPACE
"VK_EXT_pageable_device_local_memory", "VK_EXT_pageable_device_local_memory",
"VK_KHR_maintenance4", "VK_KHR_maintenance4",
"VK_ARM_shader_core_properties", "VK_ARM_shader_core_properties",
"VK_ARM_scheduling_controls",
"VK_EXT_image_sliced_view_of_3d", "VK_EXT_image_sliced_view_of_3d",
"VK_VALVE_descriptor_set_host_mapping", "VK_VALVE_descriptor_set_host_mapping",
"VK_EXT_depth_clamp_zero_one", "VK_EXT_depth_clamp_zero_one",
@ -773,6 +777,7 @@ namespace VULKAN_HPP_NAMESPACE
{ "VK_EXT_pageable_device_local_memory", { { "VK_VERSION_1_0", { { "VK_EXT_memory_priority", } } } } }, { "VK_EXT_pageable_device_local_memory", { { "VK_VERSION_1_0", { { "VK_EXT_memory_priority", } } } } },
{ "VK_KHR_maintenance4", { { "VK_VERSION_1_1", { { } } } } }, { "VK_KHR_maintenance4", { { "VK_VERSION_1_1", { { } } } } },
{ "VK_ARM_shader_core_properties", { { "VK_VERSION_1_1", { { } } } } }, { "VK_ARM_shader_core_properties", { { "VK_VERSION_1_1", { { } } } } },
{ "VK_ARM_scheduling_controls", { { "VK_VERSION_1_0", { { "VK_ARM_shader_core_builtins", } } } } },
{ "VK_EXT_image_sliced_view_of_3d", { { "VK_VERSION_1_0", { { "VK_KHR_maintenance1", "VK_KHR_get_physical_device_properties2", } } } } }, { "VK_EXT_image_sliced_view_of_3d", { { "VK_VERSION_1_0", { { "VK_KHR_maintenance1", "VK_KHR_get_physical_device_properties2", } } } } },
{ "VK_VALVE_descriptor_set_host_mapping", { { "VK_VERSION_1_0", { { "VK_KHR_get_physical_device_properties2", } } } } }, { "VK_VALVE_descriptor_set_host_mapping", { { "VK_VERSION_1_0", { { "VK_KHR_get_physical_device_properties2", } } } } },
{ "VK_EXT_depth_clamp_zero_one", { { "VK_VERSION_1_0", { { "VK_KHR_get_physical_device_properties2", } } } } }, { "VK_EXT_depth_clamp_zero_one", { { "VK_VERSION_1_0", { { "VK_KHR_get_physical_device_properties2", } } } } },
@ -1480,7 +1485,11 @@ namespace VULKAN_HPP_NAMESPACE
#if defined( VK_ENABLE_BETA_EXTENSIONS ) #if defined( VK_ENABLE_BETA_EXTENSIONS )
|| ( extension == "VK_KHR_video_encode_queue" ) || ( extension == "VK_KHR_video_encode_queue" )
#endif /*VK_ENABLE_BETA_EXTENSIONS*/ #endif /*VK_ENABLE_BETA_EXTENSIONS*/
|| ( extension == "VK_NV_device_diagnostics_config" ) || ( extension == "VK_QCOM_render_pass_store_ops" ) || ( extension == "VK_NV_low_latency" ) || ( extension == "VK_NV_device_diagnostics_config" ) || ( extension == "VK_QCOM_render_pass_store_ops" )
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|| ( extension == "VK_NV_cuda_kernel_launch" )
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
|| ( extension == "VK_NV_low_latency" )
#if defined( VK_USE_PLATFORM_METAL_EXT )
|| ( extension == "VK_EXT_metal_objects" )
#endif /*VK_USE_PLATFORM_METAL_EXT*/
@@ -1517,16 +1526,17 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
|| ( extension == "VK_EXT_load_store_op_none" ) || ( extension == "VK_HUAWEI_cluster_culling_shader" ) ||
( extension == "VK_EXT_border_color_swizzle" ) || ( extension == "VK_EXT_pageable_device_local_memory" ) || ( extension == "VK_KHR_maintenance4" ) ||
( extension == "VK_ARM_shader_core_properties" ) || ( extension == "VK_ARM_scheduling_controls" ) ||
( extension == "VK_EXT_image_sliced_view_of_3d" ) || ( extension == "VK_VALVE_descriptor_set_host_mapping" ) ||
( extension == "VK_EXT_depth_clamp_zero_one" ) || ( extension == "VK_EXT_non_seamless_cube_map" ) ||
( extension == "VK_QCOM_fragment_density_map_offset" ) || ( extension == "VK_NV_copy_memory_indirect" ) ||
( extension == "VK_NV_memory_decompression" ) || ( extension == "VK_NV_device_generated_commands_compute" ) ||
( extension == "VK_NV_linear_color_attachment" ) || ( extension == "VK_EXT_image_compression_control_swapchain" ) ||
( extension == "VK_QCOM_image_processing" ) || ( extension == "VK_EXT_nested_command_buffer" ) ||
( extension == "VK_EXT_external_memory_acquire_unmodified" ) || ( extension == "VK_EXT_extended_dynamic_state3" ) ||
( extension == "VK_EXT_subpass_merge_feedback" ) || ( extension == "VK_EXT_shader_module_identifier" ) ||
( extension == "VK_EXT_rasterization_order_attachment_access" ) || ( extension == "VK_NV_optical_flow" ) ||
( extension == "VK_EXT_legacy_dithering" ) || ( extension == "VK_EXT_pipeline_protected_access" )
#if defined( VK_USE_PLATFORM_ANDROID_KHR )
|| ( extension == "VK_ANDROID_external_format_resolve" )
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/


@@ -19210,6 +19210,304 @@ namespace VULKAN_HPP_NAMESPACE
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_NV_cuda_kernel_launch ===
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createCudaModuleNV( const VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV * pCreateInfo,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
VULKAN_HPP_NAMESPACE::CudaModuleNV * pModule,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkCreateCudaModuleNV( m_device,
reinterpret_cast<const VkCudaModuleCreateInfoNV *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkCudaModuleNV *>( pModule ) ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::CudaModuleNV>::type
Device::createCudaModuleNV( const VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
VULKAN_HPP_NAMESPACE::CudaModuleNV module;
VkResult result =
d.vkCreateCudaModuleNV( m_device,
reinterpret_cast<const VkCudaModuleCreateInfoNV *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkCudaModuleNV *>( &module ) );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createCudaModuleNV" );
return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), module );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CudaModuleNV, Dispatch>>::type
Device::createCudaModuleNVUnique( const VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
VULKAN_HPP_NAMESPACE::CudaModuleNV module;
VkResult result =
d.vkCreateCudaModuleNV( m_device,
reinterpret_cast<const VkCudaModuleCreateInfoNV *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkCudaModuleNV *>( &module ) );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createCudaModuleNVUnique" );
return createResultValueType(
static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
UniqueHandle<VULKAN_HPP_NAMESPACE::CudaModuleNV, Dispatch>( module, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
}
# endif /* VULKAN_HPP_NO_SMART_HANDLE */
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getCudaModuleCacheNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module,
size_t * pCacheSize,
void * pCacheData,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkGetCudaModuleCacheNV( m_device, static_cast<VkCudaModuleNV>( module ), pCacheSize, pCacheData ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Uint8_tAllocator, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
Device::getCudaModuleCacheNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
std::vector<uint8_t, Uint8_tAllocator> cacheData;
size_t cacheSize;
VkResult result;
do
{
result = d.vkGetCudaModuleCacheNV( m_device, static_cast<VkCudaModuleNV>( module ), &cacheSize, nullptr );
if ( ( result == VK_SUCCESS ) && cacheSize )
{
cacheData.resize( cacheSize );
result = d.vkGetCudaModuleCacheNV( m_device, static_cast<VkCudaModuleNV>( module ), &cacheSize, reinterpret_cast<void *>( cacheData.data() ) );
}
} while ( result == VK_INCOMPLETE );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getCudaModuleCacheNV" );
VULKAN_HPP_ASSERT( cacheSize <= cacheData.size() );
if ( cacheSize < cacheData.size() )
{
cacheData.resize( cacheSize );
}
return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), cacheData );
}
template <typename Uint8_tAllocator,
typename Dispatch,
typename B1,
typename std::enable_if<std::is_same<typename B1::value_type, uint8_t>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
Device::getCudaModuleCacheNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
std::vector<uint8_t, Uint8_tAllocator> cacheData( uint8_tAllocator );
size_t cacheSize;
VkResult result;
do
{
result = d.vkGetCudaModuleCacheNV( m_device, static_cast<VkCudaModuleNV>( module ), &cacheSize, nullptr );
if ( ( result == VK_SUCCESS ) && cacheSize )
{
cacheData.resize( cacheSize );
result = d.vkGetCudaModuleCacheNV( m_device, static_cast<VkCudaModuleNV>( module ), &cacheSize, reinterpret_cast<void *>( cacheData.data() ) );
}
} while ( result == VK_INCOMPLETE );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getCudaModuleCacheNV" );
VULKAN_HPP_ASSERT( cacheSize <= cacheData.size() );
if ( cacheSize < cacheData.size() )
{
cacheData.resize( cacheSize );
}
return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), cacheData );
}
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createCudaFunctionNV( const VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV * pCreateInfo,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
VULKAN_HPP_NAMESPACE::CudaFunctionNV * pFunction,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkCreateCudaFunctionNV( m_device,
reinterpret_cast<const VkCudaFunctionCreateInfoNV *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkCudaFunctionNV *>( pFunction ) ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::CudaFunctionNV>::type
Device::createCudaFunctionNV( const VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
VULKAN_HPP_NAMESPACE::CudaFunctionNV function;
VkResult result =
d.vkCreateCudaFunctionNV( m_device,
reinterpret_cast<const VkCudaFunctionCreateInfoNV *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkCudaFunctionNV *>( &function ) );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createCudaFunctionNV" );
return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), function );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CudaFunctionNV, Dispatch>>::type
Device::createCudaFunctionNVUnique( const VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
VULKAN_HPP_NAMESPACE::CudaFunctionNV function;
VkResult result =
d.vkCreateCudaFunctionNV( m_device,
reinterpret_cast<const VkCudaFunctionCreateInfoNV *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkCudaFunctionNV *>( &function ) );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createCudaFunctionNVUnique" );
return createResultValueType(
static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
UniqueHandle<VULKAN_HPP_NAMESPACE::CudaFunctionNV, Dispatch>( function, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
}
# endif /* VULKAN_HPP_NO_SMART_HANDLE */
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroyCudaModuleNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkDestroyCudaModuleNV( m_device, static_cast<VkCudaModuleNV>( module ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroyCudaModuleNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkDestroyCudaModuleNV( m_device,
static_cast<VkCudaModuleNV>( module ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CudaModuleNV module,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkDestroyCudaModuleNV( m_device, static_cast<VkCudaModuleNV>( module ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CudaModuleNV module,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkDestroyCudaModuleNV( m_device,
static_cast<VkCudaModuleNV>( module ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroyCudaFunctionNV( VULKAN_HPP_NAMESPACE::CudaFunctionNV function,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkDestroyCudaFunctionNV( m_device, static_cast<VkCudaFunctionNV>( function ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroyCudaFunctionNV( VULKAN_HPP_NAMESPACE::CudaFunctionNV function,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkDestroyCudaFunctionNV( m_device,
static_cast<VkCudaFunctionNV>( function ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CudaFunctionNV function,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkDestroyCudaFunctionNV( m_device, static_cast<VkCudaFunctionNV>( function ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CudaFunctionNV function,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkDestroyCudaFunctionNV( m_device,
static_cast<VkCudaFunctionNV>( function ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::cudaLaunchKernelNV( const VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV * pLaunchInfo,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdCudaLaunchKernelNV( m_commandBuffer, reinterpret_cast<const VkCudaLaunchInfoNV *>( pLaunchInfo ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::cudaLaunchKernelNV( const VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV & launchInfo,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdCudaLaunchKernelNV( m_commandBuffer, reinterpret_cast<const VkCudaLaunchInfoNV *>( &launchInfo ) );
}
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
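The enhanced-mode wrappers above follow the familiar Vulkan-Hpp patterns: value-returning create functions, a size-then-data loop for the module cache, and Unique handles for scoped cleanup. A hedged sketch of the C++ flow (assumes exceptions are enabled, VK_ENABLE_BETA_EXTENSIONS is defined, and the extension is enabled on the device; the kernel name and dispatch sizes are illustrative):

```cpp
#include <vector>
#include <vulkan/vulkan.hpp>

#if defined( VK_ENABLE_BETA_EXTENSIONS )
void recordCudaLaunch( vk::Device device, vk::CommandBuffer commandBuffer, const std::vector<uint8_t> & moduleData )
{
  // Create the module; the Unique handle destroys it when it leaves scope,
  // so in real code keep it alive until the recorded work has finished.
  vk::CudaModuleCreateInfoNV moduleInfo{};
  moduleInfo.dataSize = moduleData.size();
  moduleInfo.pData    = moduleData.data();
  vk::UniqueCudaModuleNV cudaModule = device.createCudaModuleNVUnique( moduleInfo );

  // The compiled cache can be read back (size query plus fill, handled
  // internally by the wrapper) and persisted to speed up later loads.
  std::vector<uint8_t> cache = device.getCudaModuleCacheNV( *cudaModule );

  vk::CudaFunctionCreateInfoNV functionInfo{};
  functionInfo.module = *cudaModule;
  functionInfo.pName  = "myKernel";
  vk::UniqueCudaFunctionNV cudaFunction = device.createCudaFunctionNVUnique( functionInfo );

  vk::CudaLaunchInfoNV launchInfo{};
  launchInfo.function  = *cudaFunction;
  launchInfo.gridDimX  = 1;
  launchInfo.gridDimY  = 1;
  launchInfo.gridDimZ  = 1;
  launchInfo.blockDimX = 64;
  launchInfo.blockDimY = 1;
  launchInfo.blockDimZ = 1;
  commandBuffer.cudaLaunchKernelNV( launchInfo );

  (void)cache;
}
#endif
```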
#if defined( VK_USE_PLATFORM_METAL_EXT )
//=== VK_EXT_metal_objects ===

View file

@@ -1261,6 +1261,15 @@ namespace VULKAN_HPP_NAMESPACE
struct PhysicalDeviceDiagnosticsConfigFeaturesNV;
struct DeviceDiagnosticsConfigCreateInfoNV;
#if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_NV_cuda_kernel_launch ===
struct CudaModuleCreateInfoNV;
struct CudaFunctionCreateInfoNV;
struct CudaLaunchInfoNV;
struct PhysicalDeviceCudaKernelLaunchFeaturesNV;
struct PhysicalDeviceCudaKernelLaunchPropertiesNV;
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
//=== VK_NV_low_latency ===
struct QueryLowLatencySupportNV;
@@ -1518,6 +1527,11 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_ARM_shader_core_properties ===
struct PhysicalDeviceShaderCorePropertiesARM;
//=== VK_ARM_scheduling_controls ===
struct DeviceQueueShaderCoreControlCreateInfoARM;
struct PhysicalDeviceSchedulingControlsFeaturesARM;
struct PhysicalDeviceSchedulingControlsPropertiesARM;
//=== VK_EXT_image_sliced_view_of_3d ===
struct PhysicalDeviceImageSlicedViewOf3DFeaturesEXT;
struct ImageViewSlicedCreateInfoEXT;
@@ -1815,6 +1829,12 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_NV_device_generated_commands ===
class IndirectCommandsLayoutNV;
#if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_NV_cuda_kernel_launch ===
class CudaModuleNV;
class CudaFunctionNV;
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_buffer_collection ===
class BufferCollectionFUCHSIA;
@@ -2041,6 +2061,15 @@ namespace VULKAN_HPP_NAMESPACE
};
using UniqueSwapchainKHR = UniqueHandle<SwapchainKHR, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
//=== VK_KHR_display ===
template <typename Dispatch>
class UniqueHandleTraits<DisplayKHR, Dispatch>
{
public:
using deleter = ObjectDestroy<PhysicalDevice, Dispatch>;
};
using UniqueDisplayKHR = UniqueHandle<DisplayKHR, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
//=== VK_EXT_debug_report ===
template <typename Dispatch>
class UniqueHandleTraits<DebugReportCallbackEXT, Dispatch>
@@ -2118,6 +2147,15 @@ namespace VULKAN_HPP_NAMESPACE
};
using UniqueAccelerationStructureNV = UniqueHandle<AccelerationStructureNV, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
//=== VK_INTEL_performance_query ===
template <typename Dispatch>
class UniqueHandleTraits<PerformanceConfigurationINTEL, Dispatch>
{
public:
using deleter = ObjectDestroy<Device, Dispatch>;
};
using UniquePerformanceConfigurationINTEL = UniqueHandle<PerformanceConfigurationINTEL, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
//=== VK_KHR_deferred_host_operations ===
template <typename Dispatch>
class UniqueHandleTraits<DeferredOperationKHR, Dispatch>
@@ -2136,6 +2174,24 @@ namespace VULKAN_HPP_NAMESPACE
};
using UniqueIndirectCommandsLayoutNV = UniqueHandle<IndirectCommandsLayoutNV, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
# if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_NV_cuda_kernel_launch ===
template <typename Dispatch>
class UniqueHandleTraits<CudaModuleNV, Dispatch>
{
public:
using deleter = ObjectDestroy<Device, Dispatch>;
};
using UniqueCudaModuleNV = UniqueHandle<CudaModuleNV, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
template <typename Dispatch>
class UniqueHandleTraits<CudaFunctionNV, Dispatch>
{
public:
using deleter = ObjectDestroy<Device, Dispatch>;
};
using UniqueCudaFunctionNV = UniqueHandle<CudaFunctionNV, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
# if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_buffer_collection ===
template <typename Dispatch>
@@ -5684,6 +5740,19 @@ namespace VULKAN_HPP_NAMESPACE
void encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR & encodeInfo,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_NV_cuda_kernel_launch ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void cudaLaunchKernelNV( const VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV * pLaunchInfo,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void cudaLaunchKernelNV( const VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV & launchInfo,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
//=== VK_KHR_synchronization2 ===
@@ -7098,6 +7167,168 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
};
#if defined( VK_ENABLE_BETA_EXTENSIONS )
class CudaFunctionNV
{
public:
using CType = VkCudaFunctionNV;
using NativeType = VkCudaFunctionNV;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCudaFunctionNV;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
public:
VULKAN_HPP_CONSTEXPR CudaFunctionNV() = default;
VULKAN_HPP_CONSTEXPR CudaFunctionNV( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
VULKAN_HPP_TYPESAFE_EXPLICIT CudaFunctionNV( VkCudaFunctionNV cudaFunctionNV ) VULKAN_HPP_NOEXCEPT : m_cudaFunctionNV( cudaFunctionNV ) {}
# if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
CudaFunctionNV & operator=( VkCudaFunctionNV cudaFunctionNV ) VULKAN_HPP_NOEXCEPT
{
m_cudaFunctionNV = cudaFunctionNV;
return *this;
}
# endif
CudaFunctionNV & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
{
m_cudaFunctionNV = {};
return *this;
}
# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
auto operator<=>( CudaFunctionNV const & ) const = default;
# else
bool operator==( CudaFunctionNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return m_cudaFunctionNV == rhs.m_cudaFunctionNV;
}
bool operator!=( CudaFunctionNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return m_cudaFunctionNV != rhs.m_cudaFunctionNV;
}
bool operator<( CudaFunctionNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return m_cudaFunctionNV < rhs.m_cudaFunctionNV;
}
# endif
VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCudaFunctionNV() const VULKAN_HPP_NOEXCEPT
{
return m_cudaFunctionNV;
}
explicit operator bool() const VULKAN_HPP_NOEXCEPT
{
return m_cudaFunctionNV != VK_NULL_HANDLE;
}
bool operator!() const VULKAN_HPP_NOEXCEPT
{
return m_cudaFunctionNV == VK_NULL_HANDLE;
}
private:
VkCudaFunctionNV m_cudaFunctionNV = {};
};
template <>
struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eCudaFunctionNV>
{
using Type = VULKAN_HPP_NAMESPACE::CudaFunctionNV;
};
template <>
struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::CudaFunctionNV>
{
static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
};
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
class CudaModuleNV
{
public:
using CType = VkCudaModuleNV;
using NativeType = VkCudaModuleNV;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCudaModuleNV;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
public:
VULKAN_HPP_CONSTEXPR CudaModuleNV() = default;
VULKAN_HPP_CONSTEXPR CudaModuleNV( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
VULKAN_HPP_TYPESAFE_EXPLICIT CudaModuleNV( VkCudaModuleNV cudaModuleNV ) VULKAN_HPP_NOEXCEPT : m_cudaModuleNV( cudaModuleNV ) {}
# if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
CudaModuleNV & operator=( VkCudaModuleNV cudaModuleNV ) VULKAN_HPP_NOEXCEPT
{
m_cudaModuleNV = cudaModuleNV;
return *this;
}
# endif
CudaModuleNV & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
{
m_cudaModuleNV = {};
return *this;
}
# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
auto operator<=>( CudaModuleNV const & ) const = default;
# else
bool operator==( CudaModuleNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return m_cudaModuleNV == rhs.m_cudaModuleNV;
}
bool operator!=( CudaModuleNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return m_cudaModuleNV != rhs.m_cudaModuleNV;
}
bool operator<( CudaModuleNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return m_cudaModuleNV < rhs.m_cudaModuleNV;
}
# endif
VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCudaModuleNV() const VULKAN_HPP_NOEXCEPT
{
return m_cudaModuleNV;
}
explicit operator bool() const VULKAN_HPP_NOEXCEPT
{
return m_cudaModuleNV != VK_NULL_HANDLE;
}
bool operator!() const VULKAN_HPP_NOEXCEPT
{
return m_cudaModuleNV == VK_NULL_HANDLE;
}
private:
VkCudaModuleNV m_cudaModuleNV = {};
};
template <>
struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eCudaModuleNV>
{
using Type = VULKAN_HPP_NAMESPACE::CudaModuleNV;
};
template <>
struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::CudaModuleNV>
{
static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
};
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
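Like every other handle wrapper, the two classes above are zero-cost, layout-compatible views over the corresponding C handles. A small sketch of the round-trip this implies (the raw handle is assumed to come from a plain C call elsewhere):

#define VK_ENABLE_BETA_EXTENSIONS
#include <vulkan/vulkan.hpp>

// Sketch: wrap a raw VkCudaModuleNV, test it for validity and unwrap it again.
bool isValidCudaModule( VkCudaModuleNV raw )
{
  vk::CudaModuleNV wrapped( raw );                                      // wrap the C handle
  VkCudaModuleNV   unwrapped = static_cast<VkCudaModuleNV>( wrapped );  // unwrap it again
  return static_cast<bool>( wrapped ) && ( unwrapped == raw );          // false for VK_NULL_HANDLE
}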
class DescriptorPool
{
public:
@@ -12379,6 +12610,111 @@ namespace VULKAN_HPP_NAMESPACE
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_NV_cuda_kernel_launch ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result createCudaModuleNV( const VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV * pCreateInfo,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
VULKAN_HPP_NAMESPACE::CudaModuleNV * pModule,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::CudaModuleNV>::type
createCudaModuleNV( const VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CudaModuleNV, Dispatch>>::type
createCudaModuleNVUnique( const VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /* VULKAN_HPP_NO_SMART_HANDLE */
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result getCudaModuleCacheNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module,
size_t * pCacheSize,
void * pCacheData,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Uint8_tAllocator = std::allocator<uint8_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
getCudaModuleCacheNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename Uint8_tAllocator = std::allocator<uint8_t>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
typename B1 = Uint8_tAllocator,
typename std::enable_if<std::is_same<typename B1::value_type, uint8_t>::value, int>::type = 0>
VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type getCudaModuleCacheNV(
VULKAN_HPP_NAMESPACE::CudaModuleNV module, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result createCudaFunctionNV( const VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV * pCreateInfo,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
VULKAN_HPP_NAMESPACE::CudaFunctionNV * pFunction,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::CudaFunctionNV>::type
createCudaFunctionNV( const VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CudaFunctionNV, Dispatch>>::type
createCudaFunctionNVUnique( const VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /* VULKAN_HPP_NO_SMART_HANDLE */
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroyCudaModuleNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroyCudaModuleNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroy( VULKAN_HPP_NAMESPACE::CudaModuleNV module,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroy( VULKAN_HPP_NAMESPACE::CudaModuleNV module,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroyCudaFunctionNV( VULKAN_HPP_NAMESPACE::CudaFunctionNV function,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroyCudaFunctionNV( VULKAN_HPP_NAMESPACE::CudaFunctionNV function,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroy( VULKAN_HPP_NAMESPACE::CudaFunctionNV function,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroy( VULKAN_HPP_NAMESPACE::CudaFunctionNV function,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_USE_PLATFORM_METAL_EXT )
//=== VK_EXT_metal_objects ===

View file

@@ -457,6 +457,28 @@ namespace std
}
};
#if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_NV_cuda_kernel_launch ===
template <>
struct hash<VULKAN_HPP_NAMESPACE::CudaModuleNV>
{
std::size_t operator()( VULKAN_HPP_NAMESPACE::CudaModuleNV const & cudaModuleNV ) const VULKAN_HPP_NOEXCEPT
{
return std::hash<VkCudaModuleNV>{}( static_cast<VkCudaModuleNV>( cudaModuleNV ) );
}
};
template <>
struct hash<VULKAN_HPP_NAMESPACE::CudaFunctionNV>
{
std::size_t operator()( VULKAN_HPP_NAMESPACE::CudaFunctionNV const & cudaFunctionNV ) const VULKAN_HPP_NOEXCEPT
{
return std::hash<VkCudaFunctionNV>{}( static_cast<VkCudaFunctionNV>( cudaFunctionNV ) );
}
};
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_buffer_collection ===
@@ -2732,6 +2754,67 @@ namespace std
}
};
# if defined( VK_ENABLE_BETA_EXTENSIONS )
template <>
struct hash<VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV>
{
std::size_t operator()( VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV const & cudaFunctionCreateInfoNV ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, cudaFunctionCreateInfoNV.sType );
VULKAN_HPP_HASH_COMBINE( seed, cudaFunctionCreateInfoNV.pNext );
VULKAN_HPP_HASH_COMBINE( seed, cudaFunctionCreateInfoNV.module );
for ( const char * p = cudaFunctionCreateInfoNV.pName; *p != '\0'; ++p )
{
VULKAN_HPP_HASH_COMBINE( seed, *p );
}
return seed;
}
};
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
# if defined( VK_ENABLE_BETA_EXTENSIONS )
template <>
struct hash<VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV>
{
std::size_t operator()( VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV const & cudaLaunchInfoNV ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, cudaLaunchInfoNV.sType );
VULKAN_HPP_HASH_COMBINE( seed, cudaLaunchInfoNV.pNext );
VULKAN_HPP_HASH_COMBINE( seed, cudaLaunchInfoNV.function );
VULKAN_HPP_HASH_COMBINE( seed, cudaLaunchInfoNV.gridDimX );
VULKAN_HPP_HASH_COMBINE( seed, cudaLaunchInfoNV.gridDimY );
VULKAN_HPP_HASH_COMBINE( seed, cudaLaunchInfoNV.gridDimZ );
VULKAN_HPP_HASH_COMBINE( seed, cudaLaunchInfoNV.blockDimX );
VULKAN_HPP_HASH_COMBINE( seed, cudaLaunchInfoNV.blockDimY );
VULKAN_HPP_HASH_COMBINE( seed, cudaLaunchInfoNV.blockDimZ );
VULKAN_HPP_HASH_COMBINE( seed, cudaLaunchInfoNV.sharedMemBytes );
VULKAN_HPP_HASH_COMBINE( seed, cudaLaunchInfoNV.paramCount );
VULKAN_HPP_HASH_COMBINE( seed, cudaLaunchInfoNV.pParams );
VULKAN_HPP_HASH_COMBINE( seed, cudaLaunchInfoNV.extraCount );
VULKAN_HPP_HASH_COMBINE( seed, cudaLaunchInfoNV.pExtras );
return seed;
}
};
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
# if defined( VK_ENABLE_BETA_EXTENSIONS )
template <>
struct hash<VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV>
{
std::size_t operator()( VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV const & cudaModuleCreateInfoNV ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, cudaModuleCreateInfoNV.sType );
VULKAN_HPP_HASH_COMBINE( seed, cudaModuleCreateInfoNV.pNext );
VULKAN_HPP_HASH_COMBINE( seed, cudaModuleCreateInfoNV.dataSize );
VULKAN_HPP_HASH_COMBINE( seed, cudaModuleCreateInfoNV.pData );
return seed;
}
};
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
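With these specializations the new handles and structs can be used directly as keys in the standard unordered containers; a short sketch (the cache map itself is an application-side assumption, not part of vulkan.hpp):

#define VK_ENABLE_BETA_EXTENSIONS
#include <vulkan/vulkan.hpp>
#include <vulkan/vulkan_hash.hpp>
#include <unordered_map>
#include <vector>

// Sketch: per-module cache blobs keyed by the wrapped handle, relying on the
// std::hash<vk::CudaModuleNV> specialization above and the handle's operator==.
using ModuleCacheMap = std::unordered_map<vk::CudaModuleNV, std::vector<uint8_t>>;

// The struct hashes combine sType, pNext and every member, e.g. for a launch description:
std::size_t hashLaunch( vk::CudaLaunchInfoNV const & launchInfo )
{
  return std::hash<vk::CudaLaunchInfoNV>{}( launchInfo );
}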
# if defined( VK_USE_PLATFORM_WIN32_KHR )
template <>
struct hash<VULKAN_HPP_NAMESPACE::D3D12FenceSubmitInfoKHR>
@@ -3901,6 +3984,20 @@ namespace std
}
};
template <>
struct hash<VULKAN_HPP_NAMESPACE::DeviceQueueShaderCoreControlCreateInfoARM>
{
std::size_t
operator()( VULKAN_HPP_NAMESPACE::DeviceQueueShaderCoreControlCreateInfoARM const & deviceQueueShaderCoreControlCreateInfoARM ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, deviceQueueShaderCoreControlCreateInfoARM.sType );
VULKAN_HPP_HASH_COMBINE( seed, deviceQueueShaderCoreControlCreateInfoARM.pNext );
VULKAN_HPP_HASH_COMBINE( seed, deviceQueueShaderCoreControlCreateInfoARM.shaderCoreCount );
return seed;
}
};
template <>
struct hash<VULKAN_HPP_NAMESPACE::DirectDriverLoadingInfoLUNARG>
{
@@ -7652,6 +7749,39 @@ namespace std
}
};
# if defined( VK_ENABLE_BETA_EXTENSIONS )
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceCudaKernelLaunchFeaturesNV>
{
std::size_t
operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCudaKernelLaunchFeaturesNV const & physicalDeviceCudaKernelLaunchFeaturesNV ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCudaKernelLaunchFeaturesNV.sType );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCudaKernelLaunchFeaturesNV.pNext );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCudaKernelLaunchFeaturesNV.cudaKernelLaunchFeatures );
return seed;
}
};
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
# if defined( VK_ENABLE_BETA_EXTENSIONS )
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceCudaKernelLaunchPropertiesNV>
{
std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCudaKernelLaunchPropertiesNV const & physicalDeviceCudaKernelLaunchPropertiesNV ) const
VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCudaKernelLaunchPropertiesNV.sType );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCudaKernelLaunchPropertiesNV.pNext );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCudaKernelLaunchPropertiesNV.computeCapabilityMinor );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCudaKernelLaunchPropertiesNV.computeCapabilityMajor );
return seed;
}
};
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorFeaturesEXT>
{
@@ -10575,6 +10705,34 @@ namespace std
}
};
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsFeaturesARM>
{
std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsFeaturesARM const & physicalDeviceSchedulingControlsFeaturesARM ) const
VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSchedulingControlsFeaturesARM.sType );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSchedulingControlsFeaturesARM.pNext );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSchedulingControlsFeaturesARM.schedulingControls );
return seed;
}
};
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsPropertiesARM>
{
std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsPropertiesARM const & physicalDeviceSchedulingControlsPropertiesARM ) const
VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSchedulingControlsPropertiesARM.sType );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSchedulingControlsPropertiesARM.pNext );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSchedulingControlsPropertiesARM.schedulingControlsFlags );
return seed;
}
};
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceSeparateDepthStencilLayoutsFeatures>
{

View file

@@ -1394,6 +1394,16 @@ namespace VULKAN_HPP_NAMESPACE
vkCmdEncodeVideoKHR = PFN_vkCmdEncodeVideoKHR( vkGetDeviceProcAddr( device, "vkCmdEncodeVideoKHR" ) );
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
# if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_NV_cuda_kernel_launch ===
vkCreateCudaModuleNV = PFN_vkCreateCudaModuleNV( vkGetDeviceProcAddr( device, "vkCreateCudaModuleNV" ) );
vkGetCudaModuleCacheNV = PFN_vkGetCudaModuleCacheNV( vkGetDeviceProcAddr( device, "vkGetCudaModuleCacheNV" ) );
vkCreateCudaFunctionNV = PFN_vkCreateCudaFunctionNV( vkGetDeviceProcAddr( device, "vkCreateCudaFunctionNV" ) );
vkDestroyCudaModuleNV = PFN_vkDestroyCudaModuleNV( vkGetDeviceProcAddr( device, "vkDestroyCudaModuleNV" ) );
vkDestroyCudaFunctionNV = PFN_vkDestroyCudaFunctionNV( vkGetDeviceProcAddr( device, "vkDestroyCudaFunctionNV" ) );
vkCmdCudaLaunchKernelNV = PFN_vkCmdCudaLaunchKernelNV( vkGetDeviceProcAddr( device, "vkCmdCudaLaunchKernelNV" ) );
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
# if defined( VK_USE_PLATFORM_METAL_EXT )
//=== VK_EXT_metal_objects ===
vkExportMetalObjectsEXT = PFN_vkExportMetalObjectsEXT( vkGetDeviceProcAddr( device, "vkExportMetalObjectsEXT" ) );
@@ -2295,6 +2305,23 @@ namespace VULKAN_HPP_NAMESPACE
PFN_dummy vkCmdEncodeVideoKHR_placeholder = 0;
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
# if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_NV_cuda_kernel_launch ===
PFN_vkCreateCudaModuleNV vkCreateCudaModuleNV = 0;
PFN_vkGetCudaModuleCacheNV vkGetCudaModuleCacheNV = 0;
PFN_vkCreateCudaFunctionNV vkCreateCudaFunctionNV = 0;
PFN_vkDestroyCudaModuleNV vkDestroyCudaModuleNV = 0;
PFN_vkDestroyCudaFunctionNV vkDestroyCudaFunctionNV = 0;
PFN_vkCmdCudaLaunchKernelNV vkCmdCudaLaunchKernelNV = 0;
# else
PFN_dummy vkCreateCudaModuleNV_placeholder = 0;
PFN_dummy vkGetCudaModuleCacheNV_placeholder = 0;
PFN_dummy vkCreateCudaFunctionNV_placeholder = 0;
PFN_dummy vkDestroyCudaModuleNV_placeholder = 0;
PFN_dummy vkDestroyCudaFunctionNV_placeholder = 0;
PFN_dummy vkCmdCudaLaunchKernelNV_placeholder = 0;
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
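Since these pointers are resolved through vkGetDeviceProcAddr, they remain null when the extension is not enabled on the device, which gives a cheap availability probe; a hedged sketch:

#define VK_ENABLE_BETA_EXTENSIONS
#include <vulkan/vulkan_raii.hpp>

// Sketch: the RAII device dispatcher leaves the new PFN members at nullptr when
// VK_NV_cuda_kernel_launch was not enabled at device creation time.
bool supportsCudaKernelLaunch( vk::raii::Device const & device )
{
  auto const * dispatcher = device.getDispatcher();
  return dispatcher->vkCreateCudaModuleNV && dispatcher->vkCmdCudaLaunchKernelNV;
}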
# if defined( VK_USE_PLATFORM_METAL_EXT )
//=== VK_EXT_metal_objects ===
PFN_vkExportMetalObjectsEXT vkExportMetalObjectsEXT = 0;
@@ -2612,6 +2639,12 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_NV_device_generated_commands ===
class IndirectCommandsLayoutNV;
# if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_NV_cuda_kernel_launch ===
class CudaModuleNV;
class CudaFunctionNV;
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
# if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_buffer_collection ===
class BufferCollectionFUCHSIA;
@@ -4184,6 +4217,18 @@ namespace VULKAN_HPP_NAMESPACE
getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo ) const;
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
# if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_NV_cuda_kernel_launch ===
VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::CudaModuleNV
createCudaModuleNV( VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::CudaFunctionNV
createCudaFunctionNV( VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
# if defined( VK_USE_PLATFORM_METAL_EXT )
//=== VK_EXT_metal_objects ===
@@ -5883,6 +5928,12 @@ namespace VULKAN_HPP_NAMESPACE
void encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR & encodeInfo ) const VULKAN_HPP_NOEXCEPT;
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
# if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_NV_cuda_kernel_launch ===
void cudaLaunchKernelNV( const VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV & launchInfo ) const VULKAN_HPP_NOEXCEPT;
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
//=== VK_KHR_synchronization2 ===
void setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const VULKAN_HPP_NOEXCEPT;
@@ -6422,6 +6473,254 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
};
# if defined( VK_ENABLE_BETA_EXTENSIONS )
class CudaFunctionNV
{
public:
using CType = VkCudaFunctionNV;
using CppType = vk::CudaFunctionNV;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCudaFunctionNV;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
public:
CudaFunctionNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( *device )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
device.getDispatcher()->vkCreateCudaFunctionNV( static_cast<VkDevice>( *device ),
reinterpret_cast<const VkCudaFunctionCreateInfoNV *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
reinterpret_cast<VkCudaFunctionNV *>( &m_function ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
detail::throwResultException( result, "vkCreateCudaFunctionNV" );
}
}
CudaFunctionNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VkCudaFunctionNV function,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( *device )
, m_function( function )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
}
CudaFunctionNV( std::nullptr_t ) {}
~CudaFunctionNV()
{
clear();
}
CudaFunctionNV() = delete;
CudaFunctionNV( CudaFunctionNV const & ) = delete;
CudaFunctionNV( CudaFunctionNV && rhs ) VULKAN_HPP_NOEXCEPT
: m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
, m_function( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_function, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
{
}
CudaFunctionNV & operator=( CudaFunctionNV const & ) = delete;
CudaFunctionNV & operator =( CudaFunctionNV && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
std::swap( m_device, rhs.m_device );
std::swap( m_function, rhs.m_function );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
return *this;
}
VULKAN_HPP_NAMESPACE::CudaFunctionNV const & operator*() const VULKAN_HPP_NOEXCEPT
{
return m_function;
}
void clear() VULKAN_HPP_NOEXCEPT
{
if ( m_function )
{
getDispatcher()->vkDestroyCudaFunctionNV(
static_cast<VkDevice>( m_device ), static_cast<VkCudaFunctionNV>( m_function ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
}
m_device = nullptr;
m_function = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
}
VULKAN_HPP_NAMESPACE::CudaFunctionNV release()
{
m_device = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_function, nullptr );
}
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
}
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
{
VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
return m_dispatcher;
}
void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CudaFunctionNV & rhs ) VULKAN_HPP_NOEXCEPT
{
std::swap( m_device, rhs.m_device );
std::swap( m_function, rhs.m_function );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::CudaFunctionNV m_function = {};
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
};
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
# if defined( VK_ENABLE_BETA_EXTENSIONS )
class CudaModuleNV
{
public:
using CType = VkCudaModuleNV;
using CppType = vk::CudaModuleNV;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCudaModuleNV;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
public:
CudaModuleNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( *device )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
device.getDispatcher()->vkCreateCudaModuleNV( static_cast<VkDevice>( *device ),
reinterpret_cast<const VkCudaModuleCreateInfoNV *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
reinterpret_cast<VkCudaModuleNV *>( &m_module ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
detail::throwResultException( result, "vkCreateCudaModuleNV" );
}
}
CudaModuleNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VkCudaModuleNV module,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( *device )
, m_module( module )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
}
CudaModuleNV( std::nullptr_t ) {}
~CudaModuleNV()
{
clear();
}
CudaModuleNV() = delete;
CudaModuleNV( CudaModuleNV const & ) = delete;
CudaModuleNV( CudaModuleNV && rhs ) VULKAN_HPP_NOEXCEPT
: m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
, m_module( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_module, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
{
}
CudaModuleNV & operator=( CudaModuleNV const & ) = delete;
CudaModuleNV & operator =( CudaModuleNV && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
std::swap( m_device, rhs.m_device );
std::swap( m_module, rhs.m_module );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
return *this;
}
VULKAN_HPP_NAMESPACE::CudaModuleNV const & operator*() const VULKAN_HPP_NOEXCEPT
{
return m_module;
}
void clear() VULKAN_HPP_NOEXCEPT
{
if ( m_module )
{
getDispatcher()->vkDestroyCudaModuleNV(
static_cast<VkDevice>( m_device ), static_cast<VkCudaModuleNV>( m_module ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
}
m_device = nullptr;
m_module = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
}
VULKAN_HPP_NAMESPACE::CudaModuleNV release()
{
m_device = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_module, nullptr );
}
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
}
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
{
VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
return m_dispatcher;
}
void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CudaModuleNV & rhs ) VULKAN_HPP_NOEXCEPT
{
std::swap( m_device, rhs.m_device );
std::swap( m_module, rhs.m_module );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
//=== VK_NV_cuda_kernel_launch ===
VULKAN_HPP_NODISCARD std::vector<uint8_t> getCache() const;
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::CudaModuleNV m_module = {};
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
};
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
class DebugReportCallbackEXT
{
public:
@@ -18901,6 +19200,57 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
# if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_NV_cuda_kernel_launch ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::CudaModuleNV
Device::createCudaModuleNV( VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::CudaModuleNV( *this, createInfo, allocator );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<uint8_t> CudaModuleNV::getCache() const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetCudaModuleCacheNV && "Function <vkGetCudaModuleCacheNV> requires <VK_NV_cuda_kernel_launch>" );
std::vector<uint8_t> cacheData;
size_t cacheSize;
VkResult result;
do
{
result = getDispatcher()->vkGetCudaModuleCacheNV( static_cast<VkDevice>( m_device ), static_cast<VkCudaModuleNV>( m_module ), &cacheSize, nullptr );
if ( ( result == VK_SUCCESS ) && cacheSize )
{
cacheData.resize( cacheSize );
result = getDispatcher()->vkGetCudaModuleCacheNV(
static_cast<VkDevice>( m_device ), static_cast<VkCudaModuleNV>( m_module ), &cacheSize, reinterpret_cast<void *>( cacheData.data() ) );
}
} while ( result == VK_INCOMPLETE );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::CudaModuleNV::getCache" );
VULKAN_HPP_ASSERT( cacheSize <= cacheData.size() );
if ( cacheSize < cacheData.size() )
{
cacheData.resize( cacheSize );
}
return cacheData;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::CudaFunctionNV
Device::createCudaFunctionNV( VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::CudaFunctionNV( *this, createInfo, allocator );
}
VULKAN_HPP_INLINE void CommandBuffer::cudaLaunchKernelNV( const VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV & launchInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCudaLaunchKernelNV && "Function <vkCmdCudaLaunchKernelNV> requires <VK_NV_cuda_kernel_launch>" );
getDispatcher()->vkCmdCudaLaunchKernelNV( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCudaLaunchInfoNV *>( &launchInfo ) );
}
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
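The RAII layer removes the explicit destroy calls; a minimal sketch under the same assumptions as before (enabled device, a `cubin` blob, and a hypothetical kernel named "myKernel"):

#define VK_ENABLE_BETA_EXTENSIONS
#include <vulkan/vulkan_raii.hpp>
#include <vector>

// Sketch: the RAII wrappers destroy the function and module automatically when
// they leave scope; real code keeps them alive until the submitted work finished.
std::vector<uint8_t> launchWithRaii( vk::raii::Device const &        device,
                                     vk::raii::CommandBuffer const & cmd,
                                     std::vector<uint8_t> const &    cubin )
{
  vk::CudaModuleCreateInfoNV moduleInfo;
  moduleInfo.dataSize = cubin.size();
  moduleInfo.pData    = cubin.data();
  vk::raii::CudaModuleNV cudaModule = device.createCudaModuleNV( moduleInfo );

  std::vector<uint8_t> cache = cudaModule.getCache();  // driver-built cache, reusable on later runs

  vk::CudaFunctionCreateInfoNV functionInfo;
  functionInfo.module = *cudaModule;  // unwrap the RAII handle
  functionInfo.pName  = "myKernel";   // hypothetical kernel name
  vk::raii::CudaFunctionNV cudaFunction = device.createCudaFunctionNV( functionInfo );

  vk::CudaLaunchInfoNV launchInfo;    // pParams/paramCount left at their defaults for a no-argument kernel
  launchInfo.function  = *cudaFunction;
  launchInfo.gridDimX  = 1;
  launchInfo.gridDimY  = 1;
  launchInfo.gridDimZ  = 1;
  launchInfo.blockDimX = 64;
  launchInfo.blockDimY = 1;
  launchInfo.blockDimZ = 1;
  cmd.cudaLaunchKernelNV( launchInfo );

  return cache;
}  // ~CudaFunctionNV and ~CudaModuleNV run here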
# if defined( VK_USE_PLATFORM_METAL_EXT )
//=== VK_EXT_metal_objects ===

View file

@@ -654,6 +654,16 @@ namespace VULKAN_HPP_NAMESPACE
};
using SharedSwapchainKHR = SharedHandle<SwapchainKHR>;
//=== VK_KHR_display ===
template <>
class SharedHandleTraits<DisplayKHR>
{
public:
using DestructorType = PhysicalDevice;
using deleter = ObjectDestroyShared<DisplayKHR>;
};
using SharedDisplayKHR = SharedHandle<DisplayKHR>;
//=== VK_EXT_debug_report ===
template <>
class SharedHandleTraits<DebugReportCallbackEXT>
@@ -740,6 +750,16 @@ namespace VULKAN_HPP_NAMESPACE
};
using SharedAccelerationStructureNV = SharedHandle<AccelerationStructureNV>;
//=== VK_INTEL_performance_query ===
template <>
class SharedHandleTraits<PerformanceConfigurationINTEL>
{
public:
using DestructorType = Device;
using deleter = ObjectDestroyShared<PerformanceConfigurationINTEL>;
};
using SharedPerformanceConfigurationINTEL = SharedHandle<PerformanceConfigurationINTEL>;
//=== VK_KHR_deferred_host_operations ===
template <>
class SharedHandleTraits<DeferredOperationKHR>
@@ -760,6 +780,26 @@ namespace VULKAN_HPP_NAMESPACE
};
using SharedIndirectCommandsLayoutNV = SharedHandle<IndirectCommandsLayoutNV>;
# if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_NV_cuda_kernel_launch ===
template <>
class SharedHandleTraits<CudaModuleNV>
{
public:
using DestructorType = Device;
using deleter = ObjectDestroyShared<CudaModuleNV>;
};
using SharedCudaModuleNV = SharedHandle<CudaModuleNV>;
template <>
class SharedHandleTraits<CudaFunctionNV>
{
public:
using DestructorType = Device;
using deleter = ObjectDestroyShared<CudaFunctionNV>;
};
using SharedCudaFunctionNV = SharedHandle<CudaFunctionNV>;
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
# if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_buffer_collection ===
template <>
@@ -940,20 +980,6 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_display ===
template <>
class SharedHandle<DisplayKHR> : public SharedHandleBaseNoDestroy<DisplayKHR, SharedPhysicalDevice>
{
friend SharedHandleBase<DisplayKHR, SharedPhysicalDevice>;
public:
SharedHandle() = default;
explicit SharedHandle( DisplayKHR handle, SharedPhysicalDevice parent ) noexcept
: SharedHandleBaseNoDestroy<DisplayKHR, SharedPhysicalDevice>( handle, std::move( parent ) )
{
}
};
using SharedDisplayKHR = SharedHandle<DisplayKHR>;
template <>
class SharedHandle<DisplayModeKHR> : public SharedHandleBaseNoDestroy<DisplayModeKHR, SharedDisplayKHR>
{
@@ -967,22 +993,6 @@
}
};
using SharedDisplayModeKHR = SharedHandle<DisplayModeKHR>;
//=== VK_INTEL_performance_query ===
template <>
class SharedHandle<PerformanceConfigurationINTEL> : public SharedHandleBaseNoDestroy<PerformanceConfigurationINTEL, SharedDevice>
{
friend SharedHandleBase<PerformanceConfigurationINTEL, SharedDevice>;
public:
SharedHandle() = default;
explicit SharedHandle( PerformanceConfigurationINTEL handle, SharedDevice parent ) noexcept
: SharedHandleBaseNoDestroy<PerformanceConfigurationINTEL, SharedDevice>( handle, std::move( parent ) )
{
}
};
using SharedPerformanceConfigurationINTEL = SharedHandle<PerformanceConfigurationINTEL>;
#endif // !VULKAN_HPP_NO_SMART_HANDLE
} // namespace VULKAN_HPP_NAMESPACE
#endif // VULKAN_SHARED_HPP

View file

@@ -5122,6 +5122,48 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceDi
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigCreateInfoNV>::value,
"DeviceDiagnosticsConfigCreateInfoNV is not nothrow_move_constructible!" );
#if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_NV_cuda_kernel_launch ===
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CudaModuleNV ) == sizeof( VkCudaModuleNV ), "handle and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CudaModuleNV>::value, "CudaModuleNV is not nothrow_move_constructible!" );
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CudaFunctionNV ) == sizeof( VkCudaFunctionNV ), "handle and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CudaFunctionNV>::value,
"CudaFunctionNV is not nothrow_move_constructible!" );
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV ) == sizeof( VkCudaModuleCreateInfoNV ),
"struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV>::value,
"CudaModuleCreateInfoNV is not nothrow_move_constructible!" );
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV ) == sizeof( VkCudaFunctionCreateInfoNV ),
"struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV>::value,
"CudaFunctionCreateInfoNV is not nothrow_move_constructible!" );
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV ) == sizeof( VkCudaLaunchInfoNV ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV>::value,
"CudaLaunchInfoNV is not nothrow_move_constructible!" );
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCudaKernelLaunchFeaturesNV ) == sizeof( VkPhysicalDeviceCudaKernelLaunchFeaturesNV ),
"struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceCudaKernelLaunchFeaturesNV>::value,
"struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceCudaKernelLaunchFeaturesNV>::value,
"PhysicalDeviceCudaKernelLaunchFeaturesNV is not nothrow_move_constructible!" );
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCudaKernelLaunchPropertiesNV ) == sizeof( VkPhysicalDeviceCudaKernelLaunchPropertiesNV ),
"struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceCudaKernelLaunchPropertiesNV>::value,
"struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceCudaKernelLaunchPropertiesNV>::value,
"PhysicalDeviceCudaKernelLaunchPropertiesNV is not nothrow_move_constructible!" );
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
//=== VK_NV_low_latency ===
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueryLowLatencySupportNV ) == sizeof( VkQueryLowLatencySupportNV ),
@@ -6207,6 +6249,31 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Physical
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesARM>::value,
"PhysicalDeviceShaderCorePropertiesARM is not nothrow_move_constructible!" );
//=== VK_ARM_scheduling_controls ===
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceQueueShaderCoreControlCreateInfoARM ) == sizeof( VkDeviceQueueShaderCoreControlCreateInfoARM ),
"struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceQueueShaderCoreControlCreateInfoARM>::value,
"struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceQueueShaderCoreControlCreateInfoARM>::value,
"DeviceQueueShaderCoreControlCreateInfoARM is not nothrow_move_constructible!" );
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsFeaturesARM ) ==
sizeof( VkPhysicalDeviceSchedulingControlsFeaturesARM ),
"struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsFeaturesARM>::value,
"struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsFeaturesARM>::value,
"PhysicalDeviceSchedulingControlsFeaturesARM is not nothrow_move_constructible!" );
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsPropertiesARM ) ==
sizeof( VkPhysicalDeviceSchedulingControlsPropertiesARM ),
"struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsPropertiesARM>::value,
"struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsPropertiesARM>::value,
"PhysicalDeviceSchedulingControlsPropertiesARM is not nothrow_move_constructible!" );
//=== VK_EXT_image_sliced_view_of_3d ===
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageSlicedViewOf3DFeaturesEXT ) ==

File diff suppressed because it is too large

View file

@@ -3248,6 +3248,20 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr( 0, result.size() - 3 ) + " }";
}
//=== VK_ARM_scheduling_controls ===
VULKAN_HPP_INLINE std::string to_string( PhysicalDeviceSchedulingControlsFlagsARM value )
{
if ( !value )
return "{}";
std::string result;
if ( value & PhysicalDeviceSchedulingControlsFlagBitsARM::eShaderCoreCount )
result += "ShaderCoreCount | ";
return "{ " + result.substr( 0, result.size() - 3 ) + " }";
}
//=== VK_NV_memory_decompression ===
VULKAN_HPP_INLINE std::string to_string( MemoryDecompressionMethodFlagsNV value )
@@ -4242,6 +4256,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
case StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV: return "PhysicalDeviceDiagnosticsConfigFeaturesNV";
case StructureType::eDeviceDiagnosticsConfigCreateInfoNV: return "DeviceDiagnosticsConfigCreateInfoNV";
#if defined( VK_ENABLE_BETA_EXTENSIONS )
case StructureType::eCudaModuleCreateInfoNV: return "CudaModuleCreateInfoNV";
case StructureType::eCudaFunctionCreateInfoNV: return "CudaFunctionCreateInfoNV";
case StructureType::eCudaLaunchInfoNV: return "CudaLaunchInfoNV";
case StructureType::ePhysicalDeviceCudaKernelLaunchFeaturesNV: return "PhysicalDeviceCudaKernelLaunchFeaturesNV";
case StructureType::ePhysicalDeviceCudaKernelLaunchPropertiesNV: return "PhysicalDeviceCudaKernelLaunchPropertiesNV";
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
case StructureType::eQueryLowLatencySupportNV: return "QueryLowLatencySupportNV";
#if defined( VK_USE_PLATFORM_METAL_EXT )
case StructureType::eExportMetalObjectCreateInfoEXT: return "ExportMetalObjectCreateInfoEXT";
@@ -4379,6 +4400,9 @@ namespace VULKAN_HPP_NAMESPACE
case StructureType::eSamplerBorderColorComponentMappingCreateInfoEXT: return "SamplerBorderColorComponentMappingCreateInfoEXT";
case StructureType::ePhysicalDevicePageableDeviceLocalMemoryFeaturesEXT: return "PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT";
case StructureType::ePhysicalDeviceShaderCorePropertiesARM: return "PhysicalDeviceShaderCorePropertiesARM";
case StructureType::eDeviceQueueShaderCoreControlCreateInfoARM: return "DeviceQueueShaderCoreControlCreateInfoARM";
case StructureType::ePhysicalDeviceSchedulingControlsFeaturesARM: return "PhysicalDeviceSchedulingControlsFeaturesARM";
case StructureType::ePhysicalDeviceSchedulingControlsPropertiesARM: return "PhysicalDeviceSchedulingControlsPropertiesARM";
case StructureType::ePhysicalDeviceImageSlicedViewOf3DFeaturesEXT: return "PhysicalDeviceImageSlicedViewOf3DFeaturesEXT";
case StructureType::eImageViewSlicedCreateInfoEXT: return "ImageViewSlicedCreateInfoEXT";
case StructureType::ePhysicalDeviceDescriptorSetHostMappingFeaturesVALVE: return "PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE";
@@ -4553,6 +4577,10 @@ namespace VULKAN_HPP_NAMESPACE
case ObjectType::ePerformanceConfigurationINTEL: return "PerformanceConfigurationINTEL";
case ObjectType::eDeferredOperationKHR: return "DeferredOperationKHR";
case ObjectType::eIndirectCommandsLayoutNV: return "IndirectCommandsLayoutNV";
#if defined( VK_ENABLE_BETA_EXTENSIONS )
case ObjectType::eCudaModuleNV: return "CudaModuleNV";
case ObjectType::eCudaFunctionNV: return "CudaFunctionNV";
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_USE_PLATFORM_FUCHSIA )
case ObjectType::eBufferCollectionFUCHSIA: return "BufferCollectionFUCHSIA";
#endif /*VK_USE_PLATFORM_FUCHSIA*/
@@ -6947,6 +6975,10 @@ namespace VULKAN_HPP_NAMESPACE
case DebugReportObjectTypeEXT::eCuFunctionNVX: return "CuFunctionNVX";
case DebugReportObjectTypeEXT::eAccelerationStructureKHR: return "AccelerationStructureKHR";
case DebugReportObjectTypeEXT::eAccelerationStructureNV: return "AccelerationStructureNV";
#if defined( VK_ENABLE_BETA_EXTENSIONS )
case DebugReportObjectTypeEXT::eCudaModuleNV: return "CudaModuleNV";
case DebugReportObjectTypeEXT::eCudaFunctionNV: return "CudaFunctionNV";
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_USE_PLATFORM_FUCHSIA )
case DebugReportObjectTypeEXT::eBufferCollectionFUCHSIA: return "BufferCollectionFUCHSIA";
#endif /*VK_USE_PLATFORM_FUCHSIA*/
@@ -8638,6 +8670,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
//=== VK_ARM_scheduling_controls ===
VULKAN_HPP_INLINE std::string to_string( PhysicalDeviceSchedulingControlsFlagBitsARM value )
{
switch ( value )
{
case PhysicalDeviceSchedulingControlsFlagBitsARM::eShaderCoreCount: return "ShaderCoreCount";
default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
}
}
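
A minimal sketch of how the overload added above might be called, assuming the vulkan.hpp / vulkan_to_string.hpp headers generated from this registry; the standalone program around the call is hypothetical and needs no Vulkan device or loader:

    // Hypothetical usage of the new to_string overload; prints "ShaderCoreCount".
    #include <iostream>
    #include <vulkan/vulkan.hpp>
    #include <vulkan/vulkan_to_string.hpp>

    int main()
    {
      std::cout << vk::to_string( vk::PhysicalDeviceSchedulingControlsFlagBitsARM::eShaderCoreCount )
                << std::endl;
      return 0;
    }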
//=== VK_NV_memory_decompression ===
VULKAN_HPP_INLINE std::string to_string( MemoryDecompressionMethodFlagBitsNV value )

File diff suppressed because one or more lines are too long


@@ -175,7 +175,7 @@ branch of the member gitlab server.
#define <name>VKSC_API_VERSION_1_0</name> <type>VK_MAKE_API_VERSION</type>(VKSC_API_VARIANT, 1, 0, 0)// Patch version should always be set to 0</type>
<type api="vulkan" category="define">// Version of this file
#define <name>VK_HEADER_VERSION</name> 268</type>
#define <name>VK_HEADER_VERSION</name> 269</type>
<type api="vulkan" category="define" requires="VK_HEADER_VERSION">// Complete version of this file
#define <name>VK_HEADER_VERSION_COMPLETE</name> <type>VK_MAKE_API_VERSION</type>(0, 1, 3, VK_HEADER_VERSION)</type>
<type api="vulkansc" category="define">// Version of this file
@@ -482,6 +482,7 @@ typedef void* <name>MTLSharedEvent_id</name>;
<type requires="VkPresentScalingFlagBitsEXT" category="bitmask">typedef <type>VkFlags</type> <name>VkPresentScalingFlagsEXT</name>;</type>
<type requires="VkPresentGravityFlagBitsEXT" category="bitmask">typedef <type>VkFlags</type> <name>VkPresentGravityFlagsEXT</name>;</type>
<type requires="VkShaderCreateFlagBitsEXT" category="bitmask">typedef <type>VkFlags</type> <name>VkShaderCreateFlagsEXT</name>;</type>
<type bitvalues="VkPhysicalDeviceSchedulingControlsFlagBitsARM" category="bitmask">typedef <type>VkFlags64</type> <name>VkPhysicalDeviceSchedulingControlsFlagsARM</name>;</type>
<comment>Video Core extension</comment>
<type requires="VkVideoCodecOperationFlagBitsKHR" category="bitmask">typedef <type>VkFlags</type> <name>VkVideoCodecOperationFlagsKHR</name>;</type>
@@ -867,6 +868,7 @@ typedef void* <name>MTLSharedEvent_id</name>;
<type name="VkPresentGravityFlagBitsEXT" category="enum"/>
<type name="VkLatencyMarkerNV" category="enum"/>
<type name="VkOutOfBandQueueTypeNV" category="enum"/>
<type name="VkPhysicalDeviceSchedulingControlsFlagBitsARM" category="enum"/>
<comment>Enumerated types in the header, but not used by the API</comment>
<type name="VkVendorId" category="enum"/>
@@ -7599,6 +7601,36 @@ typedef void* <name>MTLSharedEvent_id</name>;
<member><type>uint32_t</type> <name>minBufferCountForDedicatedSlack</name></member>
<member><type>uint32_t</type> <name>minBufferCountForSharedSlack</name></member>
</type>
<type category="handle" parent="VkDevice" objtypeenum="VK_OBJECT_TYPE_CUDA_MODULE_NV"><type>VK_DEFINE_NON_DISPATCHABLE_HANDLE</type>(<name>VkCudaModuleNV</name>)</type>
<type category="handle" parent="VkDevice" objtypeenum="VK_OBJECT_TYPE_CUDA_FUNCTION_NV"><type>VK_DEFINE_NON_DISPATCHABLE_HANDLE</type>(<name>VkCudaFunctionNV</name>)</type>
<type category="struct" name="VkCudaModuleCreateInfoNV">
<member values="VK_STRUCTURE_TYPE_CUDA_MODULE_CREATE_INFO_NV"><type>VkStructureType</type> <name>sType</name></member>
<member optional="true">const <type>void</type>* <name>pNext</name></member>
<member><type>size_t</type> <name>dataSize</name></member>
<member len="dataSize">const <type>void</type>* <name>pData</name></member>
</type>
<type category="struct" name="VkCudaFunctionCreateInfoNV">
<member values="VK_STRUCTURE_TYPE_CUDA_FUNCTION_CREATE_INFO_NV"><type>VkStructureType</type> <name>sType</name></member>
<member optional="true">const <type>void</type>* <name>pNext</name></member>
<member><type>VkCudaModuleNV</type> <name>module</name></member>
<member len="null-terminated">const <type>char</type>* <name>pName</name></member>
</type>
<type category="struct" name="VkCudaLaunchInfoNV">
<member values="VK_STRUCTURE_TYPE_CUDA_LAUNCH_INFO_NV"><type>VkStructureType</type> <name>sType</name></member>
<member optional="true">const <type>void</type>* <name>pNext</name></member>
<member><type>VkCudaFunctionNV</type> <name>function</name></member>
<member><type>uint32_t</type> <name>gridDimX</name></member>
<member><type>uint32_t</type> <name>gridDimY</name></member>
<member><type>uint32_t</type> <name>gridDimZ</name></member>
<member><type>uint32_t</type> <name>blockDimX</name></member>
<member><type>uint32_t</type> <name>blockDimY</name></member>
<member><type>uint32_t</type> <name>blockDimZ</name></member>
<member><type>uint32_t</type> <name>sharedMemBytes</name></member>
<member optional="true"><type>size_t</type> <name>paramCount</name></member>
<member optional="true" len="paramCount">const <type>void</type>* const * <name>pParams</name></member>
<member optional="true"><type>size_t</type> <name>extraCount</name></member>
<member optional="true" len="extraCount">const <type>void</type>* const * <name>pExtras</name></member>
</type>
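
As a reading aid for the VkCudaLaunchInfoNV entry above, here is a minimal C++ sketch of filling the structure; the structure, member names, and sType value come from the registry entries above, while the helper function, the grid/block dimensions, and the parameter array are hypothetical placeholders:

    // Hypothetical helper. VK_ENABLE_BETA_EXTENSIONS must be defined before
    // including vulkan.h for the provisional CUDA types to be visible.
    #define VK_ENABLE_BETA_EXTENSIONS
    #include <vulkan/vulkan.h>

    VkCudaLaunchInfoNV makeLaunchInfo( VkCudaFunctionNV function,
                                       const void* const* kernelParams, size_t paramCount )
    {
        VkCudaLaunchInfoNV info = {};
        info.sType          = VK_STRUCTURE_TYPE_CUDA_LAUNCH_INFO_NV;
        info.pNext          = NULL;
        info.function       = function;       // created with vkCreateCudaFunctionNV
        info.gridDimX       = 16;             // placeholder grid dimensions
        info.gridDimY       = 16;
        info.gridDimZ       = 1;
        info.blockDimX      = 32;             // placeholder block dimensions
        info.blockDimY      = 8;
        info.blockDimZ      = 1;
        info.sharedMemBytes = 0;
        info.paramCount     = paramCount;     // number of entries in kernelParams
        info.pParams        = kernelParams;   // pointers to the kernel's arguments
        info.extraCount     = 0;              // no extra launch options
        info.pExtras        = NULL;
        return info;
    }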
<type category="struct" name="VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT" structextends="VkPhysicalDeviceFeatures2,VkDeviceCreateInfo"> <type category="struct" name="VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT" structextends="VkPhysicalDeviceFeatures2,VkDeviceCreateInfo">
<member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RGBA10X6_FORMATS_FEATURES_EXT"><type>VkStructureType</type> <name>sType</name></member> <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RGBA10X6_FORMATS_FEATURES_EXT"><type>VkStructureType</type> <name>sType</name></member>
<member optional="true" noautovalidity="true"><type>void</type>* <name>pNext</name></member> <member optional="true" noautovalidity="true"><type>void</type>* <name>pNext</name></member>
@ -8776,9 +8808,35 @@ typedef void* <name>MTLSharedEvent_id</name>;
<type category="struct" name="VkLatencySurfaceCapabilitiesNV" structextends="VkSurfaceCapabilities2KHR"> <type category="struct" name="VkLatencySurfaceCapabilitiesNV" structextends="VkSurfaceCapabilities2KHR">
<member values="VK_STRUCTURE_TYPE_LATENCY_SURFACE_CAPABILITIES_NV"><type>VkStructureType</type> <name>sType</name></member> <member values="VK_STRUCTURE_TYPE_LATENCY_SURFACE_CAPABILITIES_NV"><type>VkStructureType</type> <name>sType</name></member>
<member optional="true">const <type>void</type>* <name>pNext</name></member> <member optional="true">const <type>void</type>* <name>pNext</name></member>
<member optional="false,true"><type>uint32_t</type> <name>presentModeCount</name></member> <member optional="true"><type>uint32_t</type> <name>presentModeCount</name></member>
<member optional="true" len="presentModeCount"><type>VkPresentModeKHR</type>* <name>pPresentModes</name></member> <member optional="true" len="presentModeCount"><type>VkPresentModeKHR</type>* <name>pPresentModes</name></member>
</type> </type>
<type category="struct" name="VkPhysicalDeviceCudaKernelLaunchFeaturesNV" structextends="VkPhysicalDeviceFeatures2,VkDeviceCreateInfo">
<member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUDA_KERNEL_LAUNCH_FEATURES_NV"><type>VkStructureType</type> <name>sType</name></member>
<member optional="true"><type>void</type>* <name>pNext</name></member>
<member><type>VkBool32</type> <name>cudaKernelLaunchFeatures</name></member>
</type>
<type category="struct" name="VkPhysicalDeviceCudaKernelLaunchPropertiesNV" structextends="VkPhysicalDeviceProperties2" returnedonly="true">
<member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUDA_KERNEL_LAUNCH_PROPERTIES_NV"><type>VkStructureType</type> <name>sType</name></member>
<member optional="true"><type>void</type>* <name>pNext</name></member>
<member limittype="max"><type>uint32_t</type> <name>computeCapabilityMinor</name></member>
<member limittype="min"><type>uint32_t</type> <name>computeCapabilityMajor</name></member>
</type>
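
The two structs above extend VkPhysicalDeviceFeatures2 and VkPhysicalDeviceProperties2, so they are queried through the usual pNext chain. A minimal sketch, assuming a valid VkPhysicalDevice and the core Vulkan 1.1 query entry points; the surrounding function is hypothetical:

    // Hypothetical query of the new CUDA kernel launch feature/property structs.
    #define VK_ENABLE_BETA_EXTENSIONS
    #include <vulkan/vulkan.h>
    #include <stdio.h>

    void queryCudaKernelLaunchSupport( VkPhysicalDevice physicalDevice )
    {
        VkPhysicalDeviceCudaKernelLaunchFeaturesNV cudaFeatures = {};
        cudaFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUDA_KERNEL_LAUNCH_FEATURES_NV;

        VkPhysicalDeviceFeatures2 features2 = {};
        features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
        features2.pNext = &cudaFeatures;
        vkGetPhysicalDeviceFeatures2( physicalDevice, &features2 );

        VkPhysicalDeviceCudaKernelLaunchPropertiesNV cudaProps = {};
        cudaProps.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUDA_KERNEL_LAUNCH_PROPERTIES_NV;

        VkPhysicalDeviceProperties2 props2 = {};
        props2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
        props2.pNext = &cudaProps;
        vkGetPhysicalDeviceProperties2( physicalDevice, &props2 );

        printf( "cudaKernelLaunchFeatures: %u, compute capability %u.%u\n",
                cudaFeatures.cudaKernelLaunchFeatures,
                cudaProps.computeCapabilityMajor,
                cudaProps.computeCapabilityMinor );
    }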
<type category="struct" name="VkDeviceQueueShaderCoreControlCreateInfoARM" structextends="VkDeviceQueueCreateInfo,VkDeviceCreateInfo">
<member values="VK_STRUCTURE_TYPE_DEVICE_QUEUE_SHADER_CORE_CONTROL_CREATE_INFO_ARM"><type>VkStructureType</type> <name>sType</name></member>
<member optional="true"><type>void</type>* <name>pNext</name></member>
<member><type>uint32_t</type> <name>shaderCoreCount</name></member>
</type>
<type category="struct" name="VkPhysicalDeviceSchedulingControlsFeaturesARM" structextends="VkPhysicalDeviceFeatures2,VkDeviceCreateInfo">
<member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCHEDULING_CONTROLS_FEATURES_ARM"><type>VkStructureType</type> <name>sType</name></member>
<member optional="true"><type>void</type>* <name>pNext</name></member>
<member><type>VkBool32</type> <name>schedulingControls</name></member>
</type>
<type category="struct" name="VkPhysicalDeviceSchedulingControlsPropertiesARM" structextends="VkPhysicalDeviceProperties2">
<member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCHEDULING_CONTROLS_PROPERTIES_ARM"><type>VkStructureType</type> <name>sType</name></member>
<member optional="true"><type>void</type>* <name>pNext</name></member>
<member><type>VkPhysicalDeviceSchedulingControlsFlagsARM</type> <name>schedulingControlsFlags</name></member>
</type>
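
VkDeviceQueueShaderCoreControlCreateInfoARM extends VkDeviceQueueCreateInfo (and VkDeviceCreateInfo), so an application that has confirmed the VK_PHYSICAL_DEVICE_SCHEDULING_CONTROLS_SHADER_CORE_COUNT_ARM bit in schedulingControlsFlags can chain it onto a queue's create info. A minimal sketch; the helper, queue family index, priority, and requested core count are hypothetical:

    // Hypothetical helper: chain a shader-core-count request onto a queue create info.
    #include <vulkan/vulkan.h>

    VkDeviceQueueCreateInfo makeQueueCreateInfo( uint32_t queueFamilyIndex, const float* pPriority,
                                                 const VkDeviceQueueShaderCoreControlCreateInfoARM* pCoreControl )
    {
        VkDeviceQueueCreateInfo queueInfo = {};
        queueInfo.sType            = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
        queueInfo.pNext            = pCoreControl;   // ARM scheduling-controls struct chained here
        queueInfo.queueFamilyIndex = queueFamilyIndex;
        queueInfo.queueCount       = 1;
        queueInfo.pQueuePriorities = pPriority;
        return queueInfo;
    }

    // Caller side (placeholder core count):
    //   VkDeviceQueueShaderCoreControlCreateInfoARM coreControl = {};
    //   coreControl.sType           = VK_STRUCTURE_TYPE_DEVICE_QUEUE_SHADER_CORE_CONTROL_CREATE_INFO_ARM;
    //   coreControl.shaderCoreCount = 4;
    //   float priority = 1.0f;
    //   VkDeviceQueueCreateInfo qi = makeQueueCreateInfo( 0, &priority, &coreControl );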
</types>
@@ -10436,6 +10494,9 @@ typedef void* <name>MTLSharedEvent_id</name>;
<enum bitpos="1" name="VK_PRESENT_GRAVITY_MAX_BIT_EXT"/>
<enum bitpos="2" name="VK_PRESENT_GRAVITY_CENTERED_BIT_EXT"/>
</enums>
<enums name="VkPhysicalDeviceSchedulingControlsFlagBitsARM" type="bitmask">
<enum bitpos="0" name="VK_PHYSICAL_DEVICE_SCHEDULING_CONTROLS_SHADER_CORE_COUNT_ARM"/>
</enums>
<enums name="VkVideoCodecOperationFlagBitsKHR" type="bitmask">
<enum value="0" name="VK_VIDEO_CODEC_OPERATION_NONE_KHR"/>
@@ -13748,7 +13809,7 @@ typedef void* <name>MTLSharedEvent_id</name>;
<param><type>VkDevice</type> <name>device</name></param>
<param><type>VkFaultQueryBehavior</type> <name>faultQueryBehavior</name></param>
<param><type>VkBool32</type>* <name>pUnrecordedFaults</name></param>
<param><type>uint32_t</type>* <name>pFaultCount</name></param>
<param optional="false,true"><type>uint32_t</type>* <name>pFaultCount</name></param>
<param optional="true" len="pFaultCount"><type>VkFaultData</type>* <name>pFaults</name></param>
</command>
<command successcodes="VK_SUCCESS,VK_INCOMPLETE" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY">
@@ -14596,6 +14657,44 @@ typedef void* <name>MTLSharedEvent_id</name>;
<param><type>VkBufferCollectionFUCHSIA</type> <name>collection</name></param>
<param><type>VkBufferCollectionPropertiesFUCHSIA</type>* <name>pProperties</name></param>
</command>
<command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_INITIALIZATION_FAILED,VK_ERROR_OUT_OF_HOST_MEMORY">
<proto><type>VkResult</type> <name>vkCreateCudaModuleNV</name></proto>
<param><type>VkDevice</type> <name>device</name></param>
<param>const <type>VkCudaModuleCreateInfoNV</type>* <name>pCreateInfo</name></param>
<param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
<param><type>VkCudaModuleNV</type>* <name>pModule</name></param>
</command>
<command successcodes="VK_SUCCESS,VK_INCOMPLETE" errorcodes="VK_ERROR_INITIALIZATION_FAILED">
<proto><type>VkResult</type> <name>vkGetCudaModuleCacheNV</name></proto>
<param><type>VkDevice</type> <name>device</name></param>
<param><type>VkCudaModuleNV</type> <name>module</name></param>
<param optional="false,true"><type>size_t</type>* <name>pCacheSize</name></param>
<param optional="true" len="pCacheSize"><type>void</type>* <name>pCacheData</name></param>
</command>
<command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_INITIALIZATION_FAILED,VK_ERROR_OUT_OF_HOST_MEMORY">
<proto><type>VkResult</type> <name>vkCreateCudaFunctionNV</name></proto>
<param><type>VkDevice</type> <name>device</name></param>
<param>const <type>VkCudaFunctionCreateInfoNV</type>* <name>pCreateInfo</name></param>
<param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
<param><type>VkCudaFunctionNV</type>* <name>pFunction</name></param>
</command>
<command>
<proto><type>void</type> <name>vkDestroyCudaModuleNV</name></proto>
<param><type>VkDevice</type> <name>device</name></param>
<param><type>VkCudaModuleNV</type> <name>module</name></param>
<param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
</command>
<command>
<proto><type>void</type> <name>vkDestroyCudaFunctionNV</name></proto>
<param><type>VkDevice</type> <name>device</name></param>
<param><type>VkCudaFunctionNV</type> <name>function</name></param>
<param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
</command>
<command queues="graphics,compute" renderpass="both" cmdbufferlevel="primary,secondary" tasks="action">
<proto><type>void</type> <name>vkCmdCudaLaunchKernelNV</name></proto>
<param><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
<param>const <type>VkCudaLaunchInfoNV</type>* <name>pLaunchInfo</name></param>
</command>
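
Taken together, the commands registered above describe a create-module / create-function / record-launch flow. The sketch below strings them together under stated assumptions: the command names and parameter orders come from the registry, while the helper, the module binary, and the kernel entry-point name ("myKernel") are placeholders, and error handling is minimal:

    // Hypothetical flow for the commands above. VK_ENABLE_BETA_EXTENSIONS must be
    // defined before including vulkan.h for the CUDA entry points to be declared.
    #define VK_ENABLE_BETA_EXTENSIONS
    #include <vulkan/vulkan.h>

    VkResult recordCudaLaunch( VkDevice device, VkCommandBuffer cmd,
                               const void* moduleData, size_t moduleSize,
                               VkCudaModuleNV* outModule, VkCudaFunctionNV* outFunction )
    {
        VkCudaModuleCreateInfoNV moduleInfo = {};
        moduleInfo.sType    = VK_STRUCTURE_TYPE_CUDA_MODULE_CREATE_INFO_NV;
        moduleInfo.dataSize = moduleSize;
        moduleInfo.pData    = moduleData;
        VkResult result = vkCreateCudaModuleNV( device, &moduleInfo, NULL, outModule );
        if ( result != VK_SUCCESS )
            return result;

        VkCudaFunctionCreateInfoNV functionInfo = {};
        functionInfo.sType  = VK_STRUCTURE_TYPE_CUDA_FUNCTION_CREATE_INFO_NV;
        functionInfo.module = *outModule;
        functionInfo.pName  = "myKernel";   // placeholder kernel entry-point name
        result = vkCreateCudaFunctionNV( device, &functionInfo, NULL, outFunction );
        if ( result != VK_SUCCESS )
            return result;

        VkCudaLaunchInfoNV launchInfo = {};
        launchInfo.sType     = VK_STRUCTURE_TYPE_CUDA_LAUNCH_INFO_NV;
        launchInfo.function  = *outFunction;
        launchInfo.gridDimX  = launchInfo.gridDimY  = launchInfo.gridDimZ  = 1;   // placeholder dispatch
        launchInfo.blockDimX = launchInfo.blockDimY = launchInfo.blockDimZ = 1;
        vkCmdCudaLaunchKernelNV( cmd, &launchInfo );

        // The module and function presumably must stay alive until the submitted work finishes;
        // the caller later releases them with vkDestroyCudaFunctionNV and vkDestroyCudaModuleNV.
        return VK_SUCCESS;
    }

Per its optional="false,true" pCacheSize parameter and VK_INCOMPLETE success code, vkGetCudaModuleCacheNV follows the usual two-call pattern: call it once with pCacheData set to NULL to obtain the size, then again with a buffer of that size to retrieve the cache blob.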
<command queues="graphics" renderpass="outside" cmdbufferlevel="primary,secondary" tasks="action,state"> <command queues="graphics" renderpass="outside" cmdbufferlevel="primary,secondary" tasks="action,state">
<proto><type>void</type> <name>vkCmdBeginRendering</name></proto> <proto><type>void</type> <name>vkCmdBeginRendering</name></proto>
<param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param> <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
@ -20698,10 +20797,32 @@ typedef void* <name>MTLSharedEvent_id</name>;
<enum value="&quot;VK_QCOM_extension_307&quot;" name="VK_QCOM_EXTENSION_307_EXTENSION_NAME"/> <enum value="&quot;VK_QCOM_extension_307&quot;" name="VK_QCOM_EXTENSION_307_EXTENSION_NAME"/>
</require> </require>
</extension> </extension>
<extension name="VK_NV_extension_308" number="308" type="device" author="NV" contact="Tristan Lorach @tlorach" supported="disabled"> <extension name="VK_NV_cuda_kernel_launch" number="308" type="device" author="NV" contact="Tristan Lorach @tlorach" supported="vulkan" provisional="true">
<require> <require>
<enum value="0" name="VK_NV_EXTENSION_308_SPEC_VERSION"/> <enum value="2" name="VK_NV_CUDA_KERNEL_LAUNCH_SPEC_VERSION"/>
<enum value="&quot;VK_NV_extension_308&quot;" name="VK_NV_EXTENSION_308_EXTENSION_NAME"/> <enum value="&quot;VK_NV_cuda_kernel_launch&quot;" name="VK_NV_CUDA_KERNEL_LAUNCH_EXTENSION_NAME"/>
<enum offset="0" extends="VkStructureType" name="VK_STRUCTURE_TYPE_CUDA_MODULE_CREATE_INFO_NV"/>
<enum offset="1" extends="VkStructureType" name="VK_STRUCTURE_TYPE_CUDA_FUNCTION_CREATE_INFO_NV"/>
<enum offset="2" extends="VkStructureType" name="VK_STRUCTURE_TYPE_CUDA_LAUNCH_INFO_NV"/>
<enum offset="3" extends="VkStructureType" name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUDA_KERNEL_LAUNCH_FEATURES_NV"/>
<enum offset="4" extends="VkStructureType" name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUDA_KERNEL_LAUNCH_PROPERTIES_NV"/>
<enum offset="0" extends="VkObjectType" name="VK_OBJECT_TYPE_CUDA_MODULE_NV"/>
<enum offset="1" extends="VkObjectType" name="VK_OBJECT_TYPE_CUDA_FUNCTION_NV"/>
<enum offset="0" extends="VkDebugReportObjectTypeEXT" name="VK_DEBUG_REPORT_OBJECT_TYPE_CUDA_MODULE_NV"/>
<enum offset="1" extends="VkDebugReportObjectTypeEXT" name="VK_DEBUG_REPORT_OBJECT_TYPE_CUDA_FUNCTION_NV"/>
<type name="VkCudaModuleNV"/>
<type name="VkCudaFunctionNV"/>
<type name="VkCudaModuleCreateInfoNV"/>
<type name="VkCudaFunctionCreateInfoNV"/>
<type name="VkCudaLaunchInfoNV"/>
<type name="VkPhysicalDeviceCudaKernelLaunchFeaturesNV"/>
<type name="VkPhysicalDeviceCudaKernelLaunchPropertiesNV"/>
<command name="vkCreateCudaModuleNV"/>
<command name="vkGetCudaModuleCacheNV"/>
<command name="vkCreateCudaFunctionNV"/>
<command name="vkDestroyCudaModuleNV"/>
<command name="vkDestroyCudaFunctionNV"/>
<command name="vkCmdCudaLaunchKernelNV"/>
</require>
</extension>
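
Because the extension is registered as provisional, the VK_NV_CUDA_KERNEL_LAUNCH_EXTENSION_NAME macro and the CUDA types are only visible when VK_ENABLE_BETA_EXTENSIONS is defined, and the extension still has to be enabled at device creation. A minimal sketch, assuming the queue setup is supplied from elsewhere and that the feature query shown earlier reported support:

    // Hypothetical device creation with VK_NV_cuda_kernel_launch enabled.
    #define VK_ENABLE_BETA_EXTENSIONS
    #include <vulkan/vulkan.h>

    VkResult createDeviceWithCudaLaunch( VkPhysicalDevice physicalDevice,
                                         const VkDeviceQueueCreateInfo* pQueueInfo,
                                         VkDevice* pDevice )
    {
        const char* extensions[] = { VK_NV_CUDA_KERNEL_LAUNCH_EXTENSION_NAME };

        VkPhysicalDeviceCudaKernelLaunchFeaturesNV cudaFeatures = {};
        cudaFeatures.sType                    = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUDA_KERNEL_LAUNCH_FEATURES_NV;
        cudaFeatures.cudaKernelLaunchFeatures = VK_TRUE;   // assumes the query above reported support

        VkDeviceCreateInfo createInfo = {};
        createInfo.sType                   = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
        createInfo.pNext                   = &cudaFeatures;   // the features struct also extends VkDeviceCreateInfo
        createInfo.queueCreateInfoCount    = 1;
        createInfo.pQueueCreateInfos       = pQueueInfo;
        createInfo.enabledExtensionCount   = 1;
        createInfo.ppEnabledExtensionNames = extensions;

        return vkCreateDevice( physicalDevice, &createInfo, NULL, pDevice );
    }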
<extension name="VK_KHR_object_refresh" number="309" type="device" author="KHR" contact="Aidan Fabius @afabius" supported="vulkansc" ratified="vulkansc"> <extension name="VK_KHR_object_refresh" number="309" type="device" author="KHR" contact="Aidan Fabius @afabius" supported="vulkansc" ratified="vulkansc">
@ -22067,10 +22188,18 @@ typedef void* <name>MTLSharedEvent_id</name>;
<enum value="&quot;VK_KHR_extension_417&quot;" name="VK_KHR_EXTENSION_417_EXTENSION_NAME"/> <enum value="&quot;VK_KHR_extension_417&quot;" name="VK_KHR_EXTENSION_417_EXTENSION_NAME"/>
</require> </require>
</extension> </extension>
<extension name="VK_ARM_extension_418" number="418" author="ARM" contact="Kevin Petit @kpet" supported="disabled"> <extension name="VK_ARM_scheduling_controls" number="418" author="ARM" contact="Kevin Petit @kpet" type="device" depends="VK_ARM_shader_core_builtins" supported="vulkan">
<require> <require>
<enum value="0" name="VK_ARM_EXTENSION_418_SPEC_VERSION"/> <enum value="1" name="VK_ARM_SCHEDULING_CONTROLS_SPEC_VERSION"/>
<enum value="&quot;VK_ARM_extension_418&quot;" name="VK_ARM_EXTENSION_418_EXTENSION_NAME"/> <enum value="&quot;VK_ARM_scheduling_controls&quot;" name="VK_ARM_SCHEDULING_CONTROLS_EXTENSION_NAME"/>
<enum extends="VkStructureType" offset="0" name="VK_STRUCTURE_TYPE_DEVICE_QUEUE_SHADER_CORE_CONTROL_CREATE_INFO_ARM"/>
<enum extends="VkStructureType" offset="1" name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCHEDULING_CONTROLS_FEATURES_ARM"/>
<enum extends="VkStructureType" offset="2" name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCHEDULING_CONTROLS_PROPERTIES_ARM"/>
<type name="VkDeviceQueueShaderCoreControlCreateInfoARM"/>
<type name="VkPhysicalDeviceSchedulingControlsFeaturesARM"/>
<type name="VkPhysicalDeviceSchedulingControlsPropertiesARM"/>
<type name="VkPhysicalDeviceSchedulingControlsFlagsARM"/>
<type name="VkPhysicalDeviceSchedulingControlsFlagBitsARM"/>
</require>
</extension>
<extension name="VK_EXT_image_sliced_view_of_3d" number="419" depends="VK_KHR_maintenance1+VK_KHR_get_physical_device_properties2" author="EXT" contact="Mike Blumenkrantz @zmike" specialuse="d3demulation" type="device" supported="vulkan">
@@ -22683,8 +22812,8 @@ typedef void* <name>MTLSharedEvent_id</name>;
<command name="vkCmdBindIndexBuffer2KHR"/>
<command name="vkGetRenderingAreaGranularityKHR"/>
<type name="VkRenderingAreaInfoKHR"/>
<command name ="vkGetDeviceImageSubresourceLayoutKHR"/>
<command name="vkGetDeviceImageSubresourceLayoutKHR"/>
<command name ="vkGetImageSubresourceLayout2KHR"/>
<command name="vkGetImageSubresourceLayout2KHR"/>
<type name="VkDeviceImageSubresourceInfoKHR"/>
<type name="VkImageSubresource2KHR"/>
<type name="VkSubresourceLayout2KHR"/>