use std::fmt;
use std::os::raw::*;
#[doc = r" Iterates through the pointer chain. Includes the item that is passed into the function."]
#[doc = r" Stops at the last `BaseOutStructure` that has a null `p_next` field."]
pub(crate) unsafe fn ptr_chain_iter<T>(ptr: &mut T) -> impl Iterator<Item = *mut BaseOutStructure> {
let ptr: *mut BaseOutStructure = ptr as *mut T as _;
(0..).scan(ptr, |p_ptr, _| {
if (*p_ptr).is_null() {
return None;
}
let n_ptr = (**p_ptr).p_next as *mut BaseOutStructure;
let old = *p_ptr;
*p_ptr = n_ptr;
Some(old)
})
}
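// Minimal usage sketch (not part of the generated bindings): walking the
// `p_next` chain of an extensible output struct. `PhysicalDeviceFeatures2` is
// used here only as an illustrative chain root; any struct that starts with
// `s_type`/`p_next` works the same way.
//
//     let mut features2 = PhysicalDeviceFeatures2::default();
//     unsafe {
//         for base in ptr_chain_iter(&mut features2) {
//             // `base` is a `*mut BaseOutStructure`; `(*base).s_type`
//             // identifies the concrete struct at this link of the chain.
//             let _ = (*base).s_type;
//         }
//     }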
pub trait Handle {
const TYPE: ObjectType;
fn as_raw(self) -> u64;
fn from_raw(x: u64) -> Self;
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VK_MAKE_VERSION.html>"]
#[macro_export]
macro_rules! vk_make_version {
( $ major : expr , $ minor : expr , $ patch : expr ) => {
(($major as u32) << 22) | (($minor as u32) << 12) | $patch as u32
};
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VK_VERSION_MAJOR.html>"]
#[macro_export]
macro_rules! vk_version_major {
( $ version : expr ) => {
($version as u32) >> 22
};
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VK_VERSION_MINOR.html>"]
#[macro_export]
macro_rules! vk_version_minor {
( $ version : expr ) => {
(($version as u32) >> 12) & 0x3ff
};
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VK_VERSION_PATCH.html>"]
#[macro_export]
macro_rules! vk_version_patch {
( $ version : expr ) => {
($version as u32) & 0xfff
};
}
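// Illustrative sketch of how the three accessor macros invert
// `vk_make_version!` (the version numbers are chosen arbitrarily):
//
//     let version = vk_make_version!(1, 1, 108);
//     assert_eq!(vk_version_major!(version), 1);
//     assert_eq!(vk_version_minor!(version), 1);
//     assert_eq!(vk_version_patch!(version), 108);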
pub type RROutput = c_ulong;
pub type VisualID = c_uint;
pub type Display = *const c_void;
pub type Window = c_ulong;
#[allow(non_camel_case_types)]
pub type xcb_connection_t = *const c_void;
#[allow(non_camel_case_types)]
pub type xcb_window_t = u32;
#[allow(non_camel_case_types)]
pub type xcb_visualid_t = u32;
pub type MirConnection = *const c_void;
pub type MirSurface = *const c_void;
pub type HINSTANCE = *const c_void;
pub type HWND = *const c_void;
#[allow(non_camel_case_types)]
pub type wl_display = c_void;
#[allow(non_camel_case_types)]
pub type wl_surface = c_void;
pub type HANDLE = *mut c_void;
pub type DWORD = c_ulong;
pub type LPCWSTR = *const u16;
#[allow(non_camel_case_types)]
pub type zx_handle_t = u32;
#[allow(non_camel_case_types)]
pub type SECURITY_ATTRIBUTES = ();
pub type ANativeWindow = c_void;
pub type AHardwareBuffer = c_void;
#[macro_export]
macro_rules! vk_bitflags_wrapped {
( $ name : ident , $ all : expr , $ flag_type : ty ) => {
impl Default for $name {
fn default() -> $name {
$name(0)
}
}
impl $name {
#[inline]
pub fn empty() -> $name {
$name(0)
}
#[inline]
pub fn all() -> $name {
$name($all)
}
#[inline]
pub fn from_raw(x: $flag_type) -> Self {
$name(x)
}
#[inline]
pub fn as_raw(self) -> $flag_type {
self.0
}
#[inline]
pub fn is_empty(self) -> bool {
self == $name::empty()
}
#[inline]
pub fn is_all(self) -> bool {
self & $name::all() == $name::all()
}
#[inline]
pub fn intersects(self, other: $name) -> bool {
self & other != $name::empty()
}
#[doc = r" Returns whether `other` is a subset of `self`"]
#[inline]
pub fn contains(self, other: $name) -> bool {
self & other == other
}
}
impl ::std::ops::BitOr for $name {
type Output = $name;
#[inline]
fn bitor(self, rhs: $name) -> $name {
$name(self.0 | rhs.0)
}
}
impl ::std::ops::BitOrAssign for $name {
#[inline]
fn bitor_assign(&mut self, rhs: $name) {
*self = *self | rhs
}
}
impl ::std::ops::BitAnd for $name {
type Output = $name;
#[inline]
fn bitand(self, rhs: $name) -> $name {
$name(self.0 & rhs.0)
}
}
impl ::std::ops::BitAndAssign for $name {
#[inline]
fn bitand_assign(&mut self, rhs: $name) {
*self = *self & rhs
}
}
impl ::std::ops::BitXor for $name {
type Output = $name;
#[inline]
fn bitxor(self, rhs: $name) -> $name {
$name(self.0 ^ rhs.0)
}
}
impl ::std::ops::BitXorAssign for $name {
#[inline]
fn bitxor_assign(&mut self, rhs: $name) {
*self = *self ^ rhs
}
}
impl ::std::ops::Sub for $name {
type Output = $name;
#[inline]
fn sub(self, rhs: $name) -> $name {
self & !rhs
}
}
impl ::std::ops::SubAssign for $name {
#[inline]
fn sub_assign(&mut self, rhs: $name) {
*self = *self - rhs
}
}
impl ::std::ops::Not for $name {
type Output = $name;
#[inline]
fn not(self) -> $name {
self ^ $name::all()
}
}
};
}
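// Sketch of how this macro is expected to be invoked for a generated flag
// type. `ExampleFlags` and its bit values are hypothetical, not part of the
// Vulkan API; the macro implements `Default` itself, so the wrapper only
// derives the comparison/copy traits it relies on.
//
//     #[repr(transparent)]
//     #[derive(Copy, Clone, PartialEq, Eq, Hash)]
//     pub struct ExampleFlags(u32);
//     vk_bitflags_wrapped!(ExampleFlags, 0b11, u32);
//
//     let a = ExampleFlags::from_raw(0b01);
//     let b = ExampleFlags::from_raw(0b10);
//     assert!((a | b).is_all());
//     assert!((a & b).is_empty());
//     assert!((a | b).contains(a));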
#[macro_export]
macro_rules! handle_nondispatchable {
( $ name : ident , $ ty : ident ) => {
handle_nondispatchable!($name, $ty, doc = "");
};
( $ name : ident , $ ty : ident , $ doc_link : meta ) => {
#[repr(transparent)]
#[derive(Eq, PartialEq, Ord, PartialOrd, Clone, Copy, Hash, Default)]
#[$doc_link]
pub struct $name(u64);
impl Handle for $name {
const TYPE: ObjectType = ObjectType::$ty;
fn as_raw(self) -> u64 {
self.0 as u64
}
fn from_raw(x: u64) -> Self {
$name(x as _)
}
}
impl $name {
pub fn null() -> $name {
$name(0)
}
}
impl fmt::Pointer for $name {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "0x{:x}", self.0)
}
}
impl fmt::Debug for $name {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "0x{:x}", self.0)
}
}
};
}
#[macro_export]
macro_rules! define_handle {
( $ name : ident , $ ty : ident ) => {
define_handle!($name, $ty, doc = "");
};
( $ name : ident , $ ty : ident , $ doc_link : meta ) => {
#[repr(transparent)]
#[derive(Eq, PartialEq, Ord, PartialOrd, Clone, Copy, Hash)]
#[$doc_link]
pub struct $name(*mut u8);
impl Default for $name {
fn default() -> $name {
$name::null()
}
}
impl Handle for $name {
const TYPE: ObjectType = ObjectType::$ty;
fn as_raw(self) -> u64 {
self.0 as u64
}
fn from_raw(x: u64) -> Self {
$name(x as _)
}
}
unsafe impl Send for $name {}
unsafe impl Sync for $name {}
impl $name {
pub fn null() -> Self {
$name(::std::ptr::null_mut())
}
}
impl fmt::Pointer for $name {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Pointer::fmt(&self.0, f)
}
}
impl fmt::Debug for $name {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Debug::fmt(&self.0, f)
}
}
};
}
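// Sketch of the expected expansion sites for the two handle macros.
// `Instance` and `Semaphore` are real handle names defined elsewhere in this
// module; the invocations below are illustrative only.
//
//     define_handle!(Instance, INSTANCE);            // dispatchable: pointer-sized
//     handle_nondispatchable!(Semaphore, SEMAPHORE); // non-dispatchable: 64-bit
//
//     let s = Semaphore::null();
//     assert_eq!(s.as_raw(), 0);
//     assert_eq!(Semaphore::from_raw(0), s);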
#[allow(non_camel_case_types)]
pub type PFN_vkGetInstanceProcAddr =
extern "system" fn(instance: Instance, p_name: *const c_char) -> PFN_vkVoidFunction;
pub struct StaticFn {
pub get_instance_proc_addr:
extern "system" fn(instance: Instance, p_name: *const c_char) -> PFN_vkVoidFunction,
}
unsafe impl Send for StaticFn {}
unsafe impl Sync for StaticFn {}
impl ::std::clone::Clone for StaticFn {
fn clone(&self) -> Self {
StaticFn {
get_instance_proc_addr: self.get_instance_proc_addr,
}
}
}
impl StaticFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
StaticFn {
get_instance_proc_addr: unsafe {
extern "system" fn get_instance_proc_addr(
_instance: Instance,
_p_name: *const c_char,
) -> PFN_vkVoidFunction {
panic!(concat!(
"Unable to load ",
stringify!(get_instance_proc_addr)
))
}
let raw_name = stringify!(vkGetInstanceProcAddr);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_instance_proc_addr
} else {
::std::mem::transmute(val)
}
},
}
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetInstanceProcAddr.html>"]
pub unsafe fn get_instance_proc_addr(
&self,
instance: Instance,
p_name: *const c_char,
) -> PFN_vkVoidFunction {
(self.get_instance_proc_addr)(instance, p_name)
}
}
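// Sketch of how `StaticFn::load` is meant to be driven: the closure maps a
// C-string symbol name to a function pointer, returning null when the symbol
// is missing so the panicking stub above stays in place as the fallback.
// `lookup_symbol` is a hypothetical caller-supplied helper (e.g. backed by a
// dynamic-library loader such as `libloading`), not something this module
// provides.
//
//     let static_fn = StaticFn::load(|name: &::std::ffi::CStr| {
//         lookup_symbol(name) // -> *const c_void, null if unresolved
//     });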
#[allow(non_camel_case_types)]
pub type PFN_vkCreateInstance = extern "system" fn(
p_create_info: *const InstanceCreateInfo,
p_allocator: *const AllocationCallbacks,
p_instance: *mut Instance,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkEnumerateInstanceExtensionProperties = extern "system" fn(
p_layer_name: *const c_char,
p_property_count: *mut u32,
p_properties: *mut ExtensionProperties,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkEnumerateInstanceLayerProperties =
extern "system" fn(p_property_count: *mut u32, p_properties: *mut LayerProperties) -> Result;
pub struct EntryFnV1_0 {
pub create_instance: extern "system" fn(
p_create_info: *const InstanceCreateInfo,
p_allocator: *const AllocationCallbacks,
p_instance: *mut Instance,
) -> Result,
pub enumerate_instance_extension_properties: extern "system" fn(
p_layer_name: *const c_char,
p_property_count: *mut u32,
p_properties: *mut ExtensionProperties,
) -> Result,
pub enumerate_instance_layer_properties: extern "system" fn(
p_property_count: *mut u32,
p_properties: *mut LayerProperties,
) -> Result,
}
unsafe impl Send for EntryFnV1_0 {}
unsafe impl Sync for EntryFnV1_0 {}
impl ::std::clone::Clone for EntryFnV1_0 {
fn clone(&self) -> Self {
EntryFnV1_0 {
create_instance: self.create_instance,
enumerate_instance_extension_properties: self.enumerate_instance_extension_properties,
enumerate_instance_layer_properties: self.enumerate_instance_layer_properties,
}
}
}
impl EntryFnV1_0 {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
EntryFnV1_0 {
create_instance: unsafe {
extern "system" fn create_instance(
_p_create_info: *const InstanceCreateInfo,
_p_allocator: *const AllocationCallbacks,
_p_instance: *mut Instance,
) -> Result {
panic!(concat!("Unable to load ", stringify!(create_instance)))
}
let raw_name = stringify!(vkCreateInstance);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
create_instance
} else {
::std::mem::transmute(val)
}
},
enumerate_instance_extension_properties: unsafe {
extern "system" fn enumerate_instance_extension_properties(
_p_layer_name: *const c_char,
_p_property_count: *mut u32,
_p_properties: *mut ExtensionProperties,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(enumerate_instance_extension_properties)
))
}
let raw_name = stringify!(vkEnumerateInstanceExtensionProperties);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
enumerate_instance_extension_properties
} else {
::std::mem::transmute(val)
}
},
enumerate_instance_layer_properties: unsafe {
extern "system" fn enumerate_instance_layer_properties(
_p_property_count: *mut u32,
_p_properties: *mut LayerProperties,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(enumerate_instance_layer_properties)
))
}
let raw_name = stringify!(vkEnumerateInstanceLayerProperties);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
enumerate_instance_layer_properties
} else {
::std::mem::transmute(val)
}
},
}
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCreateInstance.html>"]
pub unsafe fn create_instance(
&self,
p_create_info: *const InstanceCreateInfo,
p_allocator: *const AllocationCallbacks,
p_instance: *mut Instance,
) -> Result {
(self.create_instance)(p_create_info, p_allocator, p_instance)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkEnumerateInstanceExtensionProperties.html>"]
pub unsafe fn enumerate_instance_extension_properties(
&self,
p_layer_name: *const c_char,
p_property_count: *mut u32,
p_properties: *mut ExtensionProperties,
) -> Result {
(self.enumerate_instance_extension_properties)(p_layer_name, p_property_count, p_properties)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkEnumerateInstanceLayerProperties.html>"]
pub unsafe fn enumerate_instance_layer_properties(
&self,
p_property_count: *mut u32,
p_properties: *mut LayerProperties,
) -> Result {
(self.enumerate_instance_layer_properties)(p_property_count, p_properties)
}
}
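// Sketch of the bootstrap chain from `StaticFn` to the Vulkan 1.0 entry
// functions: every global command is resolved through
// `vkGetInstanceProcAddr(NULL, ...)`, which is what the closure below
// forwards to. `static_fn` is assumed to come from the `StaticFn::load`
// sketch above; the snippet is illustrative, not part of the generated
// bindings.
//
//     let entry_fn = EntryFnV1_0::load(|name| unsafe {
//         ::std::mem::transmute(
//             static_fn.get_instance_proc_addr(Instance::null(), name.as_ptr()),
//         )
//     });
//     let mut count = 0u32;
//     unsafe {
//         entry_fn.enumerate_instance_layer_properties(&mut count, ::std::ptr::null_mut());
//     }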
#[allow(non_camel_case_types)]
pub type PFN_vkDestroyInstance =
extern "system" fn(instance: Instance, p_allocator: *const AllocationCallbacks) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkEnumeratePhysicalDevices = extern "system" fn(
instance: Instance,
p_physical_device_count: *mut u32,
p_physical_devices: *mut PhysicalDevice,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkGetPhysicalDeviceFeatures = extern "system" fn(
physical_device: PhysicalDevice,
p_features: *mut PhysicalDeviceFeatures,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkGetPhysicalDeviceFormatProperties = extern "system" fn(
physical_device: PhysicalDevice,
format: Format,
p_format_properties: *mut FormatProperties,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkGetPhysicalDeviceImageFormatProperties = extern "system" fn(
physical_device: PhysicalDevice,
format: Format,
ty: ImageType,
tiling: ImageTiling,
usage: ImageUsageFlags,
flags: ImageCreateFlags,
p_image_format_properties: *mut ImageFormatProperties,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkGetPhysicalDeviceProperties = extern "system" fn(
physical_device: PhysicalDevice,
p_properties: *mut PhysicalDeviceProperties,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkGetPhysicalDeviceQueueFamilyProperties = extern "system" fn(
physical_device: PhysicalDevice,
p_queue_family_property_count: *mut u32,
p_queue_family_properties: *mut QueueFamilyProperties,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkGetPhysicalDeviceMemoryProperties = extern "system" fn(
physical_device: PhysicalDevice,
p_memory_properties: *mut PhysicalDeviceMemoryProperties,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkGetDeviceProcAddr =
extern "system" fn(device: Device, p_name: *const c_char) -> PFN_vkVoidFunction;
#[allow(non_camel_case_types)]
pub type PFN_vkCreateDevice = extern "system" fn(
physical_device: PhysicalDevice,
p_create_info: *const DeviceCreateInfo,
p_allocator: *const AllocationCallbacks,
p_device: *mut Device,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkEnumerateDeviceExtensionProperties = extern "system" fn(
physical_device: PhysicalDevice,
p_layer_name: *const c_char,
p_property_count: *mut u32,
p_properties: *mut ExtensionProperties,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkEnumerateDeviceLayerProperties = extern "system" fn(
physical_device: PhysicalDevice,
p_property_count: *mut u32,
p_properties: *mut LayerProperties,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkGetPhysicalDeviceSparseImageFormatProperties = extern "system" fn(
physical_device: PhysicalDevice,
format: Format,
ty: ImageType,
samples: SampleCountFlags,
usage: ImageUsageFlags,
tiling: ImageTiling,
p_property_count: *mut u32,
p_properties: *mut SparseImageFormatProperties,
) -> c_void;
pub struct InstanceFnV1_0 {
pub destroy_instance:
extern "system" fn(instance: Instance, p_allocator: *const AllocationCallbacks) -> c_void,
pub enumerate_physical_devices: extern "system" fn(
instance: Instance,
p_physical_device_count: *mut u32,
p_physical_devices: *mut PhysicalDevice,
) -> Result,
pub get_physical_device_features: extern "system" fn(
physical_device: PhysicalDevice,
p_features: *mut PhysicalDeviceFeatures,
) -> c_void,
pub get_physical_device_format_properties: extern "system" fn(
physical_device: PhysicalDevice,
format: Format,
p_format_properties: *mut FormatProperties,
) -> c_void,
pub get_physical_device_image_format_properties: extern "system" fn(
physical_device: PhysicalDevice,
format: Format,
ty: ImageType,
tiling: ImageTiling,
usage: ImageUsageFlags,
flags: ImageCreateFlags,
p_image_format_properties: *mut ImageFormatProperties,
) -> Result,
pub get_physical_device_properties: extern "system" fn(
physical_device: PhysicalDevice,
p_properties: *mut PhysicalDeviceProperties,
) -> c_void,
pub get_physical_device_queue_family_properties: extern "system" fn(
physical_device: PhysicalDevice,
p_queue_family_property_count: *mut u32,
p_queue_family_properties: *mut QueueFamilyProperties,
) -> c_void,
pub get_physical_device_memory_properties: extern "system" fn(
physical_device: PhysicalDevice,
p_memory_properties: *mut PhysicalDeviceMemoryProperties,
) -> c_void,
pub get_device_proc_addr:
extern "system" fn(device: Device, p_name: *const c_char) -> PFN_vkVoidFunction,
pub create_device: extern "system" fn(
physical_device: PhysicalDevice,
p_create_info: *const DeviceCreateInfo,
p_allocator: *const AllocationCallbacks,
p_device: *mut Device,
) -> Result,
pub enumerate_device_extension_properties: extern "system" fn(
physical_device: PhysicalDevice,
p_layer_name: *const c_char,
p_property_count: *mut u32,
p_properties: *mut ExtensionProperties,
) -> Result,
pub enumerate_device_layer_properties: extern "system" fn(
physical_device: PhysicalDevice,
p_property_count: *mut u32,
p_properties: *mut LayerProperties,
) -> Result,
pub get_physical_device_sparse_image_format_properties: extern "system" fn(
physical_device: PhysicalDevice,
format: Format,
ty: ImageType,
samples: SampleCountFlags,
usage: ImageUsageFlags,
tiling: ImageTiling,
p_property_count: *mut u32,
p_properties: *mut SparseImageFormatProperties,
) -> c_void,
}
unsafe impl Send for InstanceFnV1_0 {}
unsafe impl Sync for InstanceFnV1_0 {}
impl ::std::clone::Clone for InstanceFnV1_0 {
fn clone(&self) -> Self {
InstanceFnV1_0 {
destroy_instance: self.destroy_instance,
enumerate_physical_devices: self.enumerate_physical_devices,
get_physical_device_features: self.get_physical_device_features,
get_physical_device_format_properties: self.get_physical_device_format_properties,
get_physical_device_image_format_properties: self
.get_physical_device_image_format_properties,
get_physical_device_properties: self.get_physical_device_properties,
get_physical_device_queue_family_properties: self
.get_physical_device_queue_family_properties,
get_physical_device_memory_properties: self.get_physical_device_memory_properties,
get_device_proc_addr: self.get_device_proc_addr,
create_device: self.create_device,
enumerate_device_extension_properties: self.enumerate_device_extension_properties,
enumerate_device_layer_properties: self.enumerate_device_layer_properties,
get_physical_device_sparse_image_format_properties: self
.get_physical_device_sparse_image_format_properties,
}
}
}
impl InstanceFnV1_0 {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
InstanceFnV1_0 {
destroy_instance: unsafe {
extern "system" fn destroy_instance(
_instance: Instance,
_p_allocator: *const AllocationCallbacks,
) -> c_void {
panic!(concat!("Unable to load ", stringify!(destroy_instance)))
}
let raw_name = stringify!(vkDestroyInstance);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
destroy_instance
} else {
::std::mem::transmute(val)
}
},
enumerate_physical_devices: unsafe {
extern "system" fn enumerate_physical_devices(
_instance: Instance,
_p_physical_device_count: *mut u32,
_p_physical_devices: *mut PhysicalDevice,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(enumerate_physical_devices)
))
}
let raw_name = stringify!(vkEnumeratePhysicalDevices);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
enumerate_physical_devices
} else {
::std::mem::transmute(val)
}
},
get_physical_device_features: unsafe {
extern "system" fn get_physical_device_features(
_physical_device: PhysicalDevice,
_p_features: *mut PhysicalDeviceFeatures,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(get_physical_device_features)
))
}
let raw_name = stringify!(vkGetPhysicalDeviceFeatures);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_physical_device_features
} else {
::std::mem::transmute(val)
}
},
get_physical_device_format_properties: unsafe {
extern "system" fn get_physical_device_format_properties(
_physical_device: PhysicalDevice,
_format: Format,
_p_format_properties: *mut FormatProperties,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(get_physical_device_format_properties)
))
}
let raw_name = stringify!(vkGetPhysicalDeviceFormatProperties);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_physical_device_format_properties
} else {
::std::mem::transmute(val)
}
},
get_physical_device_image_format_properties: unsafe {
extern "system" fn get_physical_device_image_format_properties(
_physical_device: PhysicalDevice,
_format: Format,
_ty: ImageType,
_tiling: ImageTiling,
_usage: ImageUsageFlags,
_flags: ImageCreateFlags,
_p_image_format_properties: *mut ImageFormatProperties,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(get_physical_device_image_format_properties)
))
}
let raw_name = stringify!(vkGetPhysicalDeviceImageFormatProperties);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_physical_device_image_format_properties
} else {
::std::mem::transmute(val)
}
},
get_physical_device_properties: unsafe {
extern "system" fn get_physical_device_properties(
_physical_device: PhysicalDevice,
_p_properties: *mut PhysicalDeviceProperties,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(get_physical_device_properties)
))
}
let raw_name = stringify!(vkGetPhysicalDeviceProperties);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_physical_device_properties
} else {
::std::mem::transmute(val)
}
},
get_physical_device_queue_family_properties: unsafe {
extern "system" fn get_physical_device_queue_family_properties(
_physical_device: PhysicalDevice,
_p_queue_family_property_count: *mut u32,
_p_queue_family_properties: *mut QueueFamilyProperties,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(get_physical_device_queue_family_properties)
))
}
let raw_name = stringify!(vkGetPhysicalDeviceQueueFamilyProperties);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_physical_device_queue_family_properties
} else {
::std::mem::transmute(val)
}
},
get_physical_device_memory_properties: unsafe {
extern "system" fn get_physical_device_memory_properties(
_physical_device: PhysicalDevice,
_p_memory_properties: *mut PhysicalDeviceMemoryProperties,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(get_physical_device_memory_properties)
))
}
let raw_name = stringify!(vkGetPhysicalDeviceMemoryProperties);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_physical_device_memory_properties
} else {
::std::mem::transmute(val)
}
},
get_device_proc_addr: unsafe {
extern "system" fn get_device_proc_addr(
_device: Device,
_p_name: *const c_char,
) -> PFN_vkVoidFunction {
panic!(concat!("Unable to load ", stringify!(get_device_proc_addr)))
}
let raw_name = stringify!(vkGetDeviceProcAddr);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_device_proc_addr
} else {
::std::mem::transmute(val)
}
},
create_device: unsafe {
extern "system" fn create_device(
_physical_device: PhysicalDevice,
_p_create_info: *const DeviceCreateInfo,
_p_allocator: *const AllocationCallbacks,
_p_device: *mut Device,
) -> Result {
panic!(concat!("Unable to load ", stringify!(create_device)))
}
let raw_name = stringify!(vkCreateDevice);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
create_device
} else {
::std::mem::transmute(val)
}
},
enumerate_device_extension_properties: unsafe {
extern "system" fn enumerate_device_extension_properties(
_physical_device: PhysicalDevice,
_p_layer_name: *const c_char,
_p_property_count: *mut u32,
_p_properties: *mut ExtensionProperties,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(enumerate_device_extension_properties)
))
}
let raw_name = stringify!(vkEnumerateDeviceExtensionProperties);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
enumerate_device_extension_properties
} else {
::std::mem::transmute(val)
}
},
enumerate_device_layer_properties: unsafe {
extern "system" fn enumerate_device_layer_properties(
_physical_device: PhysicalDevice,
_p_property_count: *mut u32,
_p_properties: *mut LayerProperties,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(enumerate_device_layer_properties)
))
}
let raw_name = stringify!(vkEnumerateDeviceLayerProperties);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
enumerate_device_layer_properties
} else {
::std::mem::transmute(val)
}
},
get_physical_device_sparse_image_format_properties: unsafe {
extern "system" fn get_physical_device_sparse_image_format_properties(
_physical_device: PhysicalDevice,
_format: Format,
_ty: ImageType,
_samples: SampleCountFlags,
_usage: ImageUsageFlags,
_tiling: ImageTiling,
_p_property_count: *mut u32,
_p_properties: *mut SparseImageFormatProperties,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(get_physical_device_sparse_image_format_properties)
))
}
let raw_name = stringify!(vkGetPhysicalDeviceSparseImageFormatProperties);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_physical_device_sparse_image_format_properties
} else {
::std::mem::transmute(val)
}
},
}
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkDestroyInstance.html>"]
pub unsafe fn destroy_instance(
&self,
instance: Instance,
p_allocator: *const AllocationCallbacks,
) -> c_void {
(self.destroy_instance)(instance, p_allocator)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkEnumeratePhysicalDevices.html>"]
pub unsafe fn enumerate_physical_devices(
&self,
instance: Instance,
p_physical_device_count: *mut u32,
p_physical_devices: *mut PhysicalDevice,
) -> Result {
(self.enumerate_physical_devices)(instance, p_physical_device_count, p_physical_devices)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetPhysicalDeviceFeatures.html>"]
pub unsafe fn get_physical_device_features(
&self,
physical_device: PhysicalDevice,
p_features: *mut PhysicalDeviceFeatures,
) -> c_void {
(self.get_physical_device_features)(physical_device, p_features)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetPhysicalDeviceFormatProperties.html>"]
pub unsafe fn get_physical_device_format_properties(
&self,
physical_device: PhysicalDevice,
format: Format,
p_format_properties: *mut FormatProperties,
) -> c_void {
(self.get_physical_device_format_properties)(physical_device, format, p_format_properties)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetPhysicalDeviceImageFormatProperties.html>"]
pub unsafe fn get_physical_device_image_format_properties(
&self,
physical_device: PhysicalDevice,
format: Format,
ty: ImageType,
tiling: ImageTiling,
usage: ImageUsageFlags,
flags: ImageCreateFlags,
p_image_format_properties: *mut ImageFormatProperties,
) -> Result {
(self.get_physical_device_image_format_properties)(
physical_device,
format,
ty,
tiling,
usage,
flags,
p_image_format_properties,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetPhysicalDeviceProperties.html>"]
pub unsafe fn get_physical_device_properties(
&self,
physical_device: PhysicalDevice,
p_properties: *mut PhysicalDeviceProperties,
) -> c_void {
(self.get_physical_device_properties)(physical_device, p_properties)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetPhysicalDeviceQueueFamilyProperties.html>"]
pub unsafe fn get_physical_device_queue_family_properties(
&self,
physical_device: PhysicalDevice,
p_queue_family_property_count: *mut u32,
p_queue_family_properties: *mut QueueFamilyProperties,
) -> c_void {
(self.get_physical_device_queue_family_properties)(
physical_device,
p_queue_family_property_count,
p_queue_family_properties,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetPhysicalDeviceMemoryProperties.html>"]
pub unsafe fn get_physical_device_memory_properties(
&self,
physical_device: PhysicalDevice,
p_memory_properties: *mut PhysicalDeviceMemoryProperties,
) -> c_void {
(self.get_physical_device_memory_properties)(physical_device, p_memory_properties)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetDeviceProcAddr.html>"]
pub unsafe fn get_device_proc_addr(
&self,
device: Device,
p_name: *const c_char,
) -> PFN_vkVoidFunction {
(self.get_device_proc_addr)(device, p_name)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCreateDevice.html>"]
pub unsafe fn create_device(
&self,
physical_device: PhysicalDevice,
p_create_info: *const DeviceCreateInfo,
p_allocator: *const AllocationCallbacks,
p_device: *mut Device,
) -> Result {
(self.create_device)(physical_device, p_create_info, p_allocator, p_device)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkEnumerateDeviceExtensionProperties.html>"]
pub unsafe fn enumerate_device_extension_properties(
&self,
physical_device: PhysicalDevice,
p_layer_name: *const c_char,
p_property_count: *mut u32,
p_properties: *mut ExtensionProperties,
) -> Result {
(self.enumerate_device_extension_properties)(
physical_device,
p_layer_name,
p_property_count,
p_properties,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkEnumerateDeviceLayerProperties.html>"]
pub unsafe fn enumerate_device_layer_properties(
&self,
physical_device: PhysicalDevice,
p_property_count: *mut u32,
p_properties: *mut LayerProperties,
) -> Result {
(self.enumerate_device_layer_properties)(physical_device, p_property_count, p_properties)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetPhysicalDeviceSparseImageFormatProperties.html>"]
pub unsafe fn get_physical_device_sparse_image_format_properties(
&self,
physical_device: PhysicalDevice,
format: Format,
ty: ImageType,
samples: SampleCountFlags,
usage: ImageUsageFlags,
tiling: ImageTiling,
p_property_count: *mut u32,
p_properties: *mut SparseImageFormatProperties,
) -> c_void {
(self.get_physical_device_sparse_image_format_properties)(
physical_device,
format,
ty,
samples,
usage,
tiling,
p_property_count,
p_properties,
)
}
}
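// Instance-level commands are resolved the same way as the entry commands
// above, except that a live `Instance` handle is passed instead of
// `Instance::null()`. Sketch only; `instance` is assumed to come from a prior
// successful `create_instance` call and `static_fn` from `StaticFn::load`.
//
//     let instance_fn = InstanceFnV1_0::load(|name| unsafe {
//         ::std::mem::transmute(static_fn.get_instance_proc_addr(instance, name.as_ptr()))
//     });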
#[allow(non_camel_case_types)]
pub type PFN_vkDestroyDevice =
extern "system" fn(device: Device, p_allocator: *const AllocationCallbacks) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkGetDeviceQueue = extern "system" fn(
device: Device,
queue_family_index: u32,
queue_index: u32,
p_queue: *mut Queue,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkQueueSubmit = extern "system" fn(
queue: Queue,
submit_count: u32,
p_submits: *const SubmitInfo,
fence: Fence,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkQueueWaitIdle = extern "system" fn(queue: Queue) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkDeviceWaitIdle = extern "system" fn(device: Device) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkAllocateMemory = extern "system" fn(
device: Device,
p_allocate_info: *const MemoryAllocateInfo,
p_allocator: *const AllocationCallbacks,
p_memory: *mut DeviceMemory,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkFreeMemory = extern "system" fn(
device: Device,
memory: DeviceMemory,
p_allocator: *const AllocationCallbacks,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkMapMemory = extern "system" fn(
device: Device,
memory: DeviceMemory,
offset: DeviceSize,
size: DeviceSize,
flags: MemoryMapFlags,
pp_data: *mut *mut c_void,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkUnmapMemory = extern "system" fn(device: Device, memory: DeviceMemory) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkFlushMappedMemoryRanges = extern "system" fn(
device: Device,
memory_range_count: u32,
p_memory_ranges: *const MappedMemoryRange,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkInvalidateMappedMemoryRanges = extern "system" fn(
device: Device,
memory_range_count: u32,
p_memory_ranges: *const MappedMemoryRange,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkGetDeviceMemoryCommitment = extern "system" fn(
device: Device,
memory: DeviceMemory,
p_committed_memory_in_bytes: *mut DeviceSize,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkBindBufferMemory = extern "system" fn(
device: Device,
buffer: Buffer,
memory: DeviceMemory,
memory_offset: DeviceSize,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkBindImageMemory = extern "system" fn(
device: Device,
image: Image,
memory: DeviceMemory,
memory_offset: DeviceSize,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkGetBufferMemoryRequirements = extern "system" fn(
device: Device,
buffer: Buffer,
p_memory_requirements: *mut MemoryRequirements,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkGetImageMemoryRequirements = extern "system" fn(
device: Device,
image: Image,
p_memory_requirements: *mut MemoryRequirements,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkGetImageSparseMemoryRequirements = extern "system" fn(
device: Device,
image: Image,
p_sparse_memory_requirement_count: *mut u32,
p_sparse_memory_requirements: *mut SparseImageMemoryRequirements,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkQueueBindSparse = extern "system" fn(
queue: Queue,
bind_info_count: u32,
p_bind_info: *const BindSparseInfo,
fence: Fence,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkCreateFence = extern "system" fn(
device: Device,
p_create_info: *const FenceCreateInfo,
p_allocator: *const AllocationCallbacks,
p_fence: *mut Fence,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkDestroyFence = extern "system" fn(
device: Device,
fence: Fence,
p_allocator: *const AllocationCallbacks,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkResetFences =
extern "system" fn(device: Device, fence_count: u32, p_fences: *const Fence) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkGetFenceStatus = extern "system" fn(device: Device, fence: Fence) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkWaitForFences = extern "system" fn(
device: Device,
fence_count: u32,
p_fences: *const Fence,
wait_all: Bool32,
timeout: u64,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkCreateSemaphore = extern "system" fn(
device: Device,
p_create_info: *const SemaphoreCreateInfo,
p_allocator: *const AllocationCallbacks,
p_semaphore: *mut Semaphore,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkDestroySemaphore = extern "system" fn(
device: Device,
semaphore: Semaphore,
p_allocator: *const AllocationCallbacks,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCreateEvent = extern "system" fn(
device: Device,
p_create_info: *const EventCreateInfo,
p_allocator: *const AllocationCallbacks,
p_event: *mut Event,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkDestroyEvent = extern "system" fn(
device: Device,
event: Event,
p_allocator: *const AllocationCallbacks,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkGetEventStatus = extern "system" fn(device: Device, event: Event) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkSetEvent = extern "system" fn(device: Device, event: Event) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkResetEvent = extern "system" fn(device: Device, event: Event) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkCreateQueryPool = extern "system" fn(
device: Device,
p_create_info: *const QueryPoolCreateInfo,
p_allocator: *const AllocationCallbacks,
p_query_pool: *mut QueryPool,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkDestroyQueryPool = extern "system" fn(
device: Device,
query_pool: QueryPool,
p_allocator: *const AllocationCallbacks,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkGetQueryPoolResults = extern "system" fn(
device: Device,
query_pool: QueryPool,
first_query: u32,
query_count: u32,
data_size: usize,
p_data: *mut c_void,
stride: DeviceSize,
flags: QueryResultFlags,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkCreateBuffer = extern "system" fn(
device: Device,
p_create_info: *const BufferCreateInfo,
p_allocator: *const AllocationCallbacks,
p_buffer: *mut Buffer,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkDestroyBuffer = extern "system" fn(
device: Device,
buffer: Buffer,
p_allocator: *const AllocationCallbacks,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCreateBufferView = extern "system" fn(
device: Device,
p_create_info: *const BufferViewCreateInfo,
p_allocator: *const AllocationCallbacks,
p_view: *mut BufferView,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkDestroyBufferView = extern "system" fn(
device: Device,
buffer_view: BufferView,
p_allocator: *const AllocationCallbacks,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCreateImage = extern "system" fn(
device: Device,
p_create_info: *const ImageCreateInfo,
p_allocator: *const AllocationCallbacks,
p_image: *mut Image,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkDestroyImage = extern "system" fn(
device: Device,
image: Image,
p_allocator: *const AllocationCallbacks,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkGetImageSubresourceLayout = extern "system" fn(
device: Device,
image: Image,
p_subresource: *const ImageSubresource,
p_layout: *mut SubresourceLayout,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCreateImageView = extern "system" fn(
device: Device,
p_create_info: *const ImageViewCreateInfo,
p_allocator: *const AllocationCallbacks,
p_view: *mut ImageView,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkDestroyImageView = extern "system" fn(
device: Device,
image_view: ImageView,
p_allocator: *const AllocationCallbacks,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCreateShaderModule = extern "system" fn(
device: Device,
p_create_info: *const ShaderModuleCreateInfo,
p_allocator: *const AllocationCallbacks,
p_shader_module: *mut ShaderModule,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkDestroyShaderModule = extern "system" fn(
device: Device,
shader_module: ShaderModule,
p_allocator: *const AllocationCallbacks,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCreatePipelineCache = extern "system" fn(
device: Device,
p_create_info: *const PipelineCacheCreateInfo,
p_allocator: *const AllocationCallbacks,
p_pipeline_cache: *mut PipelineCache,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkDestroyPipelineCache = extern "system" fn(
device: Device,
pipeline_cache: PipelineCache,
p_allocator: *const AllocationCallbacks,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkGetPipelineCacheData = extern "system" fn(
device: Device,
pipeline_cache: PipelineCache,
p_data_size: *mut usize,
p_data: *mut c_void,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkMergePipelineCaches = extern "system" fn(
device: Device,
dst_cache: PipelineCache,
src_cache_count: u32,
p_src_caches: *const PipelineCache,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkCreateGraphicsPipelines = extern "system" fn(
device: Device,
pipeline_cache: PipelineCache,
create_info_count: u32,
p_create_infos: *const GraphicsPipelineCreateInfo,
p_allocator: *const AllocationCallbacks,
p_pipelines: *mut Pipeline,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkCreateComputePipelines = extern "system" fn(
device: Device,
pipeline_cache: PipelineCache,
create_info_count: u32,
p_create_infos: *const ComputePipelineCreateInfo,
p_allocator: *const AllocationCallbacks,
p_pipelines: *mut Pipeline,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkDestroyPipeline = extern "system" fn(
device: Device,
pipeline: Pipeline,
p_allocator: *const AllocationCallbacks,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCreatePipelineLayout = extern "system" fn(
device: Device,
p_create_info: *const PipelineLayoutCreateInfo,
p_allocator: *const AllocationCallbacks,
p_pipeline_layout: *mut PipelineLayout,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkDestroyPipelineLayout = extern "system" fn(
device: Device,
pipeline_layout: PipelineLayout,
p_allocator: *const AllocationCallbacks,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCreateSampler = extern "system" fn(
device: Device,
p_create_info: *const SamplerCreateInfo,
p_allocator: *const AllocationCallbacks,
p_sampler: *mut Sampler,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkDestroySampler = extern "system" fn(
device: Device,
sampler: Sampler,
p_allocator: *const AllocationCallbacks,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCreateDescriptorSetLayout = extern "system" fn(
device: Device,
p_create_info: *const DescriptorSetLayoutCreateInfo,
p_allocator: *const AllocationCallbacks,
p_set_layout: *mut DescriptorSetLayout,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkDestroyDescriptorSetLayout = extern "system" fn(
device: Device,
descriptor_set_layout: DescriptorSetLayout,
p_allocator: *const AllocationCallbacks,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCreateDescriptorPool = extern "system" fn(
device: Device,
p_create_info: *const DescriptorPoolCreateInfo,
p_allocator: *const AllocationCallbacks,
p_descriptor_pool: *mut DescriptorPool,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkDestroyDescriptorPool = extern "system" fn(
device: Device,
descriptor_pool: DescriptorPool,
p_allocator: *const AllocationCallbacks,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkResetDescriptorPool = extern "system" fn(
device: Device,
descriptor_pool: DescriptorPool,
flags: DescriptorPoolResetFlags,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkAllocateDescriptorSets = extern "system" fn(
device: Device,
p_allocate_info: *const DescriptorSetAllocateInfo,
p_descriptor_sets: *mut DescriptorSet,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkFreeDescriptorSets = extern "system" fn(
device: Device,
descriptor_pool: DescriptorPool,
descriptor_set_count: u32,
p_descriptor_sets: *const DescriptorSet,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkUpdateDescriptorSets = extern "system" fn(
device: Device,
descriptor_write_count: u32,
p_descriptor_writes: *const WriteDescriptorSet,
descriptor_copy_count: u32,
p_descriptor_copies: *const CopyDescriptorSet,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCreateFramebuffer = extern "system" fn(
device: Device,
p_create_info: *const FramebufferCreateInfo,
p_allocator: *const AllocationCallbacks,
p_framebuffer: *mut Framebuffer,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkDestroyFramebuffer = extern "system" fn(
device: Device,
framebuffer: Framebuffer,
p_allocator: *const AllocationCallbacks,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCreateRenderPass = extern "system" fn(
device: Device,
p_create_info: *const RenderPassCreateInfo,
p_allocator: *const AllocationCallbacks,
p_render_pass: *mut RenderPass,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkDestroyRenderPass = extern "system" fn(
device: Device,
render_pass: RenderPass,
p_allocator: *const AllocationCallbacks,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkGetRenderAreaGranularity = extern "system" fn(
device: Device,
render_pass: RenderPass,
p_granularity: *mut Extent2D,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCreateCommandPool = extern "system" fn(
device: Device,
p_create_info: *const CommandPoolCreateInfo,
p_allocator: *const AllocationCallbacks,
p_command_pool: *mut CommandPool,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkDestroyCommandPool = extern "system" fn(
device: Device,
command_pool: CommandPool,
p_allocator: *const AllocationCallbacks,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkResetCommandPool = extern "system" fn(
device: Device,
command_pool: CommandPool,
flags: CommandPoolResetFlags,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkAllocateCommandBuffers = extern "system" fn(
device: Device,
p_allocate_info: *const CommandBufferAllocateInfo,
p_command_buffers: *mut CommandBuffer,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkFreeCommandBuffers = extern "system" fn(
device: Device,
command_pool: CommandPool,
command_buffer_count: u32,
p_command_buffers: *const CommandBuffer,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkBeginCommandBuffer = extern "system" fn(
command_buffer: CommandBuffer,
p_begin_info: *const CommandBufferBeginInfo,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkEndCommandBuffer = extern "system" fn(command_buffer: CommandBuffer) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkResetCommandBuffer =
extern "system" fn(command_buffer: CommandBuffer, flags: CommandBufferResetFlags) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdBindPipeline = extern "system" fn(
command_buffer: CommandBuffer,
pipeline_bind_point: PipelineBindPoint,
pipeline: Pipeline,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetViewport = extern "system" fn(
command_buffer: CommandBuffer,
first_viewport: u32,
viewport_count: u32,
p_viewports: *const Viewport,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetScissor = extern "system" fn(
command_buffer: CommandBuffer,
first_scissor: u32,
scissor_count: u32,
p_scissors: *const Rect2D,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetLineWidth =
extern "system" fn(command_buffer: CommandBuffer, line_width: f32) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetDepthBias = extern "system" fn(
command_buffer: CommandBuffer,
depth_bias_constant_factor: f32,
depth_bias_clamp: f32,
depth_bias_slope_factor: f32,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetBlendConstants =
extern "system" fn(command_buffer: CommandBuffer, blend_constants: &[f32; 4]) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetDepthBounds = extern "system" fn(
command_buffer: CommandBuffer,
min_depth_bounds: f32,
max_depth_bounds: f32,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetStencilCompareMask = extern "system" fn(
command_buffer: CommandBuffer,
face_mask: StencilFaceFlags,
compare_mask: u32,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetStencilWriteMask = extern "system" fn(
command_buffer: CommandBuffer,
face_mask: StencilFaceFlags,
write_mask: u32,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetStencilReference = extern "system" fn(
command_buffer: CommandBuffer,
face_mask: StencilFaceFlags,
reference: u32,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdBindDescriptorSets = extern "system" fn(
command_buffer: CommandBuffer,
pipeline_bind_point: PipelineBindPoint,
layout: PipelineLayout,
first_set: u32,
descriptor_set_count: u32,
p_descriptor_sets: *const DescriptorSet,
dynamic_offset_count: u32,
p_dynamic_offsets: *const u32,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdBindIndexBuffer = extern "system" fn(
command_buffer: CommandBuffer,
buffer: Buffer,
offset: DeviceSize,
index_type: IndexType,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdBindVertexBuffers = extern "system" fn(
command_buffer: CommandBuffer,
first_binding: u32,
binding_count: u32,
p_buffers: *const Buffer,
p_offsets: *const DeviceSize,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdDraw = extern "system" fn(
command_buffer: CommandBuffer,
vertex_count: u32,
instance_count: u32,
first_vertex: u32,
first_instance: u32,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdDrawIndexed = extern "system" fn(
command_buffer: CommandBuffer,
index_count: u32,
instance_count: u32,
first_index: u32,
vertex_offset: i32,
first_instance: u32,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdDrawIndirect = extern "system" fn(
command_buffer: CommandBuffer,
buffer: Buffer,
offset: DeviceSize,
draw_count: u32,
stride: u32,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdDrawIndexedIndirect = extern "system" fn(
command_buffer: CommandBuffer,
buffer: Buffer,
offset: DeviceSize,
draw_count: u32,
stride: u32,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdDispatch = extern "system" fn(
command_buffer: CommandBuffer,
group_count_x: u32,
group_count_y: u32,
group_count_z: u32,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdDispatchIndirect =
extern "system" fn(command_buffer: CommandBuffer, buffer: Buffer, offset: DeviceSize) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdCopyBuffer = extern "system" fn(
command_buffer: CommandBuffer,
src_buffer: Buffer,
dst_buffer: Buffer,
region_count: u32,
p_regions: *const BufferCopy,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdCopyImage = extern "system" fn(
command_buffer: CommandBuffer,
src_image: Image,
src_image_layout: ImageLayout,
dst_image: Image,
dst_image_layout: ImageLayout,
region_count: u32,
p_regions: *const ImageCopy,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdBlitImage = extern "system" fn(
command_buffer: CommandBuffer,
src_image: Image,
src_image_layout: ImageLayout,
dst_image: Image,
dst_image_layout: ImageLayout,
region_count: u32,
p_regions: *const ImageBlit,
filter: Filter,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdCopyBufferToImage = extern "system" fn(
command_buffer: CommandBuffer,
src_buffer: Buffer,
dst_image: Image,
dst_image_layout: ImageLayout,
region_count: u32,
p_regions: *const BufferImageCopy,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdCopyImageToBuffer = extern "system" fn(
command_buffer: CommandBuffer,
src_image: Image,
src_image_layout: ImageLayout,
dst_buffer: Buffer,
region_count: u32,
p_regions: *const BufferImageCopy,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdUpdateBuffer = extern "system" fn(
command_buffer: CommandBuffer,
dst_buffer: Buffer,
dst_offset: DeviceSize,
data_size: DeviceSize,
p_data: *const c_void,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdFillBuffer = extern "system" fn(
command_buffer: CommandBuffer,
dst_buffer: Buffer,
dst_offset: DeviceSize,
size: DeviceSize,
data: u32,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdClearColorImage = extern "system" fn(
command_buffer: CommandBuffer,
image: Image,
image_layout: ImageLayout,
p_color: *const ClearColorValue,
range_count: u32,
p_ranges: *const ImageSubresourceRange,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdClearDepthStencilImage = extern "system" fn(
command_buffer: CommandBuffer,
image: Image,
image_layout: ImageLayout,
p_depth_stencil: *const ClearDepthStencilValue,
range_count: u32,
p_ranges: *const ImageSubresourceRange,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdClearAttachments = extern "system" fn(
command_buffer: CommandBuffer,
attachment_count: u32,
p_attachments: *const ClearAttachment,
rect_count: u32,
p_rects: *const ClearRect,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdResolveImage = extern "system" fn(
command_buffer: CommandBuffer,
src_image: Image,
src_image_layout: ImageLayout,
dst_image: Image,
dst_image_layout: ImageLayout,
region_count: u32,
p_regions: *const ImageResolve,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetEvent = extern "system" fn(
command_buffer: CommandBuffer,
event: Event,
stage_mask: PipelineStageFlags,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdResetEvent = extern "system" fn(
command_buffer: CommandBuffer,
event: Event,
stage_mask: PipelineStageFlags,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdWaitEvents = extern "system" fn(
command_buffer: CommandBuffer,
event_count: u32,
p_events: *const Event,
src_stage_mask: PipelineStageFlags,
dst_stage_mask: PipelineStageFlags,
memory_barrier_count: u32,
p_memory_barriers: *const MemoryBarrier,
buffer_memory_barrier_count: u32,
p_buffer_memory_barriers: *const BufferMemoryBarrier,
image_memory_barrier_count: u32,
p_image_memory_barriers: *const ImageMemoryBarrier,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdPipelineBarrier = extern "system" fn(
command_buffer: CommandBuffer,
src_stage_mask: PipelineStageFlags,
dst_stage_mask: PipelineStageFlags,
dependency_flags: DependencyFlags,
memory_barrier_count: u32,
p_memory_barriers: *const MemoryBarrier,
buffer_memory_barrier_count: u32,
p_buffer_memory_barriers: *const BufferMemoryBarrier,
image_memory_barrier_count: u32,
p_image_memory_barriers: *const ImageMemoryBarrier,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdBeginQuery = extern "system" fn(
command_buffer: CommandBuffer,
query_pool: QueryPool,
query: u32,
flags: QueryControlFlags,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdEndQuery =
extern "system" fn(command_buffer: CommandBuffer, query_pool: QueryPool, query: u32) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdResetQueryPool = extern "system" fn(
command_buffer: CommandBuffer,
query_pool: QueryPool,
first_query: u32,
query_count: u32,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdWriteTimestamp = extern "system" fn(
command_buffer: CommandBuffer,
pipeline_stage: PipelineStageFlags,
query_pool: QueryPool,
query: u32,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdCopyQueryPoolResults = extern "system" fn(
command_buffer: CommandBuffer,
query_pool: QueryPool,
first_query: u32,
query_count: u32,
dst_buffer: Buffer,
dst_offset: DeviceSize,
stride: DeviceSize,
flags: QueryResultFlags,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdPushConstants = extern "system" fn(
command_buffer: CommandBuffer,
layout: PipelineLayout,
stage_flags: ShaderStageFlags,
offset: u32,
size: u32,
p_values: *const c_void,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdBeginRenderPass = extern "system" fn(
command_buffer: CommandBuffer,
p_render_pass_begin: *const RenderPassBeginInfo,
contents: SubpassContents,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdNextSubpass =
extern "system" fn(command_buffer: CommandBuffer, contents: SubpassContents) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdEndRenderPass = extern "system" fn(command_buffer: CommandBuffer) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdExecuteCommands = extern "system" fn(
command_buffer: CommandBuffer,
command_buffer_count: u32,
p_command_buffers: *const CommandBuffer,
) -> c_void;
pub struct DeviceFnV1_0 {
pub destroy_device:
extern "system" fn(device: Device, p_allocator: *const AllocationCallbacks) -> c_void,
pub get_device_queue: extern "system" fn(
device: Device,
queue_family_index: u32,
queue_index: u32,
p_queue: *mut Queue,
) -> c_void,
pub queue_submit: extern "system" fn(
queue: Queue,
submit_count: u32,
p_submits: *const SubmitInfo,
fence: Fence,
) -> Result,
pub queue_wait_idle: extern "system" fn(queue: Queue) -> Result,
pub device_wait_idle: extern "system" fn(device: Device) -> Result,
pub allocate_memory: extern "system" fn(
device: Device,
p_allocate_info: *const MemoryAllocateInfo,
p_allocator: *const AllocationCallbacks,
p_memory: *mut DeviceMemory,
) -> Result,
pub free_memory: extern "system" fn(
device: Device,
memory: DeviceMemory,
p_allocator: *const AllocationCallbacks,
) -> c_void,
pub map_memory: extern "system" fn(
device: Device,
memory: DeviceMemory,
offset: DeviceSize,
size: DeviceSize,
flags: MemoryMapFlags,
pp_data: *mut *mut c_void,
) -> Result,
pub unmap_memory: extern "system" fn(device: Device, memory: DeviceMemory) -> c_void,
pub flush_mapped_memory_ranges: extern "system" fn(
device: Device,
memory_range_count: u32,
p_memory_ranges: *const MappedMemoryRange,
) -> Result,
pub invalidate_mapped_memory_ranges: extern "system" fn(
device: Device,
memory_range_count: u32,
p_memory_ranges: *const MappedMemoryRange,
) -> Result,
pub get_device_memory_commitment: extern "system" fn(
device: Device,
memory: DeviceMemory,
p_committed_memory_in_bytes: *mut DeviceSize,
) -> c_void,
pub bind_buffer_memory: extern "system" fn(
device: Device,
buffer: Buffer,
memory: DeviceMemory,
memory_offset: DeviceSize,
) -> Result,
pub bind_image_memory: extern "system" fn(
device: Device,
image: Image,
memory: DeviceMemory,
memory_offset: DeviceSize,
) -> Result,
pub get_buffer_memory_requirements: extern "system" fn(
device: Device,
buffer: Buffer,
p_memory_requirements: *mut MemoryRequirements,
) -> c_void,
pub get_image_memory_requirements: extern "system" fn(
device: Device,
image: Image,
p_memory_requirements: *mut MemoryRequirements,
) -> c_void,
pub get_image_sparse_memory_requirements: extern "system" fn(
device: Device,
image: Image,
p_sparse_memory_requirement_count: *mut u32,
p_sparse_memory_requirements: *mut SparseImageMemoryRequirements,
) -> c_void,
pub queue_bind_sparse: extern "system" fn(
queue: Queue,
bind_info_count: u32,
p_bind_info: *const BindSparseInfo,
fence: Fence,
) -> Result,
pub create_fence: extern "system" fn(
device: Device,
p_create_info: *const FenceCreateInfo,
p_allocator: *const AllocationCallbacks,
p_fence: *mut Fence,
) -> Result,
pub destroy_fence: extern "system" fn(
device: Device,
fence: Fence,
p_allocator: *const AllocationCallbacks,
) -> c_void,
pub reset_fences:
extern "system" fn(device: Device, fence_count: u32, p_fences: *const Fence) -> Result,
pub get_fence_status: extern "system" fn(device: Device, fence: Fence) -> Result,
pub wait_for_fences: extern "system" fn(
device: Device,
fence_count: u32,
p_fences: *const Fence,
wait_all: Bool32,
timeout: u64,
) -> Result,
pub create_semaphore: extern "system" fn(
device: Device,
p_create_info: *const SemaphoreCreateInfo,
p_allocator: *const AllocationCallbacks,
p_semaphore: *mut Semaphore,
) -> Result,
pub destroy_semaphore: extern "system" fn(
device: Device,
semaphore: Semaphore,
p_allocator: *const AllocationCallbacks,
) -> c_void,
pub create_event: extern "system" fn(
device: Device,
p_create_info: *const EventCreateInfo,
p_allocator: *const AllocationCallbacks,
p_event: *mut Event,
) -> Result,
pub destroy_event: extern "system" fn(
device: Device,
event: Event,
p_allocator: *const AllocationCallbacks,
) -> c_void,
pub get_event_status: extern "system" fn(device: Device, event: Event) -> Result,
pub set_event: extern "system" fn(device: Device, event: Event) -> Result,
pub reset_event: extern "system" fn(device: Device, event: Event) -> Result,
pub create_query_pool: extern "system" fn(
device: Device,
p_create_info: *const QueryPoolCreateInfo,
p_allocator: *const AllocationCallbacks,
p_query_pool: *mut QueryPool,
) -> Result,
pub destroy_query_pool: extern "system" fn(
device: Device,
query_pool: QueryPool,
p_allocator: *const AllocationCallbacks,
) -> c_void,
pub get_query_pool_results: extern "system" fn(
device: Device,
query_pool: QueryPool,
first_query: u32,
query_count: u32,
data_size: usize,
p_data: *mut c_void,
stride: DeviceSize,
flags: QueryResultFlags,
) -> Result,
pub create_buffer: extern "system" fn(
device: Device,
p_create_info: *const BufferCreateInfo,
p_allocator: *const AllocationCallbacks,
p_buffer: *mut Buffer,
) -> Result,
pub destroy_buffer: extern "system" fn(
device: Device,
buffer: Buffer,
p_allocator: *const AllocationCallbacks,
) -> c_void,
pub create_buffer_view: extern "system" fn(
device: Device,
p_create_info: *const BufferViewCreateInfo,
p_allocator: *const AllocationCallbacks,
p_view: *mut BufferView,
) -> Result,
pub destroy_buffer_view: extern "system" fn(
device: Device,
buffer_view: BufferView,
p_allocator: *const AllocationCallbacks,
) -> c_void,
pub create_image: extern "system" fn(
device: Device,
p_create_info: *const ImageCreateInfo,
p_allocator: *const AllocationCallbacks,
p_image: *mut Image,
) -> Result,
pub destroy_image: extern "system" fn(
device: Device,
image: Image,
p_allocator: *const AllocationCallbacks,
) -> c_void,
pub get_image_subresource_layout: extern "system" fn(
device: Device,
image: Image,
p_subresource: *const ImageSubresource,
p_layout: *mut SubresourceLayout,
) -> c_void,
pub create_image_view: extern "system" fn(
device: Device,
p_create_info: *const ImageViewCreateInfo,
p_allocator: *const AllocationCallbacks,
p_view: *mut ImageView,
) -> Result,
pub destroy_image_view: extern "system" fn(
device: Device,
image_view: ImageView,
p_allocator: *const AllocationCallbacks,
) -> c_void,
pub create_shader_module: extern "system" fn(
device: Device,
p_create_info: *const ShaderModuleCreateInfo,
p_allocator: *const AllocationCallbacks,
p_shader_module: *mut ShaderModule,
) -> Result,
pub destroy_shader_module: extern "system" fn(
device: Device,
shader_module: ShaderModule,
p_allocator: *const AllocationCallbacks,
) -> c_void,
pub create_pipeline_cache: extern "system" fn(
device: Device,
p_create_info: *const PipelineCacheCreateInfo,
p_allocator: *const AllocationCallbacks,
p_pipeline_cache: *mut PipelineCache,
) -> Result,
pub destroy_pipeline_cache: extern "system" fn(
device: Device,
pipeline_cache: PipelineCache,
p_allocator: *const AllocationCallbacks,
) -> c_void,
pub get_pipeline_cache_data: extern "system" fn(
device: Device,
pipeline_cache: PipelineCache,
p_data_size: *mut usize,
p_data: *mut c_void,
) -> Result,
pub merge_pipeline_caches: extern "system" fn(
device: Device,
dst_cache: PipelineCache,
src_cache_count: u32,
p_src_caches: *const PipelineCache,
) -> Result,
pub create_graphics_pipelines: extern "system" fn(
device: Device,
pipeline_cache: PipelineCache,
create_info_count: u32,
p_create_infos: *const GraphicsPipelineCreateInfo,
p_allocator: *const AllocationCallbacks,
p_pipelines: *mut Pipeline,
) -> Result,
pub create_compute_pipelines: extern "system" fn(
device: Device,
pipeline_cache: PipelineCache,
create_info_count: u32,
p_create_infos: *const ComputePipelineCreateInfo,
p_allocator: *const AllocationCallbacks,
p_pipelines: *mut Pipeline,
) -> Result,
pub destroy_pipeline: extern "system" fn(
device: Device,
pipeline: Pipeline,
p_allocator: *const AllocationCallbacks,
) -> c_void,
pub create_pipeline_layout: extern "system" fn(
device: Device,
p_create_info: *const PipelineLayoutCreateInfo,
p_allocator: *const AllocationCallbacks,
p_pipeline_layout: *mut PipelineLayout,
) -> Result,
pub destroy_pipeline_layout: extern "system" fn(
device: Device,
pipeline_layout: PipelineLayout,
p_allocator: *const AllocationCallbacks,
) -> c_void,
pub create_sampler: extern "system" fn(
device: Device,
p_create_info: *const SamplerCreateInfo,
p_allocator: *const AllocationCallbacks,
p_sampler: *mut Sampler,
) -> Result,
pub destroy_sampler: extern "system" fn(
device: Device,
sampler: Sampler,
p_allocator: *const AllocationCallbacks,
) -> c_void,
pub create_descriptor_set_layout: extern "system" fn(
device: Device,
p_create_info: *const DescriptorSetLayoutCreateInfo,
p_allocator: *const AllocationCallbacks,
p_set_layout: *mut DescriptorSetLayout,
) -> Result,
pub destroy_descriptor_set_layout: extern "system" fn(
device: Device,
descriptor_set_layout: DescriptorSetLayout,
p_allocator: *const AllocationCallbacks,
) -> c_void,
pub create_descriptor_pool: extern "system" fn(
device: Device,
p_create_info: *const DescriptorPoolCreateInfo,
p_allocator: *const AllocationCallbacks,
p_descriptor_pool: *mut DescriptorPool,
) -> Result,
pub destroy_descriptor_pool: extern "system" fn(
device: Device,
descriptor_pool: DescriptorPool,
p_allocator: *const AllocationCallbacks,
) -> c_void,
pub reset_descriptor_pool: extern "system" fn(
device: Device,
descriptor_pool: DescriptorPool,
flags: DescriptorPoolResetFlags,
) -> Result,
pub allocate_descriptor_sets: extern "system" fn(
device: Device,
p_allocate_info: *const DescriptorSetAllocateInfo,
p_descriptor_sets: *mut DescriptorSet,
) -> Result,
pub free_descriptor_sets: extern "system" fn(
device: Device,
descriptor_pool: DescriptorPool,
descriptor_set_count: u32,
p_descriptor_sets: *const DescriptorSet,
) -> Result,
pub update_descriptor_sets: extern "system" fn(
device: Device,
descriptor_write_count: u32,
p_descriptor_writes: *const WriteDescriptorSet,
descriptor_copy_count: u32,
p_descriptor_copies: *const CopyDescriptorSet,
) -> c_void,
pub create_framebuffer: extern "system" fn(
device: Device,
p_create_info: *const FramebufferCreateInfo,
p_allocator: *const AllocationCallbacks,
p_framebuffer: *mut Framebuffer,
) -> Result,
pub destroy_framebuffer: extern "system" fn(
device: Device,
framebuffer: Framebuffer,
p_allocator: *const AllocationCallbacks,
) -> c_void,
pub create_render_pass: extern "system" fn(
device: Device,
p_create_info: *const RenderPassCreateInfo,
p_allocator: *const AllocationCallbacks,
p_render_pass: *mut RenderPass,
) -> Result,
pub destroy_render_pass: extern "system" fn(
device: Device,
render_pass: RenderPass,
p_allocator: *const AllocationCallbacks,
) -> c_void,
pub get_render_area_granularity: extern "system" fn(
device: Device,
render_pass: RenderPass,
p_granularity: *mut Extent2D,
) -> c_void,
pub create_command_pool: extern "system" fn(
device: Device,
p_create_info: *const CommandPoolCreateInfo,
p_allocator: *const AllocationCallbacks,
p_command_pool: *mut CommandPool,
) -> Result,
pub destroy_command_pool: extern "system" fn(
device: Device,
command_pool: CommandPool,
p_allocator: *const AllocationCallbacks,
) -> c_void,
pub reset_command_pool: extern "system" fn(
device: Device,
command_pool: CommandPool,
flags: CommandPoolResetFlags,
) -> Result,
pub allocate_command_buffers: extern "system" fn(
device: Device,
p_allocate_info: *const CommandBufferAllocateInfo,
p_command_buffers: *mut CommandBuffer,
) -> Result,
pub free_command_buffers: extern "system" fn(
device: Device,
command_pool: CommandPool,
command_buffer_count: u32,
p_command_buffers: *const CommandBuffer,
) -> c_void,
pub begin_command_buffer: extern "system" fn(
command_buffer: CommandBuffer,
p_begin_info: *const CommandBufferBeginInfo,
) -> Result,
pub end_command_buffer: extern "system" fn(command_buffer: CommandBuffer) -> Result,
pub reset_command_buffer:
extern "system" fn(command_buffer: CommandBuffer, flags: CommandBufferResetFlags) -> Result,
pub cmd_bind_pipeline: extern "system" fn(
command_buffer: CommandBuffer,
pipeline_bind_point: PipelineBindPoint,
pipeline: Pipeline,
) -> c_void,
pub cmd_set_viewport: extern "system" fn(
command_buffer: CommandBuffer,
first_viewport: u32,
viewport_count: u32,
p_viewports: *const Viewport,
) -> c_void,
pub cmd_set_scissor: extern "system" fn(
command_buffer: CommandBuffer,
first_scissor: u32,
scissor_count: u32,
p_scissors: *const Rect2D,
) -> c_void,
pub cmd_set_line_width:
extern "system" fn(command_buffer: CommandBuffer, line_width: f32) -> c_void,
pub cmd_set_depth_bias: extern "system" fn(
command_buffer: CommandBuffer,
depth_bias_constant_factor: f32,
depth_bias_clamp: f32,
depth_bias_slope_factor: f32,
) -> c_void,
pub cmd_set_blend_constants:
extern "system" fn(command_buffer: CommandBuffer, blend_constants: &[f32; 4]) -> c_void,
pub cmd_set_depth_bounds: extern "system" fn(
command_buffer: CommandBuffer,
min_depth_bounds: f32,
max_depth_bounds: f32,
) -> c_void,
pub cmd_set_stencil_compare_mask: extern "system" fn(
command_buffer: CommandBuffer,
face_mask: StencilFaceFlags,
compare_mask: u32,
) -> c_void,
pub cmd_set_stencil_write_mask: extern "system" fn(
command_buffer: CommandBuffer,
face_mask: StencilFaceFlags,
write_mask: u32,
) -> c_void,
pub cmd_set_stencil_reference: extern "system" fn(
command_buffer: CommandBuffer,
face_mask: StencilFaceFlags,
reference: u32,
) -> c_void,
pub cmd_bind_descriptor_sets: extern "system" fn(
command_buffer: CommandBuffer,
pipeline_bind_point: PipelineBindPoint,
layout: PipelineLayout,
first_set: u32,
descriptor_set_count: u32,
p_descriptor_sets: *const DescriptorSet,
dynamic_offset_count: u32,
p_dynamic_offsets: *const u32,
) -> c_void,
pub cmd_bind_index_buffer: extern "system" fn(
command_buffer: CommandBuffer,
buffer: Buffer,
offset: DeviceSize,
index_type: IndexType,
) -> c_void,
pub cmd_bind_vertex_buffers: extern "system" fn(
command_buffer: CommandBuffer,
first_binding: u32,
binding_count: u32,
p_buffers: *const Buffer,
p_offsets: *const DeviceSize,
) -> c_void,
pub cmd_draw: extern "system" fn(
command_buffer: CommandBuffer,
vertex_count: u32,
instance_count: u32,
first_vertex: u32,
first_instance: u32,
) -> c_void,
pub cmd_draw_indexed: extern "system" fn(
command_buffer: CommandBuffer,
index_count: u32,
instance_count: u32,
first_index: u32,
vertex_offset: i32,
first_instance: u32,
) -> c_void,
pub cmd_draw_indirect: extern "system" fn(
command_buffer: CommandBuffer,
buffer: Buffer,
offset: DeviceSize,
draw_count: u32,
stride: u32,
) -> c_void,
pub cmd_draw_indexed_indirect: extern "system" fn(
command_buffer: CommandBuffer,
buffer: Buffer,
offset: DeviceSize,
draw_count: u32,
stride: u32,
) -> c_void,
pub cmd_dispatch: extern "system" fn(
command_buffer: CommandBuffer,
group_count_x: u32,
group_count_y: u32,
group_count_z: u32,
) -> c_void,
pub cmd_dispatch_indirect: extern "system" fn(
command_buffer: CommandBuffer,
buffer: Buffer,
offset: DeviceSize,
) -> c_void,
pub cmd_copy_buffer: extern "system" fn(
command_buffer: CommandBuffer,
src_buffer: Buffer,
dst_buffer: Buffer,
region_count: u32,
p_regions: *const BufferCopy,
) -> c_void,
pub cmd_copy_image: extern "system" fn(
command_buffer: CommandBuffer,
src_image: Image,
src_image_layout: ImageLayout,
dst_image: Image,
dst_image_layout: ImageLayout,
region_count: u32,
p_regions: *const ImageCopy,
) -> c_void,
pub cmd_blit_image: extern "system" fn(
command_buffer: CommandBuffer,
src_image: Image,
src_image_layout: ImageLayout,
dst_image: Image,
dst_image_layout: ImageLayout,
region_count: u32,
p_regions: *const ImageBlit,
filter: Filter,
) -> c_void,
pub cmd_copy_buffer_to_image: extern "system" fn(
command_buffer: CommandBuffer,
src_buffer: Buffer,
dst_image: Image,
dst_image_layout: ImageLayout,
region_count: u32,
p_regions: *const BufferImageCopy,
) -> c_void,
pub cmd_copy_image_to_buffer: extern "system" fn(
command_buffer: CommandBuffer,
src_image: Image,
src_image_layout: ImageLayout,
dst_buffer: Buffer,
region_count: u32,
p_regions: *const BufferImageCopy,
) -> c_void,
pub cmd_update_buffer: extern "system" fn(
command_buffer: CommandBuffer,
dst_buffer: Buffer,
dst_offset: DeviceSize,
data_size: DeviceSize,
p_data: *const c_void,
) -> c_void,
pub cmd_fill_buffer: extern "system" fn(
command_buffer: CommandBuffer,
dst_buffer: Buffer,
dst_offset: DeviceSize,
size: DeviceSize,
data: u32,
) -> c_void,
pub cmd_clear_color_image: extern "system" fn(
command_buffer: CommandBuffer,
image: Image,
image_layout: ImageLayout,
p_color: *const ClearColorValue,
range_count: u32,
p_ranges: *const ImageSubresourceRange,
) -> c_void,
pub cmd_clear_depth_stencil_image: extern "system" fn(
command_buffer: CommandBuffer,
image: Image,
image_layout: ImageLayout,
p_depth_stencil: *const ClearDepthStencilValue,
range_count: u32,
p_ranges: *const ImageSubresourceRange,
) -> c_void,
pub cmd_clear_attachments: extern "system" fn(
command_buffer: CommandBuffer,
attachment_count: u32,
p_attachments: *const ClearAttachment,
rect_count: u32,
p_rects: *const ClearRect,
) -> c_void,
pub cmd_resolve_image: extern "system" fn(
command_buffer: CommandBuffer,
src_image: Image,
src_image_layout: ImageLayout,
dst_image: Image,
dst_image_layout: ImageLayout,
region_count: u32,
p_regions: *const ImageResolve,
) -> c_void,
pub cmd_set_event: extern "system" fn(
command_buffer: CommandBuffer,
event: Event,
stage_mask: PipelineStageFlags,
) -> c_void,
pub cmd_reset_event: extern "system" fn(
command_buffer: CommandBuffer,
event: Event,
stage_mask: PipelineStageFlags,
) -> c_void,
pub cmd_wait_events: extern "system" fn(
command_buffer: CommandBuffer,
event_count: u32,
p_events: *const Event,
src_stage_mask: PipelineStageFlags,
dst_stage_mask: PipelineStageFlags,
memory_barrier_count: u32,
p_memory_barriers: *const MemoryBarrier,
buffer_memory_barrier_count: u32,
p_buffer_memory_barriers: *const BufferMemoryBarrier,
image_memory_barrier_count: u32,
p_image_memory_barriers: *const ImageMemoryBarrier,
) -> c_void,
pub cmd_pipeline_barrier: extern "system" fn(
command_buffer: CommandBuffer,
src_stage_mask: PipelineStageFlags,
dst_stage_mask: PipelineStageFlags,
dependency_flags: DependencyFlags,
memory_barrier_count: u32,
p_memory_barriers: *const MemoryBarrier,
buffer_memory_barrier_count: u32,
p_buffer_memory_barriers: *const BufferMemoryBarrier,
image_memory_barrier_count: u32,
p_image_memory_barriers: *const ImageMemoryBarrier,
) -> c_void,
pub cmd_begin_query: extern "system" fn(
command_buffer: CommandBuffer,
query_pool: QueryPool,
query: u32,
flags: QueryControlFlags,
) -> c_void,
pub cmd_end_query: extern "system" fn(
command_buffer: CommandBuffer,
query_pool: QueryPool,
query: u32,
) -> c_void,
pub cmd_reset_query_pool: extern "system" fn(
command_buffer: CommandBuffer,
query_pool: QueryPool,
first_query: u32,
query_count: u32,
) -> c_void,
pub cmd_write_timestamp: extern "system" fn(
command_buffer: CommandBuffer,
pipeline_stage: PipelineStageFlags,
query_pool: QueryPool,
query: u32,
) -> c_void,
pub cmd_copy_query_pool_results: extern "system" fn(
command_buffer: CommandBuffer,
query_pool: QueryPool,
first_query: u32,
query_count: u32,
dst_buffer: Buffer,
dst_offset: DeviceSize,
stride: DeviceSize,
flags: QueryResultFlags,
) -> c_void,
pub cmd_push_constants: extern "system" fn(
command_buffer: CommandBuffer,
layout: PipelineLayout,
stage_flags: ShaderStageFlags,
offset: u32,
size: u32,
p_values: *const c_void,
) -> c_void,
pub cmd_begin_render_pass: extern "system" fn(
command_buffer: CommandBuffer,
p_render_pass_begin: *const RenderPassBeginInfo,
contents: SubpassContents,
) -> c_void,
pub cmd_next_subpass:
extern "system" fn(command_buffer: CommandBuffer, contents: SubpassContents) -> c_void,
pub cmd_end_render_pass: extern "system" fn(command_buffer: CommandBuffer) -> c_void,
pub cmd_execute_commands: extern "system" fn(
command_buffer: CommandBuffer,
command_buffer_count: u32,
p_command_buffers: *const CommandBuffer,
) -> c_void,
}
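#[doc = r" `DeviceFnV1_0` only holds plain function pointers, which are safe to send and"]
#[doc = r" share across threads."]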
unsafe impl Send for DeviceFnV1_0 {}
unsafe impl Sync for DeviceFnV1_0 {}
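#[doc = r" Field-by-field copy of the function pointer table; every field is `Copy`, so the"]
#[doc = r" clone is a cheap bitwise copy."]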
impl ::std::clone::Clone for DeviceFnV1_0 {
fn clone(&self) -> Self {
DeviceFnV1_0 {
destroy_device: self.destroy_device,
get_device_queue: self.get_device_queue,
queue_submit: self.queue_submit,
queue_wait_idle: self.queue_wait_idle,
device_wait_idle: self.device_wait_idle,
allocate_memory: self.allocate_memory,
free_memory: self.free_memory,
map_memory: self.map_memory,
unmap_memory: self.unmap_memory,
flush_mapped_memory_ranges: self.flush_mapped_memory_ranges,
invalidate_mapped_memory_ranges: self.invalidate_mapped_memory_ranges,
get_device_memory_commitment: self.get_device_memory_commitment,
bind_buffer_memory: self.bind_buffer_memory,
bind_image_memory: self.bind_image_memory,
get_buffer_memory_requirements: self.get_buffer_memory_requirements,
get_image_memory_requirements: self.get_image_memory_requirements,
get_image_sparse_memory_requirements: self.get_image_sparse_memory_requirements,
queue_bind_sparse: self.queue_bind_sparse,
create_fence: self.create_fence,
destroy_fence: self.destroy_fence,
reset_fences: self.reset_fences,
get_fence_status: self.get_fence_status,
wait_for_fences: self.wait_for_fences,
create_semaphore: self.create_semaphore,
destroy_semaphore: self.destroy_semaphore,
create_event: self.create_event,
destroy_event: self.destroy_event,
get_event_status: self.get_event_status,
set_event: self.set_event,
reset_event: self.reset_event,
create_query_pool: self.create_query_pool,
destroy_query_pool: self.destroy_query_pool,
get_query_pool_results: self.get_query_pool_results,
create_buffer: self.create_buffer,
destroy_buffer: self.destroy_buffer,
create_buffer_view: self.create_buffer_view,
destroy_buffer_view: self.destroy_buffer_view,
create_image: self.create_image,
destroy_image: self.destroy_image,
get_image_subresource_layout: self.get_image_subresource_layout,
create_image_view: self.create_image_view,
destroy_image_view: self.destroy_image_view,
create_shader_module: self.create_shader_module,
destroy_shader_module: self.destroy_shader_module,
create_pipeline_cache: self.create_pipeline_cache,
destroy_pipeline_cache: self.destroy_pipeline_cache,
get_pipeline_cache_data: self.get_pipeline_cache_data,
merge_pipeline_caches: self.merge_pipeline_caches,
create_graphics_pipelines: self.create_graphics_pipelines,
create_compute_pipelines: self.create_compute_pipelines,
destroy_pipeline: self.destroy_pipeline,
create_pipeline_layout: self.create_pipeline_layout,
destroy_pipeline_layout: self.destroy_pipeline_layout,
create_sampler: self.create_sampler,
destroy_sampler: self.destroy_sampler,
create_descriptor_set_layout: self.create_descriptor_set_layout,
destroy_descriptor_set_layout: self.destroy_descriptor_set_layout,
create_descriptor_pool: self.create_descriptor_pool,
destroy_descriptor_pool: self.destroy_descriptor_pool,
reset_descriptor_pool: self.reset_descriptor_pool,
allocate_descriptor_sets: self.allocate_descriptor_sets,
free_descriptor_sets: self.free_descriptor_sets,
update_descriptor_sets: self.update_descriptor_sets,
create_framebuffer: self.create_framebuffer,
destroy_framebuffer: self.destroy_framebuffer,
create_render_pass: self.create_render_pass,
destroy_render_pass: self.destroy_render_pass,
get_render_area_granularity: self.get_render_area_granularity,
create_command_pool: self.create_command_pool,
destroy_command_pool: self.destroy_command_pool,
reset_command_pool: self.reset_command_pool,
allocate_command_buffers: self.allocate_command_buffers,
free_command_buffers: self.free_command_buffers,
begin_command_buffer: self.begin_command_buffer,
end_command_buffer: self.end_command_buffer,
reset_command_buffer: self.reset_command_buffer,
cmd_bind_pipeline: self.cmd_bind_pipeline,
cmd_set_viewport: self.cmd_set_viewport,
cmd_set_scissor: self.cmd_set_scissor,
cmd_set_line_width: self.cmd_set_line_width,
cmd_set_depth_bias: self.cmd_set_depth_bias,
cmd_set_blend_constants: self.cmd_set_blend_constants,
cmd_set_depth_bounds: self.cmd_set_depth_bounds,
cmd_set_stencil_compare_mask: self.cmd_set_stencil_compare_mask,
cmd_set_stencil_write_mask: self.cmd_set_stencil_write_mask,
cmd_set_stencil_reference: self.cmd_set_stencil_reference,
cmd_bind_descriptor_sets: self.cmd_bind_descriptor_sets,
cmd_bind_index_buffer: self.cmd_bind_index_buffer,
cmd_bind_vertex_buffers: self.cmd_bind_vertex_buffers,
cmd_draw: self.cmd_draw,
cmd_draw_indexed: self.cmd_draw_indexed,
cmd_draw_indirect: self.cmd_draw_indirect,
cmd_draw_indexed_indirect: self.cmd_draw_indexed_indirect,
cmd_dispatch: self.cmd_dispatch,
cmd_dispatch_indirect: self.cmd_dispatch_indirect,
cmd_copy_buffer: self.cmd_copy_buffer,
cmd_copy_image: self.cmd_copy_image,
cmd_blit_image: self.cmd_blit_image,
cmd_copy_buffer_to_image: self.cmd_copy_buffer_to_image,
cmd_copy_image_to_buffer: self.cmd_copy_image_to_buffer,
cmd_update_buffer: self.cmd_update_buffer,
cmd_fill_buffer: self.cmd_fill_buffer,
cmd_clear_color_image: self.cmd_clear_color_image,
cmd_clear_depth_stencil_image: self.cmd_clear_depth_stencil_image,
cmd_clear_attachments: self.cmd_clear_attachments,
cmd_resolve_image: self.cmd_resolve_image,
cmd_set_event: self.cmd_set_event,
cmd_reset_event: self.cmd_reset_event,
cmd_wait_events: self.cmd_wait_events,
cmd_pipeline_barrier: self.cmd_pipeline_barrier,
cmd_begin_query: self.cmd_begin_query,
cmd_end_query: self.cmd_end_query,
cmd_reset_query_pool: self.cmd_reset_query_pool,
cmd_write_timestamp: self.cmd_write_timestamp,
cmd_copy_query_pool_results: self.cmd_copy_query_pool_results,
cmd_push_constants: self.cmd_push_constants,
cmd_begin_render_pass: self.cmd_begin_render_pass,
cmd_next_subpass: self.cmd_next_subpass,
cmd_end_render_pass: self.cmd_end_render_pass,
cmd_execute_commands: self.cmd_execute_commands,
}
}
}
impl DeviceFnV1_0 {
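    #[doc = r" Resolves every Vulkan 1.0 device-level command through the loader closure `_f`."]
    #[doc = r""]
    #[doc = r" `_f` is called once per command with its C name (for example `vkQueueSubmit`) and"]
    #[doc = r" must return the raw function pointer, or null if the symbol is unavailable. Any"]
    #[doc = r" command that resolves to null keeps a fallback stub that panics with an"]
    #[doc = r" `Unable to load ...` message when invoked."]
    #[doc = r""]
    #[doc = r" A minimal usage sketch; `instance_fn`, `device` and `get_device_proc_addr` here are"]
    #[doc = r" assumed to come from the surrounding code and are not defined by this function:"]
    #[doc = r""]
    #[doc = r" ```ignore"]
    #[doc = r" let device_fn = DeviceFnV1_0::load(|name| unsafe {"]
    #[doc = r"     ::std::mem::transmute(instance_fn.get_device_proc_addr(device, name.as_ptr()))"]
    #[doc = r" });"]
    #[doc = r" ```"]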
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
DeviceFnV1_0 {
destroy_device: unsafe {
extern "system" fn destroy_device(
_device: Device,
_p_allocator: *const AllocationCallbacks,
) -> c_void {
panic!(concat!("Unable to load ", stringify!(destroy_device)))
}
let raw_name = stringify!(vkDestroyDevice);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
destroy_device
} else {
::std::mem::transmute(val)
}
},
get_device_queue: unsafe {
extern "system" fn get_device_queue(
_device: Device,
_queue_family_index: u32,
_queue_index: u32,
_p_queue: *mut Queue,
) -> c_void {
panic!(concat!("Unable to load ", stringify!(get_device_queue)))
}
let raw_name = stringify!(vkGetDeviceQueue);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_device_queue
} else {
::std::mem::transmute(val)
}
},
queue_submit: unsafe {
extern "system" fn queue_submit(
_queue: Queue,
_submit_count: u32,
_p_submits: *const SubmitInfo,
_fence: Fence,
) -> Result {
panic!(concat!("Unable to load ", stringify!(queue_submit)))
}
let raw_name = stringify!(vkQueueSubmit);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
queue_submit
} else {
::std::mem::transmute(val)
}
},
queue_wait_idle: unsafe {
extern "system" fn queue_wait_idle(_queue: Queue) -> Result {
panic!(concat!("Unable to load ", stringify!(queue_wait_idle)))
}
let raw_name = stringify!(vkQueueWaitIdle);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
queue_wait_idle
} else {
::std::mem::transmute(val)
}
},
device_wait_idle: unsafe {
extern "system" fn device_wait_idle(_device: Device) -> Result {
panic!(concat!("Unable to load ", stringify!(device_wait_idle)))
}
let raw_name = stringify!(vkDeviceWaitIdle);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
device_wait_idle
} else {
::std::mem::transmute(val)
}
},
allocate_memory: unsafe {
extern "system" fn allocate_memory(
_device: Device,
_p_allocate_info: *const MemoryAllocateInfo,
_p_allocator: *const AllocationCallbacks,
_p_memory: *mut DeviceMemory,
) -> Result {
panic!(concat!("Unable to load ", stringify!(allocate_memory)))
}
let raw_name = stringify!(vkAllocateMemory);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
allocate_memory
} else {
::std::mem::transmute(val)
}
},
free_memory: unsafe {
extern "system" fn free_memory(
_device: Device,
_memory: DeviceMemory,
_p_allocator: *const AllocationCallbacks,
) -> c_void {
panic!(concat!("Unable to load ", stringify!(free_memory)))
}
let raw_name = stringify!(vkFreeMemory);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
free_memory
} else {
::std::mem::transmute(val)
}
},
map_memory: unsafe {
extern "system" fn map_memory(
_device: Device,
_memory: DeviceMemory,
_offset: DeviceSize,
_size: DeviceSize,
_flags: MemoryMapFlags,
_pp_data: *mut *mut c_void,
) -> Result {
panic!(concat!("Unable to load ", stringify!(map_memory)))
}
let raw_name = stringify!(vkMapMemory);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
map_memory
} else {
::std::mem::transmute(val)
}
},
unmap_memory: unsafe {
extern "system" fn unmap_memory(_device: Device, _memory: DeviceMemory) -> c_void {
panic!(concat!("Unable to load ", stringify!(unmap_memory)))
}
let raw_name = stringify!(vkUnmapMemory);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
unmap_memory
} else {
::std::mem::transmute(val)
}
},
flush_mapped_memory_ranges: unsafe {
extern "system" fn flush_mapped_memory_ranges(
_device: Device,
_memory_range_count: u32,
_p_memory_ranges: *const MappedMemoryRange,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(flush_mapped_memory_ranges)
))
}
let raw_name = stringify!(vkFlushMappedMemoryRanges);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
flush_mapped_memory_ranges
} else {
::std::mem::transmute(val)
}
},
invalidate_mapped_memory_ranges: unsafe {
extern "system" fn invalidate_mapped_memory_ranges(
_device: Device,
_memory_range_count: u32,
_p_memory_ranges: *const MappedMemoryRange,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(invalidate_mapped_memory_ranges)
))
}
let raw_name = stringify!(vkInvalidateMappedMemoryRanges);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
invalidate_mapped_memory_ranges
} else {
::std::mem::transmute(val)
}
},
get_device_memory_commitment: unsafe {
extern "system" fn get_device_memory_commitment(
_device: Device,
_memory: DeviceMemory,
_p_committed_memory_in_bytes: *mut DeviceSize,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(get_device_memory_commitment)
))
}
let raw_name = stringify!(vkGetDeviceMemoryCommitment);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_device_memory_commitment
} else {
::std::mem::transmute(val)
}
},
bind_buffer_memory: unsafe {
extern "system" fn bind_buffer_memory(
_device: Device,
_buffer: Buffer,
_memory: DeviceMemory,
_memory_offset: DeviceSize,
) -> Result {
panic!(concat!("Unable to load ", stringify!(bind_buffer_memory)))
}
let raw_name = stringify!(vkBindBufferMemory);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
bind_buffer_memory
} else {
::std::mem::transmute(val)
}
},
bind_image_memory: unsafe {
extern "system" fn bind_image_memory(
_device: Device,
_image: Image,
_memory: DeviceMemory,
_memory_offset: DeviceSize,
) -> Result {
panic!(concat!("Unable to load ", stringify!(bind_image_memory)))
}
let raw_name = stringify!(vkBindImageMemory);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
bind_image_memory
} else {
::std::mem::transmute(val)
}
},
get_buffer_memory_requirements: unsafe {
extern "system" fn get_buffer_memory_requirements(
_device: Device,
_buffer: Buffer,
_p_memory_requirements: *mut MemoryRequirements,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(get_buffer_memory_requirements)
))
}
let raw_name = stringify!(vkGetBufferMemoryRequirements);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_buffer_memory_requirements
} else {
::std::mem::transmute(val)
}
},
get_image_memory_requirements: unsafe {
extern "system" fn get_image_memory_requirements(
_device: Device,
_image: Image,
_p_memory_requirements: *mut MemoryRequirements,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(get_image_memory_requirements)
))
}
let raw_name = stringify!(vkGetImageMemoryRequirements);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_image_memory_requirements
} else {
::std::mem::transmute(val)
}
},
get_image_sparse_memory_requirements: unsafe {
extern "system" fn get_image_sparse_memory_requirements(
_device: Device,
_image: Image,
_p_sparse_memory_requirement_count: *mut u32,
_p_sparse_memory_requirements: *mut SparseImageMemoryRequirements,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(get_image_sparse_memory_requirements)
))
}
let raw_name = stringify!(vkGetImageSparseMemoryRequirements);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_image_sparse_memory_requirements
} else {
::std::mem::transmute(val)
}
},
queue_bind_sparse: unsafe {
extern "system" fn queue_bind_sparse(
_queue: Queue,
_bind_info_count: u32,
_p_bind_info: *const BindSparseInfo,
_fence: Fence,
) -> Result {
panic!(concat!("Unable to load ", stringify!(queue_bind_sparse)))
}
let raw_name = stringify!(vkQueueBindSparse);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
queue_bind_sparse
} else {
::std::mem::transmute(val)
}
},
create_fence: unsafe {
extern "system" fn create_fence(
_device: Device,
_p_create_info: *const FenceCreateInfo,
_p_allocator: *const AllocationCallbacks,
_p_fence: *mut Fence,
) -> Result {
panic!(concat!("Unable to load ", stringify!(create_fence)))
}
let raw_name = stringify!(vkCreateFence);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
create_fence
} else {
::std::mem::transmute(val)
}
},
destroy_fence: unsafe {
extern "system" fn destroy_fence(
_device: Device,
_fence: Fence,
_p_allocator: *const AllocationCallbacks,
) -> c_void {
panic!(concat!("Unable to load ", stringify!(destroy_fence)))
}
let raw_name = stringify!(vkDestroyFence);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
destroy_fence
} else {
::std::mem::transmute(val)
}
},
reset_fences: unsafe {
extern "system" fn reset_fences(
_device: Device,
_fence_count: u32,
_p_fences: *const Fence,
) -> Result {
panic!(concat!("Unable to load ", stringify!(reset_fences)))
}
let raw_name = stringify!(vkResetFences);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
reset_fences
} else {
::std::mem::transmute(val)
}
},
get_fence_status: unsafe {
extern "system" fn get_fence_status(_device: Device, _fence: Fence) -> Result {
panic!(concat!("Unable to load ", stringify!(get_fence_status)))
}
let raw_name = stringify!(vkGetFenceStatus);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_fence_status
} else {
::std::mem::transmute(val)
}
},
wait_for_fences: unsafe {
extern "system" fn wait_for_fences(
_device: Device,
_fence_count: u32,
_p_fences: *const Fence,
_wait_all: Bool32,
_timeout: u64,
) -> Result {
panic!(concat!("Unable to load ", stringify!(wait_for_fences)))
}
let raw_name = stringify!(vkWaitForFences);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
wait_for_fences
} else {
::std::mem::transmute(val)
}
},
create_semaphore: unsafe {
extern "system" fn create_semaphore(
_device: Device,
_p_create_info: *const SemaphoreCreateInfo,
_p_allocator: *const AllocationCallbacks,
_p_semaphore: *mut Semaphore,
) -> Result {
panic!(concat!("Unable to load ", stringify!(create_semaphore)))
}
let raw_name = stringify!(vkCreateSemaphore);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
create_semaphore
} else {
::std::mem::transmute(val)
}
},
destroy_semaphore: unsafe {
extern "system" fn destroy_semaphore(
_device: Device,
_semaphore: Semaphore,
_p_allocator: *const AllocationCallbacks,
) -> c_void {
panic!(concat!("Unable to load ", stringify!(destroy_semaphore)))
}
let raw_name = stringify!(vkDestroySemaphore);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
destroy_semaphore
} else {
::std::mem::transmute(val)
}
},
create_event: unsafe {
extern "system" fn create_event(
_device: Device,
_p_create_info: *const EventCreateInfo,
_p_allocator: *const AllocationCallbacks,
_p_event: *mut Event,
) -> Result {
panic!(concat!("Unable to load ", stringify!(create_event)))
}
let raw_name = stringify!(vkCreateEvent);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
create_event
} else {
::std::mem::transmute(val)
}
},
destroy_event: unsafe {
extern "system" fn destroy_event(
_device: Device,
_event: Event,
_p_allocator: *const AllocationCallbacks,
) -> c_void {
panic!(concat!("Unable to load ", stringify!(destroy_event)))
}
let raw_name = stringify!(vkDestroyEvent);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
destroy_event
} else {
::std::mem::transmute(val)
}
},
get_event_status: unsafe {
extern "system" fn get_event_status(_device: Device, _event: Event) -> Result {
panic!(concat!("Unable to load ", stringify!(get_event_status)))
}
let raw_name = stringify!(vkGetEventStatus);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_event_status
} else {
::std::mem::transmute(val)
}
},
set_event: unsafe {
extern "system" fn set_event(_device: Device, _event: Event) -> Result {
panic!(concat!("Unable to load ", stringify!(set_event)))
}
let raw_name = stringify!(vkSetEvent);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
set_event
} else {
::std::mem::transmute(val)
}
},
reset_event: unsafe {
extern "system" fn reset_event(_device: Device, _event: Event) -> Result {
panic!(concat!("Unable to load ", stringify!(reset_event)))
}
let raw_name = stringify!(vkResetEvent);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
reset_event
} else {
::std::mem::transmute(val)
}
},
create_query_pool: unsafe {
extern "system" fn create_query_pool(
_device: Device,
_p_create_info: *const QueryPoolCreateInfo,
_p_allocator: *const AllocationCallbacks,
_p_query_pool: *mut QueryPool,
) -> Result {
panic!(concat!("Unable to load ", stringify!(create_query_pool)))
}
let raw_name = stringify!(vkCreateQueryPool);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
create_query_pool
} else {
::std::mem::transmute(val)
}
},
destroy_query_pool: unsafe {
extern "system" fn destroy_query_pool(
_device: Device,
_query_pool: QueryPool,
_p_allocator: *const AllocationCallbacks,
) -> c_void {
panic!(concat!("Unable to load ", stringify!(destroy_query_pool)))
}
let raw_name = stringify!(vkDestroyQueryPool);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
destroy_query_pool
} else {
::std::mem::transmute(val)
}
},
get_query_pool_results: unsafe {
extern "system" fn get_query_pool_results(
_device: Device,
_query_pool: QueryPool,
_first_query: u32,
_query_count: u32,
_data_size: usize,
_p_data: *mut c_void,
_stride: DeviceSize,
_flags: QueryResultFlags,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(get_query_pool_results)
))
}
let raw_name = stringify!(vkGetQueryPoolResults);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_query_pool_results
} else {
::std::mem::transmute(val)
}
},
create_buffer: unsafe {
extern "system" fn create_buffer(
_device: Device,
_p_create_info: *const BufferCreateInfo,
_p_allocator: *const AllocationCallbacks,
_p_buffer: *mut Buffer,
) -> Result {
panic!(concat!("Unable to load ", stringify!(create_buffer)))
}
let raw_name = stringify!(vkCreateBuffer);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
create_buffer
} else {
::std::mem::transmute(val)
}
},
destroy_buffer: unsafe {
extern "system" fn destroy_buffer(
_device: Device,
_buffer: Buffer,
_p_allocator: *const AllocationCallbacks,
) -> c_void {
panic!(concat!("Unable to load ", stringify!(destroy_buffer)))
}
let raw_name = stringify!(vkDestroyBuffer);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
destroy_buffer
} else {
::std::mem::transmute(val)
}
},
create_buffer_view: unsafe {
extern "system" fn create_buffer_view(
_device: Device,
_p_create_info: *const BufferViewCreateInfo,
_p_allocator: *const AllocationCallbacks,
_p_view: *mut BufferView,
) -> Result {
panic!(concat!("Unable to load ", stringify!(create_buffer_view)))
}
let raw_name = stringify!(vkCreateBufferView);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
create_buffer_view
} else {
::std::mem::transmute(val)
}
},
destroy_buffer_view: unsafe {
extern "system" fn destroy_buffer_view(
_device: Device,
_buffer_view: BufferView,
_p_allocator: *const AllocationCallbacks,
) -> c_void {
panic!(concat!("Unable to load ", stringify!(destroy_buffer_view)))
}
let raw_name = stringify!(vkDestroyBufferView);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
destroy_buffer_view
} else {
::std::mem::transmute(val)
}
},
create_image: unsafe {
extern "system" fn create_image(
_device: Device,
_p_create_info: *const ImageCreateInfo,
_p_allocator: *const AllocationCallbacks,
_p_image: *mut Image,
) -> Result {
panic!(concat!("Unable to load ", stringify!(create_image)))
}
let raw_name = stringify!(vkCreateImage);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
create_image
} else {
::std::mem::transmute(val)
}
},
destroy_image: unsafe {
extern "system" fn destroy_image(
_device: Device,
_image: Image,
_p_allocator: *const AllocationCallbacks,
) -> c_void {
panic!(concat!("Unable to load ", stringify!(destroy_image)))
}
let raw_name = stringify!(vkDestroyImage);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
destroy_image
} else {
::std::mem::transmute(val)
}
},
get_image_subresource_layout: unsafe {
extern "system" fn get_image_subresource_layout(
_device: Device,
_image: Image,
_p_subresource: *const ImageSubresource,
_p_layout: *mut SubresourceLayout,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(get_image_subresource_layout)
))
}
let raw_name = stringify!(vkGetImageSubresourceLayout);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_image_subresource_layout
} else {
::std::mem::transmute(val)
}
},
create_image_view: unsafe {
extern "system" fn create_image_view(
_device: Device,
_p_create_info: *const ImageViewCreateInfo,
_p_allocator: *const AllocationCallbacks,
_p_view: *mut ImageView,
) -> Result {
panic!(concat!("Unable to load ", stringify!(create_image_view)))
}
let raw_name = stringify!(vkCreateImageView);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
create_image_view
} else {
::std::mem::transmute(val)
}
},
destroy_image_view: unsafe {
extern "system" fn destroy_image_view(
_device: Device,
_image_view: ImageView,
_p_allocator: *const AllocationCallbacks,
) -> c_void {
panic!(concat!("Unable to load ", stringify!(destroy_image_view)))
}
let raw_name = stringify!(vkDestroyImageView);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
destroy_image_view
} else {
::std::mem::transmute(val)
}
},
create_shader_module: unsafe {
extern "system" fn create_shader_module(
_device: Device,
_p_create_info: *const ShaderModuleCreateInfo,
_p_allocator: *const AllocationCallbacks,
_p_shader_module: *mut ShaderModule,
) -> Result {
panic!(concat!("Unable to load ", stringify!(create_shader_module)))
}
let raw_name = stringify!(vkCreateShaderModule);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
create_shader_module
} else {
::std::mem::transmute(val)
}
},
destroy_shader_module: unsafe {
extern "system" fn destroy_shader_module(
_device: Device,
_shader_module: ShaderModule,
_p_allocator: *const AllocationCallbacks,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(destroy_shader_module)
))
}
let raw_name = stringify!(vkDestroyShaderModule);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
destroy_shader_module
} else {
::std::mem::transmute(val)
}
},
create_pipeline_cache: unsafe {
extern "system" fn create_pipeline_cache(
_device: Device,
_p_create_info: *const PipelineCacheCreateInfo,
_p_allocator: *const AllocationCallbacks,
_p_pipeline_cache: *mut PipelineCache,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(create_pipeline_cache)
))
}
let raw_name = stringify!(vkCreatePipelineCache);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
create_pipeline_cache
} else {
::std::mem::transmute(val)
}
},
destroy_pipeline_cache: unsafe {
extern "system" fn destroy_pipeline_cache(
_device: Device,
_pipeline_cache: PipelineCache,
_p_allocator: *const AllocationCallbacks,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(destroy_pipeline_cache)
))
}
let raw_name = stringify!(vkDestroyPipelineCache);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
destroy_pipeline_cache
} else {
::std::mem::transmute(val)
}
},
get_pipeline_cache_data: unsafe {
extern "system" fn get_pipeline_cache_data(
_device: Device,
_pipeline_cache: PipelineCache,
_p_data_size: *mut usize,
_p_data: *mut c_void,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(get_pipeline_cache_data)
))
}
let raw_name = stringify!(vkGetPipelineCacheData);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_pipeline_cache_data
} else {
::std::mem::transmute(val)
}
},
merge_pipeline_caches: unsafe {
extern "system" fn merge_pipeline_caches(
_device: Device,
_dst_cache: PipelineCache,
_src_cache_count: u32,
_p_src_caches: *const PipelineCache,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(merge_pipeline_caches)
))
}
let raw_name = stringify!(vkMergePipelineCaches);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
merge_pipeline_caches
} else {
::std::mem::transmute(val)
}
},
create_graphics_pipelines: unsafe {
extern "system" fn create_graphics_pipelines(
_device: Device,
_pipeline_cache: PipelineCache,
_create_info_count: u32,
_p_create_infos: *const GraphicsPipelineCreateInfo,
_p_allocator: *const AllocationCallbacks,
_p_pipelines: *mut Pipeline,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(create_graphics_pipelines)
))
}
let raw_name = stringify!(vkCreateGraphicsPipelines);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
create_graphics_pipelines
} else {
::std::mem::transmute(val)
}
},
create_compute_pipelines: unsafe {
extern "system" fn create_compute_pipelines(
_device: Device,
_pipeline_cache: PipelineCache,
_create_info_count: u32,
_p_create_infos: *const ComputePipelineCreateInfo,
_p_allocator: *const AllocationCallbacks,
_p_pipelines: *mut Pipeline,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(create_compute_pipelines)
))
}
let raw_name = stringify!(vkCreateComputePipelines);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
create_compute_pipelines
} else {
::std::mem::transmute(val)
}
},
destroy_pipeline: unsafe {
extern "system" fn destroy_pipeline(
_device: Device,
_pipeline: Pipeline,
_p_allocator: *const AllocationCallbacks,
) -> c_void {
panic!(concat!("Unable to load ", stringify!(destroy_pipeline)))
}
let raw_name = stringify!(vkDestroyPipeline);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
destroy_pipeline
} else {
::std::mem::transmute(val)
}
},
create_pipeline_layout: unsafe {
extern "system" fn create_pipeline_layout(
_device: Device,
_p_create_info: *const PipelineLayoutCreateInfo,
_p_allocator: *const AllocationCallbacks,
_p_pipeline_layout: *mut PipelineLayout,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(create_pipeline_layout)
))
}
let raw_name = stringify!(vkCreatePipelineLayout);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
create_pipeline_layout
} else {
::std::mem::transmute(val)
}
},
destroy_pipeline_layout: unsafe {
extern "system" fn destroy_pipeline_layout(
_device: Device,
_pipeline_layout: PipelineLayout,
_p_allocator: *const AllocationCallbacks,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(destroy_pipeline_layout)
))
}
let raw_name = stringify!(vkDestroyPipelineLayout);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
destroy_pipeline_layout
} else {
::std::mem::transmute(val)
}
},
create_sampler: unsafe {
extern "system" fn create_sampler(
_device: Device,
_p_create_info: *const SamplerCreateInfo,
_p_allocator: *const AllocationCallbacks,
_p_sampler: *mut Sampler,
) -> Result {
panic!(concat!("Unable to load ", stringify!(create_sampler)))
}
let raw_name = stringify!(vkCreateSampler);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
create_sampler
} else {
::std::mem::transmute(val)
}
},
destroy_sampler: unsafe {
extern "system" fn destroy_sampler(
_device: Device,
_sampler: Sampler,
_p_allocator: *const AllocationCallbacks,
) -> c_void {
panic!(concat!("Unable to load ", stringify!(destroy_sampler)))
}
let raw_name = stringify!(vkDestroySampler);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
destroy_sampler
} else {
::std::mem::transmute(val)
}
},
create_descriptor_set_layout: unsafe {
extern "system" fn create_descriptor_set_layout(
_device: Device,
_p_create_info: *const DescriptorSetLayoutCreateInfo,
_p_allocator: *const AllocationCallbacks,
_p_set_layout: *mut DescriptorSetLayout,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(create_descriptor_set_layout)
))
}
let raw_name = stringify!(vkCreateDescriptorSetLayout);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
create_descriptor_set_layout
} else {
::std::mem::transmute(val)
}
},
destroy_descriptor_set_layout: unsafe {
extern "system" fn destroy_descriptor_set_layout(
_device: Device,
_descriptor_set_layout: DescriptorSetLayout,
_p_allocator: *const AllocationCallbacks,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(destroy_descriptor_set_layout)
))
}
let raw_name = stringify!(vkDestroyDescriptorSetLayout);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
destroy_descriptor_set_layout
} else {
::std::mem::transmute(val)
}
},
create_descriptor_pool: unsafe {
extern "system" fn create_descriptor_pool(
_device: Device,
_p_create_info: *const DescriptorPoolCreateInfo,
_p_allocator: *const AllocationCallbacks,
_p_descriptor_pool: *mut DescriptorPool,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(create_descriptor_pool)
))
}
let raw_name = stringify!(vkCreateDescriptorPool);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
create_descriptor_pool
} else {
::std::mem::transmute(val)
}
},
destroy_descriptor_pool: unsafe {
extern "system" fn destroy_descriptor_pool(
_device: Device,
_descriptor_pool: DescriptorPool,
_p_allocator: *const AllocationCallbacks,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(destroy_descriptor_pool)
))
}
let raw_name = stringify!(vkDestroyDescriptorPool);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
destroy_descriptor_pool
} else {
::std::mem::transmute(val)
}
},
reset_descriptor_pool: unsafe {
extern "system" fn reset_descriptor_pool(
_device: Device,
_descriptor_pool: DescriptorPool,
_flags: DescriptorPoolResetFlags,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(reset_descriptor_pool)
))
}
let raw_name = stringify!(vkResetDescriptorPool);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
reset_descriptor_pool
} else {
::std::mem::transmute(val)
}
},
allocate_descriptor_sets: unsafe {
extern "system" fn allocate_descriptor_sets(
_device: Device,
_p_allocate_info: *const DescriptorSetAllocateInfo,
_p_descriptor_sets: *mut DescriptorSet,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(allocate_descriptor_sets)
))
}
let raw_name = stringify!(vkAllocateDescriptorSets);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
allocate_descriptor_sets
} else {
::std::mem::transmute(val)
}
},
free_descriptor_sets: unsafe {
extern "system" fn free_descriptor_sets(
_device: Device,
_descriptor_pool: DescriptorPool,
_descriptor_set_count: u32,
_p_descriptor_sets: *const DescriptorSet,
) -> Result {
panic!(concat!("Unable to load ", stringify!(free_descriptor_sets)))
}
let raw_name = stringify!(vkFreeDescriptorSets);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
free_descriptor_sets
} else {
::std::mem::transmute(val)
}
},
update_descriptor_sets: unsafe {
extern "system" fn update_descriptor_sets(
_device: Device,
_descriptor_write_count: u32,
_p_descriptor_writes: *const WriteDescriptorSet,
_descriptor_copy_count: u32,
_p_descriptor_copies: *const CopyDescriptorSet,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(update_descriptor_sets)
))
}
let raw_name = stringify!(vkUpdateDescriptorSets);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
update_descriptor_sets
} else {
::std::mem::transmute(val)
}
},
create_framebuffer: unsafe {
extern "system" fn create_framebuffer(
_device: Device,
_p_create_info: *const FramebufferCreateInfo,
_p_allocator: *const AllocationCallbacks,
_p_framebuffer: *mut Framebuffer,
) -> Result {
panic!(concat!("Unable to load ", stringify!(create_framebuffer)))
}
let raw_name = stringify!(vkCreateFramebuffer);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
create_framebuffer
} else {
::std::mem::transmute(val)
}
},
destroy_framebuffer: unsafe {
extern "system" fn destroy_framebuffer(
_device: Device,
_framebuffer: Framebuffer,
_p_allocator: *const AllocationCallbacks,
) -> c_void {
panic!(concat!("Unable to load ", stringify!(destroy_framebuffer)))
}
let raw_name = stringify!(vkDestroyFramebuffer);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
destroy_framebuffer
} else {
::std::mem::transmute(val)
}
},
create_render_pass: unsafe {
extern "system" fn create_render_pass(
_device: Device,
_p_create_info: *const RenderPassCreateInfo,
_p_allocator: *const AllocationCallbacks,
_p_render_pass: *mut RenderPass,
) -> Result {
panic!(concat!("Unable to load ", stringify!(create_render_pass)))
}
let raw_name = stringify!(vkCreateRenderPass);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
create_render_pass
} else {
::std::mem::transmute(val)
}
},
destroy_render_pass: unsafe {
extern "system" fn destroy_render_pass(
_device: Device,
_render_pass: RenderPass,
_p_allocator: *const AllocationCallbacks,
) -> c_void {
panic!(concat!("Unable to load ", stringify!(destroy_render_pass)))
}
let raw_name = stringify!(vkDestroyRenderPass);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
destroy_render_pass
} else {
::std::mem::transmute(val)
}
},
get_render_area_granularity: unsafe {
extern "system" fn get_render_area_granularity(
_device: Device,
_render_pass: RenderPass,
_p_granularity: *mut Extent2D,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(get_render_area_granularity)
))
}
let raw_name = stringify!(vkGetRenderAreaGranularity);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_render_area_granularity
} else {
::std::mem::transmute(val)
}
},
create_command_pool: unsafe {
extern "system" fn create_command_pool(
_device: Device,
_p_create_info: *const CommandPoolCreateInfo,
_p_allocator: *const AllocationCallbacks,
_p_command_pool: *mut CommandPool,
) -> Result {
panic!(concat!("Unable to load ", stringify!(create_command_pool)))
}
let raw_name = stringify!(vkCreateCommandPool);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
create_command_pool
} else {
::std::mem::transmute(val)
}
},
destroy_command_pool: unsafe {
extern "system" fn destroy_command_pool(
_device: Device,
_command_pool: CommandPool,
_p_allocator: *const AllocationCallbacks,
) -> c_void {
panic!(concat!("Unable to load ", stringify!(destroy_command_pool)))
}
let raw_name = stringify!(vkDestroyCommandPool);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
destroy_command_pool
} else {
::std::mem::transmute(val)
}
},
reset_command_pool: unsafe {
extern "system" fn reset_command_pool(
_device: Device,
_command_pool: CommandPool,
_flags: CommandPoolResetFlags,
) -> Result {
panic!(concat!("Unable to load ", stringify!(reset_command_pool)))
}
let raw_name = stringify!(vkResetCommandPool);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
reset_command_pool
} else {
::std::mem::transmute(val)
}
},
allocate_command_buffers: unsafe {
extern "system" fn allocate_command_buffers(
_device: Device,
_p_allocate_info: *const CommandBufferAllocateInfo,
_p_command_buffers: *mut CommandBuffer,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(allocate_command_buffers)
))
}
let raw_name = stringify!(vkAllocateCommandBuffers);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
allocate_command_buffers
} else {
::std::mem::transmute(val)
}
},
free_command_buffers: unsafe {
extern "system" fn free_command_buffers(
_device: Device,
_command_pool: CommandPool,
_command_buffer_count: u32,
_p_command_buffers: *const CommandBuffer,
) -> c_void {
panic!(concat!("Unable to load ", stringify!(free_command_buffers)))
}
let raw_name = stringify!(vkFreeCommandBuffers);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
free_command_buffers
} else {
::std::mem::transmute(val)
}
},
begin_command_buffer: unsafe {
extern "system" fn begin_command_buffer(
_command_buffer: CommandBuffer,
_p_begin_info: *const CommandBufferBeginInfo,
) -> Result {
panic!(concat!("Unable to load ", stringify!(begin_command_buffer)))
}
let raw_name = stringify!(vkBeginCommandBuffer);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
begin_command_buffer
} else {
::std::mem::transmute(val)
}
},
end_command_buffer: unsafe {
extern "system" fn end_command_buffer(_command_buffer: CommandBuffer) -> Result {
panic!(concat!("Unable to load ", stringify!(end_command_buffer)))
}
let raw_name = stringify!(vkEndCommandBuffer);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
end_command_buffer
} else {
::std::mem::transmute(val)
}
},
reset_command_buffer: unsafe {
extern "system" fn reset_command_buffer(
_command_buffer: CommandBuffer,
_flags: CommandBufferResetFlags,
) -> Result {
panic!(concat!("Unable to load ", stringify!(reset_command_buffer)))
}
let raw_name = stringify!(vkResetCommandBuffer);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
reset_command_buffer
} else {
::std::mem::transmute(val)
}
},
cmd_bind_pipeline: unsafe {
extern "system" fn cmd_bind_pipeline(
_command_buffer: CommandBuffer,
_pipeline_bind_point: PipelineBindPoint,
_pipeline: Pipeline,
) -> c_void {
panic!(concat!("Unable to load ", stringify!(cmd_bind_pipeline)))
}
let raw_name = stringify!(vkCmdBindPipeline);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_bind_pipeline
} else {
::std::mem::transmute(val)
}
},
cmd_set_viewport: unsafe {
extern "system" fn cmd_set_viewport(
_command_buffer: CommandBuffer,
_first_viewport: u32,
_viewport_count: u32,
_p_viewports: *const Viewport,
) -> c_void {
panic!(concat!("Unable to load ", stringify!(cmd_set_viewport)))
}
let raw_name = stringify!(vkCmdSetViewport);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_set_viewport
} else {
::std::mem::transmute(val)
}
},
cmd_set_scissor: unsafe {
extern "system" fn cmd_set_scissor(
_command_buffer: CommandBuffer,
_first_scissor: u32,
_scissor_count: u32,
_p_scissors: *const Rect2D,
) -> c_void {
panic!(concat!("Unable to load ", stringify!(cmd_set_scissor)))
}
let raw_name = stringify!(vkCmdSetScissor);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_set_scissor
} else {
::std::mem::transmute(val)
}
},
cmd_set_line_width: unsafe {
extern "system" fn cmd_set_line_width(
_command_buffer: CommandBuffer,
_line_width: f32,
) -> c_void {
panic!(concat!("Unable to load ", stringify!(cmd_set_line_width)))
}
let raw_name = stringify!(vkCmdSetLineWidth);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_set_line_width
} else {
::std::mem::transmute(val)
}
},
cmd_set_depth_bias: unsafe {
extern "system" fn cmd_set_depth_bias(
_command_buffer: CommandBuffer,
_depth_bias_constant_factor: f32,
_depth_bias_clamp: f32,
_depth_bias_slope_factor: f32,
) -> c_void {
panic!(concat!("Unable to load ", stringify!(cmd_set_depth_bias)))
}
let raw_name = stringify!(vkCmdSetDepthBias);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_set_depth_bias
} else {
::std::mem::transmute(val)
}
},
cmd_set_blend_constants: unsafe {
extern "system" fn cmd_set_blend_constants(
_command_buffer: CommandBuffer,
_blend_constants: &[f32; 4],
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(cmd_set_blend_constants)
))
}
let raw_name = stringify!(vkCmdSetBlendConstants);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_set_blend_constants
} else {
::std::mem::transmute(val)
}
},
cmd_set_depth_bounds: unsafe {
extern "system" fn cmd_set_depth_bounds(
_command_buffer: CommandBuffer,
_min_depth_bounds: f32,
_max_depth_bounds: f32,
) -> c_void {
panic!(concat!("Unable to load ", stringify!(cmd_set_depth_bounds)))
}
let raw_name = stringify!(vkCmdSetDepthBounds);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_set_depth_bounds
} else {
::std::mem::transmute(val)
}
},
cmd_set_stencil_compare_mask: unsafe {
extern "system" fn cmd_set_stencil_compare_mask(
_command_buffer: CommandBuffer,
_face_mask: StencilFaceFlags,
_compare_mask: u32,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(cmd_set_stencil_compare_mask)
))
}
let raw_name = stringify!(vkCmdSetStencilCompareMask);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_set_stencil_compare_mask
} else {
::std::mem::transmute(val)
}
},
cmd_set_stencil_write_mask: unsafe {
extern "system" fn cmd_set_stencil_write_mask(
_command_buffer: CommandBuffer,
_face_mask: StencilFaceFlags,
_write_mask: u32,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(cmd_set_stencil_write_mask)
))
}
let raw_name = stringify!(vkCmdSetStencilWriteMask);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_set_stencil_write_mask
} else {
::std::mem::transmute(val)
}
},
cmd_set_stencil_reference: unsafe {
extern "system" fn cmd_set_stencil_reference(
_command_buffer: CommandBuffer,
_face_mask: StencilFaceFlags,
_reference: u32,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(cmd_set_stencil_reference)
))
}
let raw_name = stringify!(vkCmdSetStencilReference);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_set_stencil_reference
} else {
::std::mem::transmute(val)
}
},
cmd_bind_descriptor_sets: unsafe {
extern "system" fn cmd_bind_descriptor_sets(
_command_buffer: CommandBuffer,
_pipeline_bind_point: PipelineBindPoint,
_layout: PipelineLayout,
_first_set: u32,
_descriptor_set_count: u32,
_p_descriptor_sets: *const DescriptorSet,
_dynamic_offset_count: u32,
_p_dynamic_offsets: *const u32,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(cmd_bind_descriptor_sets)
))
}
let raw_name = stringify!(vkCmdBindDescriptorSets);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_bind_descriptor_sets
} else {
::std::mem::transmute(val)
}
},
cmd_bind_index_buffer: unsafe {
extern "system" fn cmd_bind_index_buffer(
_command_buffer: CommandBuffer,
_buffer: Buffer,
_offset: DeviceSize,
_index_type: IndexType,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(cmd_bind_index_buffer)
))
}
let raw_name = stringify!(vkCmdBindIndexBuffer);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_bind_index_buffer
} else {
::std::mem::transmute(val)
}
},
cmd_bind_vertex_buffers: unsafe {
extern "system" fn cmd_bind_vertex_buffers(
_command_buffer: CommandBuffer,
_first_binding: u32,
_binding_count: u32,
_p_buffers: *const Buffer,
_p_offsets: *const DeviceSize,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(cmd_bind_vertex_buffers)
))
}
let raw_name = stringify!(vkCmdBindVertexBuffers);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_bind_vertex_buffers
} else {
::std::mem::transmute(val)
}
},
cmd_draw: unsafe {
extern "system" fn cmd_draw(
_command_buffer: CommandBuffer,
_vertex_count: u32,
_instance_count: u32,
_first_vertex: u32,
_first_instance: u32,
) -> c_void {
panic!(concat!("Unable to load ", stringify!(cmd_draw)))
}
let raw_name = stringify!(vkCmdDraw);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_draw
} else {
::std::mem::transmute(val)
}
},
cmd_draw_indexed: unsafe {
extern "system" fn cmd_draw_indexed(
_command_buffer: CommandBuffer,
_index_count: u32,
_instance_count: u32,
_first_index: u32,
_vertex_offset: i32,
_first_instance: u32,
) -> c_void {
panic!(concat!("Unable to load ", stringify!(cmd_draw_indexed)))
}
let raw_name = stringify!(vkCmdDrawIndexed);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_draw_indexed
} else {
::std::mem::transmute(val)
}
},
cmd_draw_indirect: unsafe {
extern "system" fn cmd_draw_indirect(
_command_buffer: CommandBuffer,
_buffer: Buffer,
_offset: DeviceSize,
_draw_count: u32,
_stride: u32,
) -> c_void {
panic!(concat!("Unable to load ", stringify!(cmd_draw_indirect)))
}
let raw_name = stringify!(vkCmdDrawIndirect);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_draw_indirect
} else {
::std::mem::transmute(val)
}
},
cmd_draw_indexed_indirect: unsafe {
extern "system" fn cmd_draw_indexed_indirect(
_command_buffer: CommandBuffer,
_buffer: Buffer,
_offset: DeviceSize,
_draw_count: u32,
_stride: u32,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(cmd_draw_indexed_indirect)
))
}
let raw_name = stringify!(vkCmdDrawIndexedIndirect);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_draw_indexed_indirect
} else {
::std::mem::transmute(val)
}
},
cmd_dispatch: unsafe {
extern "system" fn cmd_dispatch(
_command_buffer: CommandBuffer,
_group_count_x: u32,
_group_count_y: u32,
_group_count_z: u32,
) -> c_void {
panic!(concat!("Unable to load ", stringify!(cmd_dispatch)))
}
let raw_name = stringify!(vkCmdDispatch);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_dispatch
} else {
::std::mem::transmute(val)
}
},
cmd_dispatch_indirect: unsafe {
extern "system" fn cmd_dispatch_indirect(
_command_buffer: CommandBuffer,
_buffer: Buffer,
_offset: DeviceSize,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(cmd_dispatch_indirect)
))
}
let raw_name = stringify!(vkCmdDispatchIndirect);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_dispatch_indirect
} else {
::std::mem::transmute(val)
}
},
cmd_copy_buffer: unsafe {
extern "system" fn cmd_copy_buffer(
_command_buffer: CommandBuffer,
_src_buffer: Buffer,
_dst_buffer: Buffer,
_region_count: u32,
_p_regions: *const BufferCopy,
) -> c_void {
panic!(concat!("Unable to load ", stringify!(cmd_copy_buffer)))
}
let raw_name = stringify!(vkCmdCopyBuffer);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_copy_buffer
} else {
::std::mem::transmute(val)
}
},
cmd_copy_image: unsafe {
extern "system" fn cmd_copy_image(
_command_buffer: CommandBuffer,
_src_image: Image,
_src_image_layout: ImageLayout,
_dst_image: Image,
_dst_image_layout: ImageLayout,
_region_count: u32,
_p_regions: *const ImageCopy,
) -> c_void {
panic!(concat!("Unable to load ", stringify!(cmd_copy_image)))
}
let raw_name = stringify!(vkCmdCopyImage);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_copy_image
} else {
::std::mem::transmute(val)
}
},
cmd_blit_image: unsafe {
extern "system" fn cmd_blit_image(
_command_buffer: CommandBuffer,
_src_image: Image,
_src_image_layout: ImageLayout,
_dst_image: Image,
_dst_image_layout: ImageLayout,
_region_count: u32,
_p_regions: *const ImageBlit,
_filter: Filter,
) -> c_void {
panic!(concat!("Unable to load ", stringify!(cmd_blit_image)))
}
let raw_name = stringify!(vkCmdBlitImage);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_blit_image
} else {
::std::mem::transmute(val)
}
},
cmd_copy_buffer_to_image: unsafe {
extern "system" fn cmd_copy_buffer_to_image(
_command_buffer: CommandBuffer,
_src_buffer: Buffer,
_dst_image: Image,
_dst_image_layout: ImageLayout,
_region_count: u32,
_p_regions: *const BufferImageCopy,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(cmd_copy_buffer_to_image)
))
}
let raw_name = stringify!(vkCmdCopyBufferToImage);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_copy_buffer_to_image
} else {
::std::mem::transmute(val)
}
},
cmd_copy_image_to_buffer: unsafe {
extern "system" fn cmd_copy_image_to_buffer(
_command_buffer: CommandBuffer,
_src_image: Image,
_src_image_layout: ImageLayout,
_dst_buffer: Buffer,
_region_count: u32,
_p_regions: *const BufferImageCopy,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(cmd_copy_image_to_buffer)
))
}
let raw_name = stringify!(vkCmdCopyImageToBuffer);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_copy_image_to_buffer
} else {
::std::mem::transmute(val)
}
},
cmd_update_buffer: unsafe {
extern "system" fn cmd_update_buffer(
_command_buffer: CommandBuffer,
_dst_buffer: Buffer,
_dst_offset: DeviceSize,
_data_size: DeviceSize,
_p_data: *const c_void,
) -> c_void {
panic!(concat!("Unable to load ", stringify!(cmd_update_buffer)))
}
let raw_name = stringify!(vkCmdUpdateBuffer);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_update_buffer
} else {
::std::mem::transmute(val)
}
},
cmd_fill_buffer: unsafe {
extern "system" fn cmd_fill_buffer(
_command_buffer: CommandBuffer,
_dst_buffer: Buffer,
_dst_offset: DeviceSize,
_size: DeviceSize,
_data: u32,
) -> c_void {
panic!(concat!("Unable to load ", stringify!(cmd_fill_buffer)))
}
let raw_name = stringify!(vkCmdFillBuffer);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_fill_buffer
} else {
::std::mem::transmute(val)
}
},
cmd_clear_color_image: unsafe {
extern "system" fn cmd_clear_color_image(
_command_buffer: CommandBuffer,
_image: Image,
_image_layout: ImageLayout,
_p_color: *const ClearColorValue,
_range_count: u32,
_p_ranges: *const ImageSubresourceRange,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(cmd_clear_color_image)
))
}
let raw_name = stringify!(vkCmdClearColorImage);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_clear_color_image
} else {
::std::mem::transmute(val)
}
},
cmd_clear_depth_stencil_image: unsafe {
extern "system" fn cmd_clear_depth_stencil_image(
_command_buffer: CommandBuffer,
_image: Image,
_image_layout: ImageLayout,
_p_depth_stencil: *const ClearDepthStencilValue,
_range_count: u32,
_p_ranges: *const ImageSubresourceRange,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(cmd_clear_depth_stencil_image)
))
}
let raw_name = stringify!(vkCmdClearDepthStencilImage);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_clear_depth_stencil_image
} else {
::std::mem::transmute(val)
}
},
cmd_clear_attachments: unsafe {
extern "system" fn cmd_clear_attachments(
_command_buffer: CommandBuffer,
_attachment_count: u32,
_p_attachments: *const ClearAttachment,
_rect_count: u32,
_p_rects: *const ClearRect,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(cmd_clear_attachments)
))
}
let raw_name = stringify!(vkCmdClearAttachments);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_clear_attachments
} else {
::std::mem::transmute(val)
}
},
cmd_resolve_image: unsafe {
extern "system" fn cmd_resolve_image(
_command_buffer: CommandBuffer,
_src_image: Image,
_src_image_layout: ImageLayout,
_dst_image: Image,
_dst_image_layout: ImageLayout,
_region_count: u32,
_p_regions: *const ImageResolve,
) -> c_void {
panic!(concat!("Unable to load ", stringify!(cmd_resolve_image)))
}
let raw_name = stringify!(vkCmdResolveImage);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_resolve_image
} else {
::std::mem::transmute(val)
}
},
cmd_set_event: unsafe {
extern "system" fn cmd_set_event(
_command_buffer: CommandBuffer,
_event: Event,
_stage_mask: PipelineStageFlags,
) -> c_void {
panic!(concat!("Unable to load ", stringify!(cmd_set_event)))
}
let raw_name = stringify!(vkCmdSetEvent);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_set_event
} else {
::std::mem::transmute(val)
}
},
cmd_reset_event: unsafe {
extern "system" fn cmd_reset_event(
_command_buffer: CommandBuffer,
_event: Event,
_stage_mask: PipelineStageFlags,
) -> c_void {
panic!(concat!("Unable to load ", stringify!(cmd_reset_event)))
}
let raw_name = stringify!(vkCmdResetEvent);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_reset_event
} else {
::std::mem::transmute(val)
}
},
cmd_wait_events: unsafe {
extern "system" fn cmd_wait_events(
_command_buffer: CommandBuffer,
_event_count: u32,
_p_events: *const Event,
_src_stage_mask: PipelineStageFlags,
_dst_stage_mask: PipelineStageFlags,
_memory_barrier_count: u32,
_p_memory_barriers: *const MemoryBarrier,
_buffer_memory_barrier_count: u32,
_p_buffer_memory_barriers: *const BufferMemoryBarrier,
_image_memory_barrier_count: u32,
_p_image_memory_barriers: *const ImageMemoryBarrier,
) -> c_void {
panic!(concat!("Unable to load ", stringify!(cmd_wait_events)))
}
let raw_name = stringify!(vkCmdWaitEvents);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_wait_events
} else {
::std::mem::transmute(val)
}
},
cmd_pipeline_barrier: unsafe {
extern "system" fn cmd_pipeline_barrier(
_command_buffer: CommandBuffer,
_src_stage_mask: PipelineStageFlags,
_dst_stage_mask: PipelineStageFlags,
_dependency_flags: DependencyFlags,
_memory_barrier_count: u32,
_p_memory_barriers: *const MemoryBarrier,
_buffer_memory_barrier_count: u32,
_p_buffer_memory_barriers: *const BufferMemoryBarrier,
_image_memory_barrier_count: u32,
_p_image_memory_barriers: *const ImageMemoryBarrier,
) -> c_void {
panic!(concat!("Unable to load ", stringify!(cmd_pipeline_barrier)))
}
let raw_name = stringify!(vkCmdPipelineBarrier);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_pipeline_barrier
} else {
::std::mem::transmute(val)
}
},
cmd_begin_query: unsafe {
extern "system" fn cmd_begin_query(
_command_buffer: CommandBuffer,
_query_pool: QueryPool,
_query: u32,
_flags: QueryControlFlags,
) -> c_void {
panic!(concat!("Unable to load ", stringify!(cmd_begin_query)))
}
let raw_name = stringify!(vkCmdBeginQuery);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_begin_query
} else {
::std::mem::transmute(val)
}
},
cmd_end_query: unsafe {
extern "system" fn cmd_end_query(
_command_buffer: CommandBuffer,
_query_pool: QueryPool,
_query: u32,
) -> c_void {
panic!(concat!("Unable to load ", stringify!(cmd_end_query)))
}
let raw_name = stringify!(vkCmdEndQuery);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_end_query
} else {
::std::mem::transmute(val)
}
},
cmd_reset_query_pool: unsafe {
extern "system" fn cmd_reset_query_pool(
_command_buffer: CommandBuffer,
_query_pool: QueryPool,
_first_query: u32,
_query_count: u32,
) -> c_void {
panic!(concat!("Unable to load ", stringify!(cmd_reset_query_pool)))
}
let raw_name = stringify!(vkCmdResetQueryPool);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_reset_query_pool
} else {
::std::mem::transmute(val)
}
},
cmd_write_timestamp: unsafe {
extern "system" fn cmd_write_timestamp(
_command_buffer: CommandBuffer,
_pipeline_stage: PipelineStageFlags,
_query_pool: QueryPool,
_query: u32,
) -> c_void {
panic!(concat!("Unable to load ", stringify!(cmd_write_timestamp)))
}
let raw_name = stringify!(vkCmdWriteTimestamp);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_write_timestamp
} else {
::std::mem::transmute(val)
}
},
cmd_copy_query_pool_results: unsafe {
extern "system" fn cmd_copy_query_pool_results(
_command_buffer: CommandBuffer,
_query_pool: QueryPool,
_first_query: u32,
_query_count: u32,
_dst_buffer: Buffer,
_dst_offset: DeviceSize,
_stride: DeviceSize,
_flags: QueryResultFlags,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(cmd_copy_query_pool_results)
))
}
let raw_name = stringify!(vkCmdCopyQueryPoolResults);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_copy_query_pool_results
} else {
::std::mem::transmute(val)
}
},
cmd_push_constants: unsafe {
extern "system" fn cmd_push_constants(
_command_buffer: CommandBuffer,
_layout: PipelineLayout,
_stage_flags: ShaderStageFlags,
_offset: u32,
_size: u32,
_p_values: *const c_void,
) -> c_void {
panic!(concat!("Unable to load ", stringify!(cmd_push_constants)))
}
let raw_name = stringify!(vkCmdPushConstants);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_push_constants
} else {
::std::mem::transmute(val)
}
},
cmd_begin_render_pass: unsafe {
extern "system" fn cmd_begin_render_pass(
_command_buffer: CommandBuffer,
_p_render_pass_begin: *const RenderPassBeginInfo,
_contents: SubpassContents,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(cmd_begin_render_pass)
))
}
let raw_name = stringify!(vkCmdBeginRenderPass);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_begin_render_pass
} else {
::std::mem::transmute(val)
}
},
cmd_next_subpass: unsafe {
extern "system" fn cmd_next_subpass(
_command_buffer: CommandBuffer,
_contents: SubpassContents,
) -> c_void {
panic!(concat!("Unable to load ", stringify!(cmd_next_subpass)))
}
let raw_name = stringify!(vkCmdNextSubpass);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_next_subpass
} else {
::std::mem::transmute(val)
}
},
cmd_end_render_pass: unsafe {
extern "system" fn cmd_end_render_pass(_command_buffer: CommandBuffer) -> c_void {
panic!(concat!("Unable to load ", stringify!(cmd_end_render_pass)))
}
let raw_name = stringify!(vkCmdEndRenderPass);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_end_render_pass
} else {
::std::mem::transmute(val)
}
},
cmd_execute_commands: unsafe {
extern "system" fn cmd_execute_commands(
_command_buffer: CommandBuffer,
_command_buffer_count: u32,
_p_command_buffers: *const CommandBuffer,
) -> c_void {
panic!(concat!("Unable to load ", stringify!(cmd_execute_commands)))
}
let raw_name = stringify!(vkCmdExecuteCommands);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_execute_commands
} else {
::std::mem::transmute(val)
}
},
}
}
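// Construction sketch (hedged): the loader above is generic over a closure that maps
// a null-terminated function name to a raw pointer; the call site passes `&cname`
// (a `CString`), so a `&CStr`-taking closure is assumed here. With a
// `vkGetDeviceProcAddr`-style callback named `get_device_proc_addr` and a table type
// called, say, `DeviceFnV1_0` (names outside this file are illustrative, not
// definitive), the table could be built roughly like this:
//
//     let device_fn = DeviceFnV1_0::load(|name: &::std::ffi::CStr| {
//         get_device_proc_addr(device, name.as_ptr()) as *const c_void
//     });
//
// Every `pub unsafe fn` wrapper that follows simply forwards its arguments to the
// stored function pointer; callers remain responsible for the pointer-validity and
// external-synchronization rules of the Vulkan specification.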
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkDestroyDevice.html>"]
pub unsafe fn destroy_device(
&self,
device: Device,
p_allocator: *const AllocationCallbacks,
) -> c_void {
(self.destroy_device)(device, p_allocator)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetDeviceQueue.html>"]
pub unsafe fn get_device_queue(
&self,
device: Device,
queue_family_index: u32,
queue_index: u32,
p_queue: *mut Queue,
) -> c_void {
(self.get_device_queue)(device, queue_family_index, queue_index, p_queue)
}
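// Usage sketch (hedged): `vkGetDeviceQueue` writes its result through the `p_queue`
// out-parameter, so a typical call site supplies writable storage, e.g.:
//
//     let mut queue: Queue = ::std::mem::zeroed();
//     device_fn.get_device_queue(device, queue_family_index, 0, &mut queue);
//
// `device_fn`, `device`, and `queue_family_index` are illustrative names.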
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkQueueSubmit.html>"]
pub unsafe fn queue_submit(
&self,
queue: Queue,
submit_count: u32,
p_submits: *const SubmitInfo,
fence: Fence,
) -> Result {
(self.queue_submit)(queue, submit_count, p_submits, fence)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkQueueWaitIdle.html>"]
pub unsafe fn queue_wait_idle(&self, queue: Queue) -> Result {
(self.queue_wait_idle)(queue)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkDeviceWaitIdle.html>"]
pub unsafe fn device_wait_idle(&self, device: Device) -> Result {
(self.device_wait_idle)(device)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkAllocateMemory.html>"]
pub unsafe fn allocate_memory(
&self,
device: Device,
p_allocate_info: *const MemoryAllocateInfo,
p_allocator: *const AllocationCallbacks,
p_memory: *mut DeviceMemory,
) -> Result {
(self.allocate_memory)(device, p_allocate_info, p_allocator, p_memory)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkFreeMemory.html>"]
pub unsafe fn free_memory(
&self,
device: Device,
memory: DeviceMemory,
p_allocator: *const AllocationCallbacks,
) -> c_void {
(self.free_memory)(device, memory, p_allocator)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkMapMemory.html>"]
pub unsafe fn map_memory(
&self,
device: Device,
memory: DeviceMemory,
offset: DeviceSize,
size: DeviceSize,
flags: MemoryMapFlags,
pp_data: *mut *mut c_void,
) -> Result {
(self.map_memory)(device, memory, offset, size, flags, pp_data)
}
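// Usage sketch (hedged): `vkMapMemory` returns the host-visible pointer through
// `pp_data`; a common pattern is map, copy, unmap:
//
//     let mut data: *mut c_void = ::std::ptr::null_mut();
//     device_fn.map_memory(device, memory, 0, size, MemoryMapFlags::empty(), &mut data);
//     ::std::ptr::copy_nonoverlapping(src.as_ptr(), data as *mut u8, src.len());
//     device_fn.unmap_memory(device, memory);
//
// The returned `Result` is left unchecked in this sketch; `device_fn`, `memory`,
// `size`, and `src` are illustrative.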
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkUnmapMemory.html>"]
pub unsafe fn unmap_memory(&self, device: Device, memory: DeviceMemory) -> c_void {
(self.unmap_memory)(device, memory)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkFlushMappedMemoryRanges.html>"]
pub unsafe fn flush_mapped_memory_ranges(
&self,
device: Device,
memory_range_count: u32,
p_memory_ranges: *const MappedMemoryRange,
) -> Result {
(self.flush_mapped_memory_ranges)(device, memory_range_count, p_memory_ranges)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkInvalidateMappedMemoryRanges.html>"]
pub unsafe fn invalidate_mapped_memory_ranges(
&self,
device: Device,
memory_range_count: u32,
p_memory_ranges: *const MappedMemoryRange,
) -> Result {
(self.invalidate_mapped_memory_ranges)(device, memory_range_count, p_memory_ranges)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetDeviceMemoryCommitment.html>"]
pub unsafe fn get_device_memory_commitment(
&self,
device: Device,
memory: DeviceMemory,
p_committed_memory_in_bytes: *mut DeviceSize,
) -> c_void {
(self.get_device_memory_commitment)(device, memory, p_committed_memory_in_bytes)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkBindBufferMemory.html>"]
pub unsafe fn bind_buffer_memory(
&self,
device: Device,
buffer: Buffer,
memory: DeviceMemory,
memory_offset: DeviceSize,
) -> Result {
(self.bind_buffer_memory)(device, buffer, memory, memory_offset)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkBindImageMemory.html>"]
pub unsafe fn bind_image_memory(
&self,
device: Device,
image: Image,
memory: DeviceMemory,
memory_offset: DeviceSize,
) -> Result {
(self.bind_image_memory)(device, image, memory, memory_offset)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetBufferMemoryRequirements.html>"]
pub unsafe fn get_buffer_memory_requirements(
&self,
device: Device,
buffer: Buffer,
p_memory_requirements: *mut MemoryRequirements,
) -> c_void {
(self.get_buffer_memory_requirements)(device, buffer, p_memory_requirements)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetImageMemoryRequirements.html>"]
pub unsafe fn get_image_memory_requirements(
&self,
device: Device,
image: Image,
p_memory_requirements: *mut MemoryRequirements,
) -> c_void {
(self.get_image_memory_requirements)(device, image, p_memory_requirements)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetImageSparseMemoryRequirements.html>"]
pub unsafe fn get_image_sparse_memory_requirements(
&self,
device: Device,
image: Image,
p_sparse_memory_requirement_count: *mut u32,
p_sparse_memory_requirements: *mut SparseImageMemoryRequirements,
) -> c_void {
(self.get_image_sparse_memory_requirements)(
device,
image,
p_sparse_memory_requirement_count,
p_sparse_memory_requirements,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkQueueBindSparse.html>"]
pub unsafe fn queue_bind_sparse(
&self,
queue: Queue,
bind_info_count: u32,
p_bind_info: *const BindSparseInfo,
fence: Fence,
) -> Result {
(self.queue_bind_sparse)(queue, bind_info_count, p_bind_info, fence)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCreateFence.html>"]
pub unsafe fn create_fence(
&self,
device: Device,
p_create_info: *const FenceCreateInfo,
p_allocator: *const AllocationCallbacks,
p_fence: *mut Fence,
) -> Result {
(self.create_fence)(device, p_create_info, p_allocator, p_fence)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkDestroyFence.html>"]
pub unsafe fn destroy_fence(
&self,
device: Device,
fence: Fence,
p_allocator: *const AllocationCallbacks,
) -> c_void {
(self.destroy_fence)(device, fence, p_allocator)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkResetFences.html>"]
pub unsafe fn reset_fences(
&self,
device: Device,
fence_count: u32,
p_fences: *const Fence,
) -> Result {
(self.reset_fences)(device, fence_count, p_fences)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetFenceStatus.html>"]
pub unsafe fn get_fence_status(&self, device: Device, fence: Fence) -> Result {
(self.get_fence_status)(device, fence)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkWaitForFences.html>"]
pub unsafe fn wait_for_fences(
&self,
device: Device,
fence_count: u32,
p_fences: *const Fence,
wait_all: Bool32,
timeout: u64,
) -> Result {
(self.wait_for_fences)(device, fence_count, p_fences, wait_all, timeout)
}
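// Note: `vkWaitForFences` takes its timeout in nanoseconds; passing
// `u64::max_value()` (the C headers' `UINT64_MAX`) waits indefinitely, and
// `wait_all` is a `Bool32`, so `1` waits for all fences and `0` for any one of them.
// Illustrative call: `device_fn.wait_for_fences(device, 1, &fence, 1, u64::max_value())`.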
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCreateSemaphore.html>"]
pub unsafe fn create_semaphore(
&self,
device: Device,
p_create_info: *const SemaphoreCreateInfo,
p_allocator: *const AllocationCallbacks,
p_semaphore: *mut Semaphore,
) -> Result {
(self.create_semaphore)(device, p_create_info, p_allocator, p_semaphore)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkDestroySemaphore.html>"]
pub unsafe fn destroy_semaphore(
&self,
device: Device,
semaphore: Semaphore,
p_allocator: *const AllocationCallbacks,
) -> c_void {
(self.destroy_semaphore)(device, semaphore, p_allocator)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCreateEvent.html>"]
pub unsafe fn create_event(
&self,
device: Device,
p_create_info: *const EventCreateInfo,
p_allocator: *const AllocationCallbacks,
p_event: *mut Event,
) -> Result {
(self.create_event)(device, p_create_info, p_allocator, p_event)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkDestroyEvent.html>"]
pub unsafe fn destroy_event(
&self,
device: Device,
event: Event,
p_allocator: *const AllocationCallbacks,
) -> c_void {
(self.destroy_event)(device, event, p_allocator)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetEventStatus.html>"]
pub unsafe fn get_event_status(&self, device: Device, event: Event) -> Result {
(self.get_event_status)(device, event)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkSetEvent.html>"]
pub unsafe fn set_event(&self, device: Device, event: Event) -> Result {
(self.set_event)(device, event)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkResetEvent.html>"]
pub unsafe fn reset_event(&self, device: Device, event: Event) -> Result {
(self.reset_event)(device, event)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCreateQueryPool.html>"]
pub unsafe fn create_query_pool(
&self,
device: Device,
p_create_info: *const QueryPoolCreateInfo,
p_allocator: *const AllocationCallbacks,
p_query_pool: *mut QueryPool,
) -> Result {
(self.create_query_pool)(device, p_create_info, p_allocator, p_query_pool)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkDestroyQueryPool.html>"]
pub unsafe fn destroy_query_pool(
&self,
device: Device,
query_pool: QueryPool,
p_allocator: *const AllocationCallbacks,
) -> c_void {
(self.destroy_query_pool)(device, query_pool, p_allocator)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetQueryPoolResults.html>"]
pub unsafe fn get_query_pool_results(
&self,
device: Device,
query_pool: QueryPool,
first_query: u32,
query_count: u32,
data_size: usize,
p_data: *mut c_void,
stride: DeviceSize,
flags: QueryResultFlags,
) -> Result {
(self.get_query_pool_results)(
device,
query_pool,
first_query,
query_count,
data_size,
p_data,
stride,
flags,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCreateBuffer.html>"]
pub unsafe fn create_buffer(
&self,
device: Device,
p_create_info: *const BufferCreateInfo,
p_allocator: *const AllocationCallbacks,
p_buffer: *mut Buffer,
) -> Result {
(self.create_buffer)(device, p_create_info, p_allocator, p_buffer)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkDestroyBuffer.html>"]
pub unsafe fn destroy_buffer(
&self,
device: Device,
buffer: Buffer,
p_allocator: *const AllocationCallbacks,
) -> c_void {
(self.destroy_buffer)(device, buffer, p_allocator)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCreateBufferView.html>"]
pub unsafe fn create_buffer_view(
&self,
device: Device,
p_create_info: *const BufferViewCreateInfo,
p_allocator: *const AllocationCallbacks,
p_view: *mut BufferView,
) -> Result {
(self.create_buffer_view)(device, p_create_info, p_allocator, p_view)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkDestroyBufferView.html>"]
pub unsafe fn destroy_buffer_view(
&self,
device: Device,
buffer_view: BufferView,
p_allocator: *const AllocationCallbacks,
) -> c_void {
(self.destroy_buffer_view)(device, buffer_view, p_allocator)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCreateImage.html>"]
pub unsafe fn create_image(
&self,
device: Device,
p_create_info: *const ImageCreateInfo,
p_allocator: *const AllocationCallbacks,
p_image: *mut Image,
) -> Result {
(self.create_image)(device, p_create_info, p_allocator, p_image)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkDestroyImage.html>"]
pub unsafe fn destroy_image(
&self,
device: Device,
image: Image,
p_allocator: *const AllocationCallbacks,
) -> c_void {
(self.destroy_image)(device, image, p_allocator)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetImageSubresourceLayout.html>"]
pub unsafe fn get_image_subresource_layout(
&self,
device: Device,
image: Image,
p_subresource: *const ImageSubresource,
p_layout: *mut SubresourceLayout,
) -> c_void {
(self.get_image_subresource_layout)(device, image, p_subresource, p_layout)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCreateImageView.html>"]
pub unsafe fn create_image_view(
&self,
device: Device,
p_create_info: *const ImageViewCreateInfo,
p_allocator: *const AllocationCallbacks,
p_view: *mut ImageView,
) -> Result {
(self.create_image_view)(device, p_create_info, p_allocator, p_view)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkDestroyImageView.html>"]
pub unsafe fn destroy_image_view(
&self,
device: Device,
image_view: ImageView,
p_allocator: *const AllocationCallbacks,
) -> c_void {
(self.destroy_image_view)(device, image_view, p_allocator)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCreateShaderModule.html>"]
pub unsafe fn create_shader_module(
&self,
device: Device,
p_create_info: *const ShaderModuleCreateInfo,
p_allocator: *const AllocationCallbacks,
p_shader_module: *mut ShaderModule,
) -> Result {
(self.create_shader_module)(device, p_create_info, p_allocator, p_shader_module)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkDestroyShaderModule.html>"]
pub unsafe fn destroy_shader_module(
&self,
device: Device,
shader_module: ShaderModule,
p_allocator: *const AllocationCallbacks,
) -> c_void {
(self.destroy_shader_module)(device, shader_module, p_allocator)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCreatePipelineCache.html>"]
pub unsafe fn create_pipeline_cache(
&self,
device: Device,
p_create_info: *const PipelineCacheCreateInfo,
p_allocator: *const AllocationCallbacks,
p_pipeline_cache: *mut PipelineCache,
) -> Result {
(self.create_pipeline_cache)(device, p_create_info, p_allocator, p_pipeline_cache)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkDestroyPipelineCache.html>"]
pub unsafe fn destroy_pipeline_cache(
&self,
device: Device,
pipeline_cache: PipelineCache,
p_allocator: *const AllocationCallbacks,
) -> c_void {
(self.destroy_pipeline_cache)(device, pipeline_cache, p_allocator)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetPipelineCacheData.html>"]
pub unsafe fn get_pipeline_cache_data(
&self,
device: Device,
pipeline_cache: PipelineCache,
p_data_size: *mut usize,
p_data: *mut c_void,
) -> Result {
(self.get_pipeline_cache_data)(device, pipeline_cache, p_data_size, p_data)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkMergePipelineCaches.html>"]
pub unsafe fn merge_pipeline_caches(
&self,
device: Device,
dst_cache: PipelineCache,
src_cache_count: u32,
p_src_caches: *const PipelineCache,
) -> Result {
(self.merge_pipeline_caches)(device, dst_cache, src_cache_count, p_src_caches)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCreateGraphicsPipelines.html>"]
pub unsafe fn create_graphics_pipelines(
&self,
device: Device,
pipeline_cache: PipelineCache,
create_info_count: u32,
p_create_infos: *const GraphicsPipelineCreateInfo,
p_allocator: *const AllocationCallbacks,
p_pipelines: *mut Pipeline,
) -> Result {
(self.create_graphics_pipelines)(
device,
pipeline_cache,
create_info_count,
p_create_infos,
p_allocator,
p_pipelines,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCreateComputePipelines.html>"]
pub unsafe fn create_compute_pipelines(
&self,
device: Device,
pipeline_cache: PipelineCache,
create_info_count: u32,
p_create_infos: *const ComputePipelineCreateInfo,
p_allocator: *const AllocationCallbacks,
p_pipelines: *mut Pipeline,
) -> Result {
(self.create_compute_pipelines)(
device,
pipeline_cache,
create_info_count,
p_create_infos,
p_allocator,
p_pipelines,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkDestroyPipeline.html>"]
pub unsafe fn destroy_pipeline(
&self,
device: Device,
pipeline: Pipeline,
p_allocator: *const AllocationCallbacks,
) -> c_void {
(self.destroy_pipeline)(device, pipeline, p_allocator)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCreatePipelineLayout.html>"]
pub unsafe fn create_pipeline_layout(
&self,
device: Device,
p_create_info: *const PipelineLayoutCreateInfo,
p_allocator: *const AllocationCallbacks,
p_pipeline_layout: *mut PipelineLayout,
) -> Result {
(self.create_pipeline_layout)(device, p_create_info, p_allocator, p_pipeline_layout)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkDestroyPipelineLayout.html>"]
pub unsafe fn destroy_pipeline_layout(
&self,
device: Device,
pipeline_layout: PipelineLayout,
p_allocator: *const AllocationCallbacks,
) -> c_void {
(self.destroy_pipeline_layout)(device, pipeline_layout, p_allocator)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCreateSampler.html>"]
pub unsafe fn create_sampler(
&self,
device: Device,
p_create_info: *const SamplerCreateInfo,
p_allocator: *const AllocationCallbacks,
p_sampler: *mut Sampler,
) -> Result {
(self.create_sampler)(device, p_create_info, p_allocator, p_sampler)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkDestroySampler.html>"]
pub unsafe fn destroy_sampler(
&self,
device: Device,
sampler: Sampler,
p_allocator: *const AllocationCallbacks,
) -> c_void {
(self.destroy_sampler)(device, sampler, p_allocator)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCreateDescriptorSetLayout.html>"]
pub unsafe fn create_descriptor_set_layout(
&self,
device: Device,
p_create_info: *const DescriptorSetLayoutCreateInfo,
p_allocator: *const AllocationCallbacks,
p_set_layout: *mut DescriptorSetLayout,
) -> Result {
(self.create_descriptor_set_layout)(device, p_create_info, p_allocator, p_set_layout)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkDestroyDescriptorSetLayout.html>"]
pub unsafe fn destroy_descriptor_set_layout(
&self,
device: Device,
descriptor_set_layout: DescriptorSetLayout,
p_allocator: *const AllocationCallbacks,
) -> c_void {
(self.destroy_descriptor_set_layout)(device, descriptor_set_layout, p_allocator)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCreateDescriptorPool.html>"]
pub unsafe fn create_descriptor_pool(
&self,
device: Device,
p_create_info: *const DescriptorPoolCreateInfo,
p_allocator: *const AllocationCallbacks,
p_descriptor_pool: *mut DescriptorPool,
) -> Result {
(self.create_descriptor_pool)(device, p_create_info, p_allocator, p_descriptor_pool)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkDestroyDescriptorPool.html>"]
pub unsafe fn destroy_descriptor_pool(
&self,
device: Device,
descriptor_pool: DescriptorPool,
p_allocator: *const AllocationCallbacks,
) -> c_void {
(self.destroy_descriptor_pool)(device, descriptor_pool, p_allocator)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkResetDescriptorPool.html>"]
pub unsafe fn reset_descriptor_pool(
&self,
device: Device,
descriptor_pool: DescriptorPool,
flags: DescriptorPoolResetFlags,
) -> Result {
(self.reset_descriptor_pool)(device, descriptor_pool, flags)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkAllocateDescriptorSets.html>"]
pub unsafe fn allocate_descriptor_sets(
&self,
device: Device,
p_allocate_info: *const DescriptorSetAllocateInfo,
p_descriptor_sets: *mut DescriptorSet,
) -> Result {
(self.allocate_descriptor_sets)(device, p_allocate_info, p_descriptor_sets)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkFreeDescriptorSets.html>"]
pub unsafe fn free_descriptor_sets(
&self,
device: Device,
descriptor_pool: DescriptorPool,
descriptor_set_count: u32,
p_descriptor_sets: *const DescriptorSet,
) -> Result {
(self.free_descriptor_sets)(
device,
descriptor_pool,
descriptor_set_count,
p_descriptor_sets,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkUpdateDescriptorSets.html>"]
pub unsafe fn update_descriptor_sets(
&self,
device: Device,
descriptor_write_count: u32,
p_descriptor_writes: *const WriteDescriptorSet,
descriptor_copy_count: u32,
p_descriptor_copies: *const CopyDescriptorSet,
) -> c_void {
(self.update_descriptor_sets)(
device,
descriptor_write_count,
p_descriptor_writes,
descriptor_copy_count,
p_descriptor_copies,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCreateFramebuffer.html>"]
pub unsafe fn create_framebuffer(
&self,
device: Device,
p_create_info: *const FramebufferCreateInfo,
p_allocator: *const AllocationCallbacks,
p_framebuffer: *mut Framebuffer,
) -> Result {
(self.create_framebuffer)(device, p_create_info, p_allocator, p_framebuffer)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkDestroyFramebuffer.html>"]
pub unsafe fn destroy_framebuffer(
&self,
device: Device,
framebuffer: Framebuffer,
p_allocator: *const AllocationCallbacks,
) -> c_void {
(self.destroy_framebuffer)(device, framebuffer, p_allocator)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCreateRenderPass.html>"]
pub unsafe fn create_render_pass(
&self,
device: Device,
p_create_info: *const RenderPassCreateInfo,
p_allocator: *const AllocationCallbacks,
p_render_pass: *mut RenderPass,
) -> Result {
(self.create_render_pass)(device, p_create_info, p_allocator, p_render_pass)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkDestroyRenderPass.html>"]
pub unsafe fn destroy_render_pass(
&self,
device: Device,
render_pass: RenderPass,
p_allocator: *const AllocationCallbacks,
) -> c_void {
(self.destroy_render_pass)(device, render_pass, p_allocator)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetRenderAreaGranularity.html>"]
pub unsafe fn get_render_area_granularity(
&self,
device: Device,
render_pass: RenderPass,
p_granularity: *mut Extent2D,
) -> c_void {
(self.get_render_area_granularity)(device, render_pass, p_granularity)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCreateCommandPool.html>"]
pub unsafe fn create_command_pool(
&self,
device: Device,
p_create_info: *const CommandPoolCreateInfo,
p_allocator: *const AllocationCallbacks,
p_command_pool: *mut CommandPool,
) -> Result {
(self.create_command_pool)(device, p_create_info, p_allocator, p_command_pool)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkDestroyCommandPool.html>"]
pub unsafe fn destroy_command_pool(
&self,
device: Device,
command_pool: CommandPool,
p_allocator: *const AllocationCallbacks,
) -> c_void {
(self.destroy_command_pool)(device, command_pool, p_allocator)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkResetCommandPool.html>"]
pub unsafe fn reset_command_pool(
&self,
device: Device,
command_pool: CommandPool,
flags: CommandPoolResetFlags,
) -> Result {
(self.reset_command_pool)(device, command_pool, flags)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkAllocateCommandBuffers.html>"]
pub unsafe fn allocate_command_buffers(
&self,
device: Device,
p_allocate_info: *const CommandBufferAllocateInfo,
p_command_buffers: *mut CommandBuffer,
) -> Result {
(self.allocate_command_buffers)(device, p_allocate_info, p_command_buffers)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkFreeCommandBuffers.html>"]
pub unsafe fn free_command_buffers(
&self,
device: Device,
command_pool: CommandPool,
command_buffer_count: u32,
p_command_buffers: *const CommandBuffer,
) -> c_void {
(self.free_command_buffers)(
device,
command_pool,
command_buffer_count,
p_command_buffers,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkBeginCommandBuffer.html>"]
pub unsafe fn begin_command_buffer(
&self,
command_buffer: CommandBuffer,
p_begin_info: *const CommandBufferBeginInfo,
) -> Result {
(self.begin_command_buffer)(command_buffer, p_begin_info)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkEndCommandBuffer.html>"]
pub unsafe fn end_command_buffer(&self, command_buffer: CommandBuffer) -> Result {
(self.end_command_buffer)(command_buffer)
}
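// Recording sketch (hedged): the `cmd_*` wrappers are only valid between
// `begin_command_buffer` and `end_command_buffer`, mirroring the C usage pattern:
//
//     device_fn.begin_command_buffer(cmd, &begin_info);
//     device_fn.cmd_bind_pipeline(cmd, PipelineBindPoint::GRAPHICS, pipeline);
//     device_fn.cmd_draw(cmd, 3, 1, 0, 0);
//     device_fn.end_command_buffer(cmd);
//
// `cmd`, `begin_info`, and `pipeline` are illustrative; the `Result` values from
// begin/end are unchecked in this sketch.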
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkResetCommandBuffer.html>"]
pub unsafe fn reset_command_buffer(
&self,
command_buffer: CommandBuffer,
flags: CommandBufferResetFlags,
) -> Result {
(self.reset_command_buffer)(command_buffer, flags)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdBindPipeline.html>"]
pub unsafe fn cmd_bind_pipeline(
&self,
command_buffer: CommandBuffer,
pipeline_bind_point: PipelineBindPoint,
pipeline: Pipeline,
) -> c_void {
(self.cmd_bind_pipeline)(command_buffer, pipeline_bind_point, pipeline)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdSetViewport.html>"]
pub unsafe fn cmd_set_viewport(
&self,
command_buffer: CommandBuffer,
first_viewport: u32,
viewport_count: u32,
p_viewports: *const Viewport,
) -> c_void {
(self.cmd_set_viewport)(command_buffer, first_viewport, viewport_count, p_viewports)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdSetScissor.html>"]
pub unsafe fn cmd_set_scissor(
&self,
command_buffer: CommandBuffer,
first_scissor: u32,
scissor_count: u32,
p_scissors: *const Rect2D,
) -> c_void {
(self.cmd_set_scissor)(command_buffer, first_scissor, scissor_count, p_scissors)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdSetLineWidth.html>"]
pub unsafe fn cmd_set_line_width(
&self,
command_buffer: CommandBuffer,
line_width: f32,
) -> c_void {
(self.cmd_set_line_width)(command_buffer, line_width)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdSetDepthBias.html>"]
pub unsafe fn cmd_set_depth_bias(
&self,
command_buffer: CommandBuffer,
depth_bias_constant_factor: f32,
depth_bias_clamp: f32,
depth_bias_slope_factor: f32,
) -> c_void {
(self.cmd_set_depth_bias)(
command_buffer,
depth_bias_constant_factor,
depth_bias_clamp,
depth_bias_slope_factor,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdSetBlendConstants.html>"]
pub unsafe fn cmd_set_blend_constants(
&self,
command_buffer: CommandBuffer,
blend_constants: &[f32; 4],
) -> c_void {
(self.cmd_set_blend_constants)(command_buffer, blend_constants)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdSetDepthBounds.html>"]
pub unsafe fn cmd_set_depth_bounds(
&self,
command_buffer: CommandBuffer,
min_depth_bounds: f32,
max_depth_bounds: f32,
) -> c_void {
(self.cmd_set_depth_bounds)(command_buffer, min_depth_bounds, max_depth_bounds)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdSetStencilCompareMask.html>"]
pub unsafe fn cmd_set_stencil_compare_mask(
&self,
command_buffer: CommandBuffer,
face_mask: StencilFaceFlags,
compare_mask: u32,
) -> c_void {
(self.cmd_set_stencil_compare_mask)(command_buffer, face_mask, compare_mask)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdSetStencilWriteMask.html>"]
pub unsafe fn cmd_set_stencil_write_mask(
&self,
command_buffer: CommandBuffer,
face_mask: StencilFaceFlags,
write_mask: u32,
) -> c_void {
(self.cmd_set_stencil_write_mask)(command_buffer, face_mask, write_mask)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdSetStencilReference.html>"]
pub unsafe fn cmd_set_stencil_reference(
&self,
command_buffer: CommandBuffer,
face_mask: StencilFaceFlags,
reference: u32,
) -> c_void {
(self.cmd_set_stencil_reference)(command_buffer, face_mask, reference)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdBindDescriptorSets.html>"]
pub unsafe fn cmd_bind_descriptor_sets(
&self,
command_buffer: CommandBuffer,
pipeline_bind_point: PipelineBindPoint,
layout: PipelineLayout,
first_set: u32,
descriptor_set_count: u32,
p_descriptor_sets: *const DescriptorSet,
dynamic_offset_count: u32,
p_dynamic_offsets: *const u32,
) -> c_void {
(self.cmd_bind_descriptor_sets)(
command_buffer,
pipeline_bind_point,
layout,
first_set,
descriptor_set_count,
p_descriptor_sets,
dynamic_offset_count,
p_dynamic_offsets,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdBindIndexBuffer.html>"]
pub unsafe fn cmd_bind_index_buffer(
&self,
command_buffer: CommandBuffer,
buffer: Buffer,
offset: DeviceSize,
index_type: IndexType,
) -> c_void {
(self.cmd_bind_index_buffer)(command_buffer, buffer, offset, index_type)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdBindVertexBuffers.html>"]
pub unsafe fn cmd_bind_vertex_buffers(
&self,
command_buffer: CommandBuffer,
first_binding: u32,
binding_count: u32,
p_buffers: *const Buffer,
p_offsets: *const DeviceSize,
) -> c_void {
(self.cmd_bind_vertex_buffers)(
command_buffer,
first_binding,
binding_count,
p_buffers,
p_offsets,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdDraw.html>"]
pub unsafe fn cmd_draw(
&self,
command_buffer: CommandBuffer,
vertex_count: u32,
instance_count: u32,
first_vertex: u32,
first_instance: u32,
) -> c_void {
(self.cmd_draw)(
command_buffer,
vertex_count,
instance_count,
first_vertex,
first_instance,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdDrawIndexed.html>"]
pub unsafe fn cmd_draw_indexed(
&self,
command_buffer: CommandBuffer,
index_count: u32,
instance_count: u32,
first_index: u32,
vertex_offset: i32,
first_instance: u32,
) -> c_void {
(self.cmd_draw_indexed)(
command_buffer,
index_count,
instance_count,
first_index,
vertex_offset,
first_instance,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdDrawIndirect.html>"]
pub unsafe fn cmd_draw_indirect(
&self,
command_buffer: CommandBuffer,
buffer: Buffer,
offset: DeviceSize,
draw_count: u32,
stride: u32,
) -> c_void {
(self.cmd_draw_indirect)(command_buffer, buffer, offset, draw_count, stride)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdDrawIndexedIndirect.html>"]
pub unsafe fn cmd_draw_indexed_indirect(
&self,
command_buffer: CommandBuffer,
buffer: Buffer,
offset: DeviceSize,
draw_count: u32,
stride: u32,
) -> c_void {
(self.cmd_draw_indexed_indirect)(command_buffer, buffer, offset, draw_count, stride)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdDispatch.html>"]
pub unsafe fn cmd_dispatch(
&self,
command_buffer: CommandBuffer,
group_count_x: u32,
group_count_y: u32,
group_count_z: u32,
) -> c_void {
(self.cmd_dispatch)(command_buffer, group_count_x, group_count_y, group_count_z)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdDispatchIndirect.html>"]
pub unsafe fn cmd_dispatch_indirect(
&self,
command_buffer: CommandBuffer,
buffer: Buffer,
offset: DeviceSize,
) -> c_void {
(self.cmd_dispatch_indirect)(command_buffer, buffer, offset)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdCopyBuffer.html>"]
pub unsafe fn cmd_copy_buffer(
&self,
command_buffer: CommandBuffer,
src_buffer: Buffer,
dst_buffer: Buffer,
region_count: u32,
p_regions: *const BufferCopy,
) -> c_void {
(self.cmd_copy_buffer)(
command_buffer,
src_buffer,
dst_buffer,
region_count,
p_regions,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdCopyImage.html>"]
pub unsafe fn cmd_copy_image(
&self,
command_buffer: CommandBuffer,
src_image: Image,
src_image_layout: ImageLayout,
dst_image: Image,
dst_image_layout: ImageLayout,
region_count: u32,
p_regions: *const ImageCopy,
) -> c_void {
(self.cmd_copy_image)(
command_buffer,
src_image,
src_image_layout,
dst_image,
dst_image_layout,
region_count,
p_regions,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdBlitImage.html>"]
pub unsafe fn cmd_blit_image(
&self,
command_buffer: CommandBuffer,
src_image: Image,
src_image_layout: ImageLayout,
dst_image: Image,
dst_image_layout: ImageLayout,
region_count: u32,
p_regions: *const ImageBlit,
filter: Filter,
) -> c_void {
(self.cmd_blit_image)(
command_buffer,
src_image,
src_image_layout,
dst_image,
dst_image_layout,
region_count,
p_regions,
filter,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdCopyBufferToImage.html>"]
pub unsafe fn cmd_copy_buffer_to_image(
&self,
command_buffer: CommandBuffer,
src_buffer: Buffer,
dst_image: Image,
dst_image_layout: ImageLayout,
region_count: u32,
p_regions: *const BufferImageCopy,
) -> c_void {
(self.cmd_copy_buffer_to_image)(
command_buffer,
src_buffer,
dst_image,
dst_image_layout,
region_count,
p_regions,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdCopyImageToBuffer.html>"]
pub unsafe fn cmd_copy_image_to_buffer(
&self,
command_buffer: CommandBuffer,
src_image: Image,
src_image_layout: ImageLayout,
dst_buffer: Buffer,
region_count: u32,
p_regions: *const BufferImageCopy,
) -> c_void {
(self.cmd_copy_image_to_buffer)(
command_buffer,
src_image,
src_image_layout,
dst_buffer,
region_count,
p_regions,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdUpdateBuffer.html>"]
pub unsafe fn cmd_update_buffer(
&self,
command_buffer: CommandBuffer,
dst_buffer: Buffer,
dst_offset: DeviceSize,
data_size: DeviceSize,
p_data: *const c_void,
) -> c_void {
(self.cmd_update_buffer)(command_buffer, dst_buffer, dst_offset, data_size, p_data)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdFillBuffer.html>"]
pub unsafe fn cmd_fill_buffer(
&self,
command_buffer: CommandBuffer,
dst_buffer: Buffer,
dst_offset: DeviceSize,
size: DeviceSize,
data: u32,
) -> c_void {
(self.cmd_fill_buffer)(command_buffer, dst_buffer, dst_offset, size, data)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdClearColorImage.html>"]
pub unsafe fn cmd_clear_color_image(
&self,
command_buffer: CommandBuffer,
image: Image,
image_layout: ImageLayout,
p_color: *const ClearColorValue,
range_count: u32,
p_ranges: *const ImageSubresourceRange,
) -> c_void {
(self.cmd_clear_color_image)(
command_buffer,
image,
image_layout,
p_color,
range_count,
p_ranges,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdClearDepthStencilImage.html>"]
pub unsafe fn cmd_clear_depth_stencil_image(
&self,
command_buffer: CommandBuffer,
image: Image,
image_layout: ImageLayout,
p_depth_stencil: *const ClearDepthStencilValue,
range_count: u32,
p_ranges: *const ImageSubresourceRange,
) -> c_void {
(self.cmd_clear_depth_stencil_image)(
command_buffer,
image,
image_layout,
p_depth_stencil,
range_count,
p_ranges,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdClearAttachments.html>"]
pub unsafe fn cmd_clear_attachments(
&self,
command_buffer: CommandBuffer,
attachment_count: u32,
p_attachments: *const ClearAttachment,
rect_count: u32,
p_rects: *const ClearRect,
) -> c_void {
(self.cmd_clear_attachments)(
command_buffer,
attachment_count,
p_attachments,
rect_count,
p_rects,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdResolveImage.html>"]
pub unsafe fn cmd_resolve_image(
&self,
command_buffer: CommandBuffer,
src_image: Image,
src_image_layout: ImageLayout,
dst_image: Image,
dst_image_layout: ImageLayout,
region_count: u32,
p_regions: *const ImageResolve,
) -> c_void {
(self.cmd_resolve_image)(
command_buffer,
src_image,
src_image_layout,
dst_image,
dst_image_layout,
region_count,
p_regions,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdSetEvent.html>"]
pub unsafe fn cmd_set_event(
&self,
command_buffer: CommandBuffer,
event: Event,
stage_mask: PipelineStageFlags,
) -> c_void {
(self.cmd_set_event)(command_buffer, event, stage_mask)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdResetEvent.html>"]
pub unsafe fn cmd_reset_event(
&self,
command_buffer: CommandBuffer,
event: Event,
stage_mask: PipelineStageFlags,
) -> c_void {
(self.cmd_reset_event)(command_buffer, event, stage_mask)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdWaitEvents.html>"]
pub unsafe fn cmd_wait_events(
&self,
command_buffer: CommandBuffer,
event_count: u32,
p_events: *const Event,
src_stage_mask: PipelineStageFlags,
dst_stage_mask: PipelineStageFlags,
memory_barrier_count: u32,
p_memory_barriers: *const MemoryBarrier,
buffer_memory_barrier_count: u32,
p_buffer_memory_barriers: *const BufferMemoryBarrier,
image_memory_barrier_count: u32,
p_image_memory_barriers: *const ImageMemoryBarrier,
) -> c_void {
(self.cmd_wait_events)(
command_buffer,
event_count,
p_events,
src_stage_mask,
dst_stage_mask,
memory_barrier_count,
p_memory_barriers,
buffer_memory_barrier_count,
p_buffer_memory_barriers,
image_memory_barrier_count,
p_image_memory_barriers,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdPipelineBarrier.html>"]
pub unsafe fn cmd_pipeline_barrier(
&self,
command_buffer: CommandBuffer,
src_stage_mask: PipelineStageFlags,
dst_stage_mask: PipelineStageFlags,
dependency_flags: DependencyFlags,
memory_barrier_count: u32,
p_memory_barriers: *const MemoryBarrier,
buffer_memory_barrier_count: u32,
p_buffer_memory_barriers: *const BufferMemoryBarrier,
image_memory_barrier_count: u32,
p_image_memory_barriers: *const ImageMemoryBarrier,
) -> c_void {
(self.cmd_pipeline_barrier)(
command_buffer,
src_stage_mask,
dst_stage_mask,
dependency_flags,
memory_barrier_count,
p_memory_barriers,
buffer_memory_barrier_count,
p_buffer_memory_barriers,
image_memory_barrier_count,
p_image_memory_barriers,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdBeginQuery.html>"]
pub unsafe fn cmd_begin_query(
&self,
command_buffer: CommandBuffer,
query_pool: QueryPool,
query: u32,
flags: QueryControlFlags,
) -> c_void {
(self.cmd_begin_query)(command_buffer, query_pool, query, flags)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdEndQuery.html>"]
pub unsafe fn cmd_end_query(
&self,
command_buffer: CommandBuffer,
query_pool: QueryPool,
query: u32,
) -> c_void {
(self.cmd_end_query)(command_buffer, query_pool, query)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdResetQueryPool.html>"]
pub unsafe fn cmd_reset_query_pool(
&self,
command_buffer: CommandBuffer,
query_pool: QueryPool,
first_query: u32,
query_count: u32,
) -> c_void {
(self.cmd_reset_query_pool)(command_buffer, query_pool, first_query, query_count)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdWriteTimestamp.html>"]
pub unsafe fn cmd_write_timestamp(
&self,
command_buffer: CommandBuffer,
pipeline_stage: PipelineStageFlags,
query_pool: QueryPool,
query: u32,
) -> c_void {
(self.cmd_write_timestamp)(command_buffer, pipeline_stage, query_pool, query)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdCopyQueryPoolResults.html>"]
pub unsafe fn cmd_copy_query_pool_results(
&self,
command_buffer: CommandBuffer,
query_pool: QueryPool,
first_query: u32,
query_count: u32,
dst_buffer: Buffer,
dst_offset: DeviceSize,
stride: DeviceSize,
flags: QueryResultFlags,
) -> c_void {
(self.cmd_copy_query_pool_results)(
command_buffer,
query_pool,
first_query,
query_count,
dst_buffer,
dst_offset,
stride,
flags,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdPushConstants.html>"]
pub unsafe fn cmd_push_constants(
&self,
command_buffer: CommandBuffer,
layout: PipelineLayout,
stage_flags: ShaderStageFlags,
offset: u32,
size: u32,
p_values: *const c_void,
) -> c_void {
(self.cmd_push_constants)(command_buffer, layout, stage_flags, offset, size, p_values)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdBeginRenderPass.html>"]
pub unsafe fn cmd_begin_render_pass(
&self,
command_buffer: CommandBuffer,
p_render_pass_begin: *const RenderPassBeginInfo,
contents: SubpassContents,
) -> c_void {
(self.cmd_begin_render_pass)(command_buffer, p_render_pass_begin, contents)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdNextSubpass.html>"]
pub unsafe fn cmd_next_subpass(
&self,
command_buffer: CommandBuffer,
contents: SubpassContents,
) -> c_void {
(self.cmd_next_subpass)(command_buffer, contents)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdEndRenderPass.html>"]
pub unsafe fn cmd_end_render_pass(&self, command_buffer: CommandBuffer) -> c_void {
(self.cmd_end_render_pass)(command_buffer)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdExecuteCommands.html>"]
pub unsafe fn cmd_execute_commands(
&self,
command_buffer: CommandBuffer,
command_buffer_count: u32,
p_command_buffers: *const CommandBuffer,
) -> c_void {
(self.cmd_execute_commands)(command_buffer, command_buffer_count, p_command_buffers)
}
}
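// Example (sketch): the command wrappers above forward their arguments verbatim to
// the loaded function pointers, so count/pointer pairs and command-buffer state are
// entirely the caller's responsibility. The enclosing table type is assumed here to
// be `DeviceFnV1_0`, mirroring the `DeviceFnV1_1` table defined below; `cmd`, `src`,
// `dst`, and `size` are assumed to be provided by the caller.
#[allow(dead_code)]
unsafe fn example_copy_buffer_raw(
    device_fn_1_0: &DeviceFnV1_0,
    cmd: CommandBuffer,
    src: Buffer,
    dst: Buffer,
    size: DeviceSize,
) {
    let region = BufferCopy {
        src_offset: 0,
        dst_offset: 0,
        size,
    };
    // One region, passed as a raw count/pointer pair exactly as vkCmdCopyBuffer expects.
    device_fn_1_0.cmd_copy_buffer(cmd, src, dst, 1, &region);
}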
#[allow(non_camel_case_types)]
pub type PFN_vkEnumerateInstanceVersion = extern "system" fn(p_api_version: *mut u32) -> Result;
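#[doc = "Vulkan 1.1 entry-level (pre-instance) function pointer table; entries the loader cannot resolve fall back to panicking stubs."]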
pub struct EntryFnV1_1 {
pub enumerate_instance_version: extern "system" fn(p_api_version: *mut u32) -> Result,
}
unsafe impl Send for EntryFnV1_1 {}
unsafe impl Sync for EntryFnV1_1 {}
impl ::std::clone::Clone for EntryFnV1_1 {
fn clone(&self) -> Self {
EntryFnV1_1 {
enumerate_instance_version: self.enumerate_instance_version,
}
}
}
impl EntryFnV1_1 {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
EntryFnV1_1 {
enumerate_instance_version: unsafe {
extern "system" fn enumerate_instance_version(_p_api_version: *mut u32) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(enumerate_instance_version)
))
}
let raw_name = stringify!(vkEnumerateInstanceVersion);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
enumerate_instance_version
} else {
::std::mem::transmute(val)
}
},
}
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkEnumerateInstanceVersion.html>"]
pub unsafe fn enumerate_instance_version(&self, p_api_version: *mut u32) -> Result {
(self.enumerate_instance_version)(p_api_version)
}
}
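// Example (sketch): `EntryFnV1_1::load` accepts any resolver that maps a C function
// name to a function pointer and substitutes a panicking stub wherever the resolver
// returns null. The `resolver` closure here is hypothetical and stands in for
// `vkGetInstanceProcAddr` or an equivalent loader entry point.
#[allow(dead_code)]
unsafe fn example_enumerate_instance_version<F>(resolver: F) -> (Result, u32)
where
    F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
    let entry_v1_1 = EntryFnV1_1::load(resolver);
    let mut api_version: u32 = 0;
    // Writes the packed version into `api_version` on success.
    let result = entry_v1_1.enumerate_instance_version(&mut api_version);
    (result, api_version)
}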
#[allow(non_camel_case_types)]
pub type PFN_vkEnumeratePhysicalDeviceGroups = extern "system" fn(
instance: Instance,
p_physical_device_group_count: *mut u32,
p_physical_device_group_properties: *mut PhysicalDeviceGroupProperties,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkGetPhysicalDeviceFeatures2 = extern "system" fn(
physical_device: PhysicalDevice,
p_features: *mut PhysicalDeviceFeatures2,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkGetPhysicalDeviceProperties2 = extern "system" fn(
physical_device: PhysicalDevice,
p_properties: *mut PhysicalDeviceProperties2,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkGetPhysicalDeviceFormatProperties2 = extern "system" fn(
physical_device: PhysicalDevice,
format: Format,
p_format_properties: *mut FormatProperties2,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkGetPhysicalDeviceImageFormatProperties2 = extern "system" fn(
physical_device: PhysicalDevice,
p_image_format_info: *const PhysicalDeviceImageFormatInfo2,
p_image_format_properties: *mut ImageFormatProperties2,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkGetPhysicalDeviceQueueFamilyProperties2 = extern "system" fn(
physical_device: PhysicalDevice,
p_queue_family_property_count: *mut u32,
p_queue_family_properties: *mut QueueFamilyProperties2,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkGetPhysicalDeviceMemoryProperties2 = extern "system" fn(
physical_device: PhysicalDevice,
p_memory_properties: *mut PhysicalDeviceMemoryProperties2,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkGetPhysicalDeviceSparseImageFormatProperties2 = extern "system" fn(
physical_device: PhysicalDevice,
p_format_info: *const PhysicalDeviceSparseImageFormatInfo2,
p_property_count: *mut u32,
p_properties: *mut SparseImageFormatProperties2,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkGetPhysicalDeviceExternalBufferProperties = extern "system" fn(
physical_device: PhysicalDevice,
p_external_buffer_info: *const PhysicalDeviceExternalBufferInfo,
p_external_buffer_properties: *mut ExternalBufferProperties,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkGetPhysicalDeviceExternalFenceProperties = extern "system" fn(
physical_device: PhysicalDevice,
p_external_fence_info: *const PhysicalDeviceExternalFenceInfo,
p_external_fence_properties: *mut ExternalFenceProperties,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkGetPhysicalDeviceExternalSemaphoreProperties = extern "system" fn(
physical_device: PhysicalDevice,
p_external_semaphore_info: *const PhysicalDeviceExternalSemaphoreInfo,
p_external_semaphore_properties: *mut ExternalSemaphoreProperties,
) -> c_void;
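#[doc = "Vulkan 1.1 instance-level function pointer table (device-group enumeration and the extensible `*2` physical-device queries)."]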
pub struct InstanceFnV1_1 {
pub enumerate_physical_device_groups: extern "system" fn(
instance: Instance,
p_physical_device_group_count: *mut u32,
p_physical_device_group_properties: *mut PhysicalDeviceGroupProperties,
) -> Result,
pub get_physical_device_features2: extern "system" fn(
physical_device: PhysicalDevice,
p_features: *mut PhysicalDeviceFeatures2,
) -> c_void,
pub get_physical_device_properties2: extern "system" fn(
physical_device: PhysicalDevice,
p_properties: *mut PhysicalDeviceProperties2,
) -> c_void,
pub get_physical_device_format_properties2: extern "system" fn(
physical_device: PhysicalDevice,
format: Format,
p_format_properties: *mut FormatProperties2,
) -> c_void,
pub get_physical_device_image_format_properties2: extern "system" fn(
physical_device: PhysicalDevice,
p_image_format_info: *const PhysicalDeviceImageFormatInfo2,
p_image_format_properties: *mut ImageFormatProperties2,
) -> Result,
pub get_physical_device_queue_family_properties2: extern "system" fn(
physical_device: PhysicalDevice,
p_queue_family_property_count: *mut u32,
p_queue_family_properties: *mut QueueFamilyProperties2,
) -> c_void,
pub get_physical_device_memory_properties2: extern "system" fn(
physical_device: PhysicalDevice,
p_memory_properties: *mut PhysicalDeviceMemoryProperties2,
) -> c_void,
pub get_physical_device_sparse_image_format_properties2: extern "system" fn(
physical_device: PhysicalDevice,
p_format_info: *const PhysicalDeviceSparseImageFormatInfo2,
p_property_count: *mut u32,
p_properties: *mut SparseImageFormatProperties2,
) -> c_void,
pub get_physical_device_external_buffer_properties: extern "system" fn(
physical_device: PhysicalDevice,
p_external_buffer_info: *const PhysicalDeviceExternalBufferInfo,
p_external_buffer_properties: *mut ExternalBufferProperties,
) -> c_void,
pub get_physical_device_external_fence_properties: extern "system" fn(
physical_device: PhysicalDevice,
p_external_fence_info: *const PhysicalDeviceExternalFenceInfo,
p_external_fence_properties: *mut ExternalFenceProperties,
) -> c_void,
pub get_physical_device_external_semaphore_properties: extern "system" fn(
physical_device: PhysicalDevice,
p_external_semaphore_info: *const PhysicalDeviceExternalSemaphoreInfo,
p_external_semaphore_properties: *mut ExternalSemaphoreProperties,
) -> c_void,
}
unsafe impl Send for InstanceFnV1_1 {}
unsafe impl Sync for InstanceFnV1_1 {}
impl ::std::clone::Clone for InstanceFnV1_1 {
fn clone(&self) -> Self {
InstanceFnV1_1 {
enumerate_physical_device_groups: self.enumerate_physical_device_groups,
get_physical_device_features2: self.get_physical_device_features2,
get_physical_device_properties2: self.get_physical_device_properties2,
get_physical_device_format_properties2: self.get_physical_device_format_properties2,
get_physical_device_image_format_properties2: self
.get_physical_device_image_format_properties2,
get_physical_device_queue_family_properties2: self
.get_physical_device_queue_family_properties2,
get_physical_device_memory_properties2: self.get_physical_device_memory_properties2,
get_physical_device_sparse_image_format_properties2: self
.get_physical_device_sparse_image_format_properties2,
get_physical_device_external_buffer_properties: self
.get_physical_device_external_buffer_properties,
get_physical_device_external_fence_properties: self
.get_physical_device_external_fence_properties,
get_physical_device_external_semaphore_properties: self
.get_physical_device_external_semaphore_properties,
}
}
}
impl InstanceFnV1_1 {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
InstanceFnV1_1 {
enumerate_physical_device_groups: unsafe {
extern "system" fn enumerate_physical_device_groups(
_instance: Instance,
_p_physical_device_group_count: *mut u32,
_p_physical_device_group_properties: *mut PhysicalDeviceGroupProperties,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(enumerate_physical_device_groups)
))
}
let raw_name = stringify!(vkEnumeratePhysicalDeviceGroups);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
enumerate_physical_device_groups
} else {
::std::mem::transmute(val)
}
},
get_physical_device_features2: unsafe {
extern "system" fn get_physical_device_features2(
_physical_device: PhysicalDevice,
_p_features: *mut PhysicalDeviceFeatures2,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(get_physical_device_features2)
))
}
let raw_name = stringify!(vkGetPhysicalDeviceFeatures2);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_physical_device_features2
} else {
::std::mem::transmute(val)
}
},
get_physical_device_properties2: unsafe {
extern "system" fn get_physical_device_properties2(
_physical_device: PhysicalDevice,
_p_properties: *mut PhysicalDeviceProperties2,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(get_physical_device_properties2)
))
}
let raw_name = stringify!(vkGetPhysicalDeviceProperties2);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_physical_device_properties2
} else {
::std::mem::transmute(val)
}
},
get_physical_device_format_properties2: unsafe {
extern "system" fn get_physical_device_format_properties2(
_physical_device: PhysicalDevice,
_format: Format,
_p_format_properties: *mut FormatProperties2,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(get_physical_device_format_properties2)
))
}
let raw_name = stringify!(vkGetPhysicalDeviceFormatProperties2);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_physical_device_format_properties2
} else {
::std::mem::transmute(val)
}
},
get_physical_device_image_format_properties2: unsafe {
extern "system" fn get_physical_device_image_format_properties2(
_physical_device: PhysicalDevice,
_p_image_format_info: *const PhysicalDeviceImageFormatInfo2,
_p_image_format_properties: *mut ImageFormatProperties2,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(get_physical_device_image_format_properties2)
))
}
let raw_name = stringify!(vkGetPhysicalDeviceImageFormatProperties2);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_physical_device_image_format_properties2
} else {
::std::mem::transmute(val)
}
},
get_physical_device_queue_family_properties2: unsafe {
extern "system" fn get_physical_device_queue_family_properties2(
_physical_device: PhysicalDevice,
_p_queue_family_property_count: *mut u32,
_p_queue_family_properties: *mut QueueFamilyProperties2,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(get_physical_device_queue_family_properties2)
))
}
let raw_name = stringify!(vkGetPhysicalDeviceQueueFamilyProperties2);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_physical_device_queue_family_properties2
} else {
::std::mem::transmute(val)
}
},
get_physical_device_memory_properties2: unsafe {
extern "system" fn get_physical_device_memory_properties2(
_physical_device: PhysicalDevice,
_p_memory_properties: *mut PhysicalDeviceMemoryProperties2,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(get_physical_device_memory_properties2)
))
}
let raw_name = stringify!(vkGetPhysicalDeviceMemoryProperties2);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_physical_device_memory_properties2
} else {
::std::mem::transmute(val)
}
},
get_physical_device_sparse_image_format_properties2: unsafe {
extern "system" fn get_physical_device_sparse_image_format_properties2(
_physical_device: PhysicalDevice,
_p_format_info: *const PhysicalDeviceSparseImageFormatInfo2,
_p_property_count: *mut u32,
_p_properties: *mut SparseImageFormatProperties2,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(get_physical_device_sparse_image_format_properties2)
))
}
let raw_name = stringify!(vkGetPhysicalDeviceSparseImageFormatProperties2);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_physical_device_sparse_image_format_properties2
} else {
::std::mem::transmute(val)
}
},
get_physical_device_external_buffer_properties: unsafe {
extern "system" fn get_physical_device_external_buffer_properties(
_physical_device: PhysicalDevice,
_p_external_buffer_info: *const PhysicalDeviceExternalBufferInfo,
_p_external_buffer_properties: *mut ExternalBufferProperties,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(get_physical_device_external_buffer_properties)
))
}
let raw_name = stringify!(vkGetPhysicalDeviceExternalBufferProperties);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_physical_device_external_buffer_properties
} else {
::std::mem::transmute(val)
}
},
get_physical_device_external_fence_properties: unsafe {
extern "system" fn get_physical_device_external_fence_properties(
_physical_device: PhysicalDevice,
_p_external_fence_info: *const PhysicalDeviceExternalFenceInfo,
_p_external_fence_properties: *mut ExternalFenceProperties,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(get_physical_device_external_fence_properties)
))
}
let raw_name = stringify!(vkGetPhysicalDeviceExternalFenceProperties);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_physical_device_external_fence_properties
} else {
::std::mem::transmute(val)
}
},
get_physical_device_external_semaphore_properties: unsafe {
extern "system" fn get_physical_device_external_semaphore_properties(
_physical_device: PhysicalDevice,
_p_external_semaphore_info: *const PhysicalDeviceExternalSemaphoreInfo,
_p_external_semaphore_properties: *mut ExternalSemaphoreProperties,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(get_physical_device_external_semaphore_properties)
))
}
let raw_name = stringify!(vkGetPhysicalDeviceExternalSemaphoreProperties);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_physical_device_external_semaphore_properties
} else {
::std::mem::transmute(val)
}
},
}
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkEnumeratePhysicalDeviceGroups.html>"]
pub unsafe fn enumerate_physical_device_groups(
&self,
instance: Instance,
p_physical_device_group_count: *mut u32,
p_physical_device_group_properties: *mut PhysicalDeviceGroupProperties,
) -> Result {
(self.enumerate_physical_device_groups)(
instance,
p_physical_device_group_count,
p_physical_device_group_properties,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetPhysicalDeviceFeatures2.html>"]
pub unsafe fn get_physical_device_features2(
&self,
physical_device: PhysicalDevice,
p_features: *mut PhysicalDeviceFeatures2,
) -> c_void {
(self.get_physical_device_features2)(physical_device, p_features)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetPhysicalDeviceProperties2.html>"]
pub unsafe fn get_physical_device_properties2(
&self,
physical_device: PhysicalDevice,
p_properties: *mut PhysicalDeviceProperties2,
) -> c_void {
(self.get_physical_device_properties2)(physical_device, p_properties)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetPhysicalDeviceFormatProperties2.html>"]
pub unsafe fn get_physical_device_format_properties2(
&self,
physical_device: PhysicalDevice,
format: Format,
p_format_properties: *mut FormatProperties2,
) -> c_void {
(self.get_physical_device_format_properties2)(physical_device, format, p_format_properties)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetPhysicalDeviceImageFormatProperties2.html>"]
pub unsafe fn get_physical_device_image_format_properties2(
&self,
physical_device: PhysicalDevice,
p_image_format_info: *const PhysicalDeviceImageFormatInfo2,
p_image_format_properties: *mut ImageFormatProperties2,
) -> Result {
(self.get_physical_device_image_format_properties2)(
physical_device,
p_image_format_info,
p_image_format_properties,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetPhysicalDeviceQueueFamilyProperties2.html>"]
pub unsafe fn get_physical_device_queue_family_properties2(
&self,
physical_device: PhysicalDevice,
p_queue_family_property_count: *mut u32,
p_queue_family_properties: *mut QueueFamilyProperties2,
) -> c_void {
(self.get_physical_device_queue_family_properties2)(
physical_device,
p_queue_family_property_count,
p_queue_family_properties,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetPhysicalDeviceMemoryProperties2.html>"]
pub unsafe fn get_physical_device_memory_properties2(
&self,
physical_device: PhysicalDevice,
p_memory_properties: *mut PhysicalDeviceMemoryProperties2,
) -> c_void {
(self.get_physical_device_memory_properties2)(physical_device, p_memory_properties)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetPhysicalDeviceSparseImageFormatProperties2.html>"]
pub unsafe fn get_physical_device_sparse_image_format_properties2(
&self,
physical_device: PhysicalDevice,
p_format_info: *const PhysicalDeviceSparseImageFormatInfo2,
p_property_count: *mut u32,
p_properties: *mut SparseImageFormatProperties2,
) -> c_void {
(self.get_physical_device_sparse_image_format_properties2)(
physical_device,
p_format_info,
p_property_count,
p_properties,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetPhysicalDeviceExternalBufferProperties.html>"]
pub unsafe fn get_physical_device_external_buffer_properties(
&self,
physical_device: PhysicalDevice,
p_external_buffer_info: *const PhysicalDeviceExternalBufferInfo,
p_external_buffer_properties: *mut ExternalBufferProperties,
) -> c_void {
(self.get_physical_device_external_buffer_properties)(
physical_device,
p_external_buffer_info,
p_external_buffer_properties,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetPhysicalDeviceExternalFenceProperties.html>"]
pub unsafe fn get_physical_device_external_fence_properties(
&self,
physical_device: PhysicalDevice,
p_external_fence_info: *const PhysicalDeviceExternalFenceInfo,
p_external_fence_properties: *mut ExternalFenceProperties,
) -> c_void {
(self.get_physical_device_external_fence_properties)(
physical_device,
p_external_fence_info,
p_external_fence_properties,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetPhysicalDeviceExternalSemaphoreProperties.html>"]
pub unsafe fn get_physical_device_external_semaphore_properties(
&self,
physical_device: PhysicalDevice,
p_external_semaphore_info: *const PhysicalDeviceExternalSemaphoreInfo,
p_external_semaphore_properties: *mut ExternalSemaphoreProperties,
) -> c_void {
(self.get_physical_device_external_semaphore_properties)(
physical_device,
p_external_semaphore_info,
p_external_semaphore_properties,
)
}
}
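// Example (sketch): the `*2` queries in `InstanceFnV1_1` write through extensible
// out-structures. `PhysicalDeviceFeatures2::default()` is assumed to set the matching
// `s_type` and leave `p_next` null, following the conventions of this module; the
// `instance_fn` and `physical_device` values come from the caller.
#[allow(dead_code)]
unsafe fn example_get_features2(
    instance_fn: &InstanceFnV1_1,
    physical_device: PhysicalDevice,
) -> PhysicalDeviceFeatures2 {
    let mut features2 = PhysicalDeviceFeatures2::default();
    // The raw call fills `features2` in place through the *mut pointer.
    instance_fn.get_physical_device_features2(physical_device, &mut features2);
    features2
}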
#[allow(non_camel_case_types)]
pub type PFN_vkBindBufferMemory2 = extern "system" fn(
device: Device,
bind_info_count: u32,
p_bind_infos: *const BindBufferMemoryInfo,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkBindImageMemory2 = extern "system" fn(
device: Device,
bind_info_count: u32,
p_bind_infos: *const BindImageMemoryInfo,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkGetDeviceGroupPeerMemoryFeatures = extern "system" fn(
device: Device,
heap_index: u32,
local_device_index: u32,
remote_device_index: u32,
p_peer_memory_features: *mut PeerMemoryFeatureFlags,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetDeviceMask =
extern "system" fn(command_buffer: CommandBuffer, device_mask: u32) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdDispatchBase = extern "system" fn(
command_buffer: CommandBuffer,
base_group_x: u32,
base_group_y: u32,
base_group_z: u32,
group_count_x: u32,
group_count_y: u32,
group_count_z: u32,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkGetImageMemoryRequirements2 = extern "system" fn(
device: Device,
p_info: *const ImageMemoryRequirementsInfo2,
p_memory_requirements: *mut MemoryRequirements2,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkGetBufferMemoryRequirements2 = extern "system" fn(
device: Device,
p_info: *const BufferMemoryRequirementsInfo2,
p_memory_requirements: *mut MemoryRequirements2,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkGetImageSparseMemoryRequirements2 = extern "system" fn(
device: Device,
p_info: *const ImageSparseMemoryRequirementsInfo2,
p_sparse_memory_requirement_count: *mut u32,
p_sparse_memory_requirements: *mut SparseImageMemoryRequirements2,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkTrimCommandPool = extern "system" fn(
device: Device,
command_pool: CommandPool,
flags: CommandPoolTrimFlags,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkGetDeviceQueue2 = extern "system" fn(
device: Device,
p_queue_info: *const DeviceQueueInfo2,
p_queue: *mut Queue,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCreateSamplerYcbcrConversion = extern "system" fn(
device: Device,
p_create_info: *const SamplerYcbcrConversionCreateInfo,
p_allocator: *const AllocationCallbacks,
p_ycbcr_conversion: *mut SamplerYcbcrConversion,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkDestroySamplerYcbcrConversion = extern "system" fn(
device: Device,
ycbcr_conversion: SamplerYcbcrConversion,
p_allocator: *const AllocationCallbacks,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCreateDescriptorUpdateTemplate = extern "system" fn(
device: Device,
p_create_info: *const DescriptorUpdateTemplateCreateInfo,
p_allocator: *const AllocationCallbacks,
p_descriptor_update_template: *mut DescriptorUpdateTemplate,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkDestroyDescriptorUpdateTemplate = extern "system" fn(
device: Device,
descriptor_update_template: DescriptorUpdateTemplate,
p_allocator: *const AllocationCallbacks,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkUpdateDescriptorSetWithTemplate = extern "system" fn(
device: Device,
descriptor_set: DescriptorSet,
descriptor_update_template: DescriptorUpdateTemplate,
p_data: *const c_void,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkGetDescriptorSetLayoutSupport = extern "system" fn(
device: Device,
p_create_info: *const DescriptorSetLayoutCreateInfo,
p_support: *mut DescriptorSetLayoutSupport,
) -> c_void;
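#[doc = "Vulkan 1.1 device-level function pointer table (batched memory binding, `*2` requirement queries, sampler Y'CbCr conversions, and descriptor update templates)."]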
pub struct DeviceFnV1_1 {
pub bind_buffer_memory2: extern "system" fn(
device: Device,
bind_info_count: u32,
p_bind_infos: *const BindBufferMemoryInfo,
) -> Result,
pub bind_image_memory2: extern "system" fn(
device: Device,
bind_info_count: u32,
p_bind_infos: *const BindImageMemoryInfo,
) -> Result,
pub get_device_group_peer_memory_features: extern "system" fn(
device: Device,
heap_index: u32,
local_device_index: u32,
remote_device_index: u32,
p_peer_memory_features: *mut PeerMemoryFeatureFlags,
) -> c_void,
pub cmd_set_device_mask:
extern "system" fn(command_buffer: CommandBuffer, device_mask: u32) -> c_void,
pub cmd_dispatch_base: extern "system" fn(
command_buffer: CommandBuffer,
base_group_x: u32,
base_group_y: u32,
base_group_z: u32,
group_count_x: u32,
group_count_y: u32,
group_count_z: u32,
) -> c_void,
pub get_image_memory_requirements2: extern "system" fn(
device: Device,
p_info: *const ImageMemoryRequirementsInfo2,
p_memory_requirements: *mut MemoryRequirements2,
) -> c_void,
pub get_buffer_memory_requirements2: extern "system" fn(
device: Device,
p_info: *const BufferMemoryRequirementsInfo2,
p_memory_requirements: *mut MemoryRequirements2,
) -> c_void,
pub get_image_sparse_memory_requirements2: extern "system" fn(
device: Device,
p_info: *const ImageSparseMemoryRequirementsInfo2,
p_sparse_memory_requirement_count: *mut u32,
p_sparse_memory_requirements: *mut SparseImageMemoryRequirements2,
) -> c_void,
pub trim_command_pool: extern "system" fn(
device: Device,
command_pool: CommandPool,
flags: CommandPoolTrimFlags,
) -> c_void,
pub get_device_queue2: extern "system" fn(
device: Device,
p_queue_info: *const DeviceQueueInfo2,
p_queue: *mut Queue,
) -> c_void,
pub create_sampler_ycbcr_conversion: extern "system" fn(
device: Device,
p_create_info: *const SamplerYcbcrConversionCreateInfo,
p_allocator: *const AllocationCallbacks,
p_ycbcr_conversion: *mut SamplerYcbcrConversion,
) -> Result,
pub destroy_sampler_ycbcr_conversion: extern "system" fn(
device: Device,
ycbcr_conversion: SamplerYcbcrConversion,
p_allocator: *const AllocationCallbacks,
) -> c_void,
pub create_descriptor_update_template: extern "system" fn(
device: Device,
p_create_info: *const DescriptorUpdateTemplateCreateInfo,
p_allocator: *const AllocationCallbacks,
p_descriptor_update_template: *mut DescriptorUpdateTemplate,
) -> Result,
pub destroy_descriptor_update_template: extern "system" fn(
device: Device,
descriptor_update_template: DescriptorUpdateTemplate,
p_allocator: *const AllocationCallbacks,
) -> c_void,
pub update_descriptor_set_with_template: extern "system" fn(
device: Device,
descriptor_set: DescriptorSet,
descriptor_update_template: DescriptorUpdateTemplate,
p_data: *const c_void,
) -> c_void,
pub get_descriptor_set_layout_support: extern "system" fn(
device: Device,
p_create_info: *const DescriptorSetLayoutCreateInfo,
p_support: *mut DescriptorSetLayoutSupport,
) -> c_void,
}
unsafe impl Send for DeviceFnV1_1 {}
unsafe impl Sync for DeviceFnV1_1 {}
impl ::std::clone::Clone for DeviceFnV1_1 {
fn clone(&self) -> Self {
DeviceFnV1_1 {
bind_buffer_memory2: self.bind_buffer_memory2,
bind_image_memory2: self.bind_image_memory2,
get_device_group_peer_memory_features: self.get_device_group_peer_memory_features,
cmd_set_device_mask: self.cmd_set_device_mask,
cmd_dispatch_base: self.cmd_dispatch_base,
get_image_memory_requirements2: self.get_image_memory_requirements2,
get_buffer_memory_requirements2: self.get_buffer_memory_requirements2,
get_image_sparse_memory_requirements2: self.get_image_sparse_memory_requirements2,
trim_command_pool: self.trim_command_pool,
get_device_queue2: self.get_device_queue2,
create_sampler_ycbcr_conversion: self.create_sampler_ycbcr_conversion,
destroy_sampler_ycbcr_conversion: self.destroy_sampler_ycbcr_conversion,
create_descriptor_update_template: self.create_descriptor_update_template,
destroy_descriptor_update_template: self.destroy_descriptor_update_template,
update_descriptor_set_with_template: self.update_descriptor_set_with_template,
get_descriptor_set_layout_support: self.get_descriptor_set_layout_support,
}
}
}
impl DeviceFnV1_1 {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
DeviceFnV1_1 {
bind_buffer_memory2: unsafe {
extern "system" fn bind_buffer_memory2(
_device: Device,
_bind_info_count: u32,
_p_bind_infos: *const BindBufferMemoryInfo,
) -> Result {
panic!(concat!("Unable to load ", stringify!(bind_buffer_memory2)))
}
let raw_name = stringify!(vkBindBufferMemory2);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
bind_buffer_memory2
} else {
::std::mem::transmute(val)
}
},
bind_image_memory2: unsafe {
extern "system" fn bind_image_memory2(
_device: Device,
_bind_info_count: u32,
_p_bind_infos: *const BindImageMemoryInfo,
) -> Result {
panic!(concat!("Unable to load ", stringify!(bind_image_memory2)))
}
let raw_name = stringify!(vkBindImageMemory2);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
bind_image_memory2
} else {
::std::mem::transmute(val)
}
},
get_device_group_peer_memory_features: unsafe {
extern "system" fn get_device_group_peer_memory_features(
_device: Device,
_heap_index: u32,
_local_device_index: u32,
_remote_device_index: u32,
_p_peer_memory_features: *mut PeerMemoryFeatureFlags,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(get_device_group_peer_memory_features)
))
}
let raw_name = stringify!(vkGetDeviceGroupPeerMemoryFeatures);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_device_group_peer_memory_features
} else {
::std::mem::transmute(val)
}
},
cmd_set_device_mask: unsafe {
extern "system" fn cmd_set_device_mask(
_command_buffer: CommandBuffer,
_device_mask: u32,
) -> c_void {
panic!(concat!("Unable to load ", stringify!(cmd_set_device_mask)))
}
let raw_name = stringify!(vkCmdSetDeviceMask);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_set_device_mask
} else {
::std::mem::transmute(val)
}
},
cmd_dispatch_base: unsafe {
extern "system" fn cmd_dispatch_base(
_command_buffer: CommandBuffer,
_base_group_x: u32,
_base_group_y: u32,
_base_group_z: u32,
_group_count_x: u32,
_group_count_y: u32,
_group_count_z: u32,
) -> c_void {
panic!(concat!("Unable to load ", stringify!(cmd_dispatch_base)))
}
let raw_name = stringify!(vkCmdDispatchBase);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_dispatch_base
} else {
::std::mem::transmute(val)
}
},
get_image_memory_requirements2: unsafe {
extern "system" fn get_image_memory_requirements2(
_device: Device,
_p_info: *const ImageMemoryRequirementsInfo2,
_p_memory_requirements: *mut MemoryRequirements2,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(get_image_memory_requirements2)
))
}
let raw_name = stringify!(vkGetImageMemoryRequirements2);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_image_memory_requirements2
} else {
::std::mem::transmute(val)
}
},
get_buffer_memory_requirements2: unsafe {
extern "system" fn get_buffer_memory_requirements2(
_device: Device,
_p_info: *const BufferMemoryRequirementsInfo2,
_p_memory_requirements: *mut MemoryRequirements2,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(get_buffer_memory_requirements2)
))
}
let raw_name = stringify!(vkGetBufferMemoryRequirements2);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_buffer_memory_requirements2
} else {
::std::mem::transmute(val)
}
},
get_image_sparse_memory_requirements2: unsafe {
extern "system" fn get_image_sparse_memory_requirements2(
_device: Device,
_p_info: *const ImageSparseMemoryRequirementsInfo2,
_p_sparse_memory_requirement_count: *mut u32,
_p_sparse_memory_requirements: *mut SparseImageMemoryRequirements2,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(get_image_sparse_memory_requirements2)
))
}
let raw_name = stringify!(vkGetImageSparseMemoryRequirements2);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_image_sparse_memory_requirements2
} else {
::std::mem::transmute(val)
}
},
trim_command_pool: unsafe {
extern "system" fn trim_command_pool(
_device: Device,
_command_pool: CommandPool,
_flags: CommandPoolTrimFlags,
) -> c_void {
panic!(concat!("Unable to load ", stringify!(trim_command_pool)))
}
let raw_name = stringify!(vkTrimCommandPool);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
trim_command_pool
} else {
::std::mem::transmute(val)
}
},
get_device_queue2: unsafe {
extern "system" fn get_device_queue2(
_device: Device,
_p_queue_info: *const DeviceQueueInfo2,
_p_queue: *mut Queue,
) -> c_void {
panic!(concat!("Unable to load ", stringify!(get_device_queue2)))
}
let raw_name = stringify!(vkGetDeviceQueue2);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_device_queue2
} else {
::std::mem::transmute(val)
}
},
create_sampler_ycbcr_conversion: unsafe {
extern "system" fn create_sampler_ycbcr_conversion(
_device: Device,
_p_create_info: *const SamplerYcbcrConversionCreateInfo,
_p_allocator: *const AllocationCallbacks,
_p_ycbcr_conversion: *mut SamplerYcbcrConversion,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(create_sampler_ycbcr_conversion)
))
}
let raw_name = stringify!(vkCreateSamplerYcbcrConversion);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
create_sampler_ycbcr_conversion
} else {
::std::mem::transmute(val)
}
},
destroy_sampler_ycbcr_conversion: unsafe {
extern "system" fn destroy_sampler_ycbcr_conversion(
_device: Device,
_ycbcr_conversion: SamplerYcbcrConversion,
_p_allocator: *const AllocationCallbacks,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(destroy_sampler_ycbcr_conversion)
))
}
let raw_name = stringify!(vkDestroySamplerYcbcrConversion);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
destroy_sampler_ycbcr_conversion
} else {
::std::mem::transmute(val)
}
},
create_descriptor_update_template: unsafe {
extern "system" fn create_descriptor_update_template(
_device: Device,
_p_create_info: *const DescriptorUpdateTemplateCreateInfo,
_p_allocator: *const AllocationCallbacks,
_p_descriptor_update_template: *mut DescriptorUpdateTemplate,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(create_descriptor_update_template)
))
}
let raw_name = stringify!(vkCreateDescriptorUpdateTemplate);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
create_descriptor_update_template
} else {
::std::mem::transmute(val)
}
},
destroy_descriptor_update_template: unsafe {
extern "system" fn destroy_descriptor_update_template(
_device: Device,
_descriptor_update_template: DescriptorUpdateTemplate,
_p_allocator: *const AllocationCallbacks,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(destroy_descriptor_update_template)
))
}
let raw_name = stringify!(vkDestroyDescriptorUpdateTemplate);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
destroy_descriptor_update_template
} else {
::std::mem::transmute(val)
}
},
update_descriptor_set_with_template: unsafe {
extern "system" fn update_descriptor_set_with_template(
_device: Device,
_descriptor_set: DescriptorSet,
_descriptor_update_template: DescriptorUpdateTemplate,
_p_data: *const c_void,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(update_descriptor_set_with_template)
))
}
let raw_name = stringify!(vkUpdateDescriptorSetWithTemplate);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
update_descriptor_set_with_template
} else {
::std::mem::transmute(val)
}
},
get_descriptor_set_layout_support: unsafe {
extern "system" fn get_descriptor_set_layout_support(
_device: Device,
_p_create_info: *const DescriptorSetLayoutCreateInfo,
_p_support: *mut DescriptorSetLayoutSupport,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(get_descriptor_set_layout_support)
))
}
let raw_name = stringify!(vkGetDescriptorSetLayoutSupport);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_descriptor_set_layout_support
} else {
::std::mem::transmute(val)
}
},
}
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkBindBufferMemory2.html>"]
pub unsafe fn bind_buffer_memory2(
&self,
device: Device,
bind_info_count: u32,
p_bind_infos: *const BindBufferMemoryInfo,
) -> Result {
(self.bind_buffer_memory2)(device, bind_info_count, p_bind_infos)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkBindImageMemory2.html>"]
pub unsafe fn bind_image_memory2(
&self,
device: Device,
bind_info_count: u32,
p_bind_infos: *const BindImageMemoryInfo,
) -> Result {
(self.bind_image_memory2)(device, bind_info_count, p_bind_infos)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetDeviceGroupPeerMemoryFeatures.html>"]
pub unsafe fn get_device_group_peer_memory_features(
&self,
device: Device,
heap_index: u32,
local_device_index: u32,
remote_device_index: u32,
p_peer_memory_features: *mut PeerMemoryFeatureFlags,
) -> c_void {
(self.get_device_group_peer_memory_features)(
device,
heap_index,
local_device_index,
remote_device_index,
p_peer_memory_features,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdSetDeviceMask.html>"]
pub unsafe fn cmd_set_device_mask(
&self,
command_buffer: CommandBuffer,
device_mask: u32,
) -> c_void {
(self.cmd_set_device_mask)(command_buffer, device_mask)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdDispatchBase.html>"]
pub unsafe fn cmd_dispatch_base(
&self,
command_buffer: CommandBuffer,
base_group_x: u32,
base_group_y: u32,
base_group_z: u32,
group_count_x: u32,
group_count_y: u32,
group_count_z: u32,
) -> c_void {
(self.cmd_dispatch_base)(
command_buffer,
base_group_x,
base_group_y,
base_group_z,
group_count_x,
group_count_y,
group_count_z,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetImageMemoryRequirements2.html>"]
pub unsafe fn get_image_memory_requirements2(
&self,
device: Device,
p_info: *const ImageMemoryRequirementsInfo2,
p_memory_requirements: *mut MemoryRequirements2,
) -> c_void {
(self.get_image_memory_requirements2)(device, p_info, p_memory_requirements)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetBufferMemoryRequirements2.html>"]
pub unsafe fn get_buffer_memory_requirements2(
&self,
device: Device,
p_info: *const BufferMemoryRequirementsInfo2,
p_memory_requirements: *mut MemoryRequirements2,
) -> c_void {
(self.get_buffer_memory_requirements2)(device, p_info, p_memory_requirements)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetImageSparseMemoryRequirements2.html>"]
pub unsafe fn get_image_sparse_memory_requirements2(
&self,
device: Device,
p_info: *const ImageSparseMemoryRequirementsInfo2,
p_sparse_memory_requirement_count: *mut u32,
p_sparse_memory_requirements: *mut SparseImageMemoryRequirements2,
) -> c_void {
(self.get_image_sparse_memory_requirements2)(
device,
p_info,
p_sparse_memory_requirement_count,
p_sparse_memory_requirements,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkTrimCommandPool.html>"]
pub unsafe fn trim_command_pool(
&self,
device: Device,
command_pool: CommandPool,
flags: CommandPoolTrimFlags,
) -> c_void {
(self.trim_command_pool)(device, command_pool, flags)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetDeviceQueue2.html>"]
pub unsafe fn get_device_queue2(
&self,
device: Device,
p_queue_info: *const DeviceQueueInfo2,
p_queue: *mut Queue,
) -> c_void {
(self.get_device_queue2)(device, p_queue_info, p_queue)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCreateSamplerYcbcrConversion.html>"]
pub unsafe fn create_sampler_ycbcr_conversion(
&self,
device: Device,
p_create_info: *const SamplerYcbcrConversionCreateInfo,
p_allocator: *const AllocationCallbacks,
p_ycbcr_conversion: *mut SamplerYcbcrConversion,
) -> Result {
(self.create_sampler_ycbcr_conversion)(
device,
p_create_info,
p_allocator,
p_ycbcr_conversion,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkDestroySamplerYcbcrConversion.html>"]
pub unsafe fn destroy_sampler_ycbcr_conversion(
&self,
device: Device,
ycbcr_conversion: SamplerYcbcrConversion,
p_allocator: *const AllocationCallbacks,
) -> c_void {
(self.destroy_sampler_ycbcr_conversion)(device, ycbcr_conversion, p_allocator)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCreateDescriptorUpdateTemplate.html>"]
pub unsafe fn create_descriptor_update_template(
&self,
device: Device,
p_create_info: *const DescriptorUpdateTemplateCreateInfo,
p_allocator: *const AllocationCallbacks,
p_descriptor_update_template: *mut DescriptorUpdateTemplate,
) -> Result {
(self.create_descriptor_update_template)(
device,
p_create_info,
p_allocator,
p_descriptor_update_template,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkDestroyDescriptorUpdateTemplate.html>"]
pub unsafe fn destroy_descriptor_update_template(
&self,
device: Device,
descriptor_update_template: DescriptorUpdateTemplate,
p_allocator: *const AllocationCallbacks,
) -> c_void {
(self.destroy_descriptor_update_template)(device, descriptor_update_template, p_allocator)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkUpdateDescriptorSetWithTemplate.html>"]
pub unsafe fn update_descriptor_set_with_template(
&self,
device: Device,
descriptor_set: DescriptorSet,
descriptor_update_template: DescriptorUpdateTemplate,
p_data: *const c_void,
) -> c_void {
(self.update_descriptor_set_with_template)(
device,
descriptor_set,
descriptor_update_template,
p_data,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetDescriptorSetLayoutSupport.html>"]
pub unsafe fn get_descriptor_set_layout_support(
&self,
device: Device,
p_create_info: *const DescriptorSetLayoutCreateInfo,
p_support: *mut DescriptorSetLayoutSupport,
) -> c_void {
(self.get_descriptor_set_layout_support)(device, p_create_info, p_support)
}
}
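// Example (sketch): querying buffer memory requirements through the 1.1 device table.
// `Default::default()` is assumed to initialize `s_type` and leave `p_next` null on
// both structures, following the conventions of this module; struct-update syntax
// then fills in only the queried buffer handle.
#[allow(dead_code)]
unsafe fn example_buffer_requirements2(
    device_fn_1_1: &DeviceFnV1_1,
    device: Device,
    buffer: Buffer,
) -> MemoryRequirements2 {
    let info = BufferMemoryRequirementsInfo2 {
        buffer,
        ..Default::default()
    };
    let mut requirements = MemoryRequirements2::default();
    device_fn_1_1.get_buffer_memory_requirements2(device, &info, &mut requirements);
    requirements
}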
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkSampleMask.html>"]
pub type SampleMask = u32;
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkBool32.html>"]
pub type Bool32 = u32;
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkFlags.html>"]
pub type Flags = u32;
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDeviceSize.html>"]
pub type DeviceSize = u64;
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDeviceAddress.html>"]
pub type DeviceAddress = u64;
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkFramebufferCreateFlags.html>"]
pub struct FramebufferCreateFlags(Flags);
vk_bitflags_wrapped!(FramebufferCreateFlags, 0b0, Flags);
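// Example (sketch): every type declared with `vk_bitflags_wrapped!` shares the same
// value-level API (`empty`, `all`, `from_raw`, `as_raw`, `contains`, and the bit
// operators). These reserved create-flag types are generated with an all-mask of
// `0b0`, so `empty()` is the only defined value today, but raw values still
// round-trip through `from_raw`/`as_raw`.
#[allow(dead_code)]
fn example_flags_roundtrip(raw: Flags) -> bool {
    let flags = FramebufferCreateFlags::from_raw(raw);
    flags.as_raw() == raw && FramebufferCreateFlags::empty().is_empty()
}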
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkQueryPoolCreateFlags.html>"]
pub struct QueryPoolCreateFlags(Flags);
vk_bitflags_wrapped!(QueryPoolCreateFlags, 0b0, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPipelineLayoutCreateFlags.html>"]
pub struct PipelineLayoutCreateFlags(Flags);
vk_bitflags_wrapped!(PipelineLayoutCreateFlags, 0b0, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPipelineCacheCreateFlags.html>"]
pub struct PipelineCacheCreateFlags(Flags);
vk_bitflags_wrapped!(PipelineCacheCreateFlags, 0b0, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPipelineDepthStencilStateCreateFlags.html>"]
pub struct PipelineDepthStencilStateCreateFlags(Flags);
vk_bitflags_wrapped!(PipelineDepthStencilStateCreateFlags, 0b0, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPipelineDynamicStateCreateFlags.html>"]
pub struct PipelineDynamicStateCreateFlags(Flags);
vk_bitflags_wrapped!(PipelineDynamicStateCreateFlags, 0b0, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPipelineColorBlendStateCreateFlags.html>"]
pub struct PipelineColorBlendStateCreateFlags(Flags);
vk_bitflags_wrapped!(PipelineColorBlendStateCreateFlags, 0b0, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPipelineMultisampleStateCreateFlags.html>"]
pub struct PipelineMultisampleStateCreateFlags(Flags);
vk_bitflags_wrapped!(PipelineMultisampleStateCreateFlags, 0b0, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPipelineRasterizationStateCreateFlags.html>"]
pub struct PipelineRasterizationStateCreateFlags(Flags);
vk_bitflags_wrapped!(PipelineRasterizationStateCreateFlags, 0b0, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPipelineViewportStateCreateFlags.html>"]
pub struct PipelineViewportStateCreateFlags(Flags);
vk_bitflags_wrapped!(PipelineViewportStateCreateFlags, 0b0, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPipelineTessellationStateCreateFlags.html>"]
pub struct PipelineTessellationStateCreateFlags(Flags);
vk_bitflags_wrapped!(PipelineTessellationStateCreateFlags, 0b0, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPipelineInputAssemblyStateCreateFlags.html>"]
pub struct PipelineInputAssemblyStateCreateFlags(Flags);
vk_bitflags_wrapped!(PipelineInputAssemblyStateCreateFlags, 0b0, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPipelineVertexInputStateCreateFlags.html>"]
pub struct PipelineVertexInputStateCreateFlags(Flags);
vk_bitflags_wrapped!(PipelineVertexInputStateCreateFlags, 0b0, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPipelineShaderStageCreateFlags.html>"]
pub struct PipelineShaderStageCreateFlags(Flags);
vk_bitflags_wrapped!(PipelineShaderStageCreateFlags, 0b0, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkBufferViewCreateFlags.html>"]
pub struct BufferViewCreateFlags(Flags);
vk_bitflags_wrapped!(BufferViewCreateFlags, 0b0, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkInstanceCreateFlags.html>"]
pub struct InstanceCreateFlags(Flags);
vk_bitflags_wrapped!(InstanceCreateFlags, 0b0, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDeviceCreateFlags.html>"]
pub struct DeviceCreateFlags(Flags);
vk_bitflags_wrapped!(DeviceCreateFlags, 0b0, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkSemaphoreCreateFlags.html>"]
pub struct SemaphoreCreateFlags(Flags);
vk_bitflags_wrapped!(SemaphoreCreateFlags, 0b0, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkShaderModuleCreateFlags.html>"]
pub struct ShaderModuleCreateFlags(Flags);
vk_bitflags_wrapped!(ShaderModuleCreateFlags, 0b0, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkEventCreateFlags.html>"]
pub struct EventCreateFlags(Flags);
vk_bitflags_wrapped!(EventCreateFlags, 0b0, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkMemoryMapFlags.html>"]
pub struct MemoryMapFlags(Flags);
vk_bitflags_wrapped!(MemoryMapFlags, 0b0, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDescriptorPoolResetFlags.html>"]
pub struct DescriptorPoolResetFlags(Flags);
vk_bitflags_wrapped!(DescriptorPoolResetFlags, 0b0, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDescriptorUpdateTemplateCreateFlags.html>"]
pub struct DescriptorUpdateTemplateCreateFlags(Flags);
vk_bitflags_wrapped!(DescriptorUpdateTemplateCreateFlags, 0b0, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDisplayModeCreateFlagsKHR.html>"]
pub struct DisplayModeCreateFlagsKHR(Flags);
vk_bitflags_wrapped!(DisplayModeCreateFlagsKHR, 0b0, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDisplaySurfaceCreateFlagsKHR.html>"]
pub struct DisplaySurfaceCreateFlagsKHR(Flags);
vk_bitflags_wrapped!(DisplaySurfaceCreateFlagsKHR, 0b0, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkAndroidSurfaceCreateFlagsKHR.html>"]
pub struct AndroidSurfaceCreateFlagsKHR(Flags);
vk_bitflags_wrapped!(AndroidSurfaceCreateFlagsKHR, 0b0, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkViSurfaceCreateFlagsNN.html>"]
pub struct ViSurfaceCreateFlagsNN(Flags);
vk_bitflags_wrapped!(ViSurfaceCreateFlagsNN, 0b0, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkWaylandSurfaceCreateFlagsKHR.html>"]
pub struct WaylandSurfaceCreateFlagsKHR(Flags);
vk_bitflags_wrapped!(WaylandSurfaceCreateFlagsKHR, 0b0, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkWin32SurfaceCreateFlagsKHR.html>"]
pub struct Win32SurfaceCreateFlagsKHR(Flags);
vk_bitflags_wrapped!(Win32SurfaceCreateFlagsKHR, 0b0, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkXlibSurfaceCreateFlagsKHR.html>"]
pub struct XlibSurfaceCreateFlagsKHR(Flags);
vk_bitflags_wrapped!(XlibSurfaceCreateFlagsKHR, 0b0, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkXcbSurfaceCreateFlagsKHR.html>"]
pub struct XcbSurfaceCreateFlagsKHR(Flags);
vk_bitflags_wrapped!(XcbSurfaceCreateFlagsKHR, 0b0, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkIOSSurfaceCreateFlagsMVK.html>"]
pub struct IOSSurfaceCreateFlagsMVK(Flags);
vk_bitflags_wrapped!(IOSSurfaceCreateFlagsMVK, 0b0, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkMacOSSurfaceCreateFlagsMVK.html>"]
pub struct MacOSSurfaceCreateFlagsMVK(Flags);
vk_bitflags_wrapped!(MacOSSurfaceCreateFlagsMVK, 0b0, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkImagePipeSurfaceCreateFlagsFUCHSIA.html>"]
pub struct ImagePipeSurfaceCreateFlagsFUCHSIA(Flags);
vk_bitflags_wrapped!(ImagePipeSurfaceCreateFlagsFUCHSIA, 0b0, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkCommandPoolTrimFlags.html>"]
pub struct CommandPoolTrimFlags(Flags);
vk_bitflags_wrapped!(CommandPoolTrimFlags, 0b0, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPipelineViewportSwizzleStateCreateFlagsNV.html>"]
pub struct PipelineViewportSwizzleStateCreateFlagsNV(Flags);
vk_bitflags_wrapped!(PipelineViewportSwizzleStateCreateFlagsNV, 0b0, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPipelineDiscardRectangleStateCreateFlagsEXT.html>"]
pub struct PipelineDiscardRectangleStateCreateFlagsEXT(Flags);
vk_bitflags_wrapped!(PipelineDiscardRectangleStateCreateFlagsEXT, 0b0, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPipelineCoverageToColorStateCreateFlagsNV.html>"]
pub struct PipelineCoverageToColorStateCreateFlagsNV(Flags);
vk_bitflags_wrapped!(PipelineCoverageToColorStateCreateFlagsNV, 0b0, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPipelineCoverageModulationStateCreateFlagsNV.html>"]
pub struct PipelineCoverageModulationStateCreateFlagsNV(Flags);
vk_bitflags_wrapped!(PipelineCoverageModulationStateCreateFlagsNV, 0b0, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkValidationCacheCreateFlagsEXT.html>"]
pub struct ValidationCacheCreateFlagsEXT(Flags);
vk_bitflags_wrapped!(ValidationCacheCreateFlagsEXT, 0b0, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDebugUtilsMessengerCreateFlagsEXT.html>"]
pub struct DebugUtilsMessengerCreateFlagsEXT(Flags);
vk_bitflags_wrapped!(DebugUtilsMessengerCreateFlagsEXT, 0b0, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDebugUtilsMessengerCallbackDataFlagsEXT.html>"]
pub struct DebugUtilsMessengerCallbackDataFlagsEXT(Flags);
vk_bitflags_wrapped!(DebugUtilsMessengerCallbackDataFlagsEXT, 0b0, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPipelineRasterizationConservativeStateCreateFlagsEXT.html>"]
pub struct PipelineRasterizationConservativeStateCreateFlagsEXT(Flags);
vk_bitflags_wrapped!(
PipelineRasterizationConservativeStateCreateFlagsEXT,
0b0,
Flags
);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPipelineRasterizationStateStreamCreateFlagsEXT.html>"]
pub struct PipelineRasterizationStateStreamCreateFlagsEXT(Flags);
vk_bitflags_wrapped!(PipelineRasterizationStateStreamCreateFlagsEXT, 0b0, Flags);
define_handle!(
Instance,
INSTANCE,
doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkInstance.html>"
);
define_handle!(
    PhysicalDevice,
    PHYSICAL_DEVICE,
    doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDevice.html>"
);
define_handle!(
Device,
DEVICE,
doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDevice.html>"
);
define_handle!(
Queue,
QUEUE,
doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkQueue.html>"
);
define_handle!(
    CommandBuffer,
    COMMAND_BUFFER,
    doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkCommandBuffer.html>"
);
handle_nondispatchable!(
    DeviceMemory,
    DEVICE_MEMORY,
    doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDeviceMemory.html>"
);
handle_nondispatchable!(
    CommandPool,
    COMMAND_POOL,
    doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkCommandPool.html>"
);
handle_nondispatchable!(
Buffer,
BUFFER,
doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkBuffer.html>"
);
handle_nondispatchable!(
BufferView,
BUFFER_VIEW,
doc =
"<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkBufferView.html>"
);
handle_nondispatchable!(
Image,
IMAGE,
doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkImage.html>"
);
handle_nondispatchable!(
ImageView,
IMAGE_VIEW,
doc =
"<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkImageView.html>"
);
handle_nondispatchable!(
    ShaderModule,
    SHADER_MODULE,
    doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkShaderModule.html>"
);
handle_nondispatchable!(
Pipeline,
PIPELINE,
doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPipeline.html>"
);
handle_nondispatchable!(
    PipelineLayout,
    PIPELINE_LAYOUT,
    doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPipelineLayout.html>"
);
handle_nondispatchable!(
Sampler,
SAMPLER,
doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkSampler.html>"
);
handle_nondispatchable!(
    DescriptorSet,
    DESCRIPTOR_SET,
    doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDescriptorSet.html>"
);
handle_nondispatchable!(
    DescriptorSetLayout,
    DESCRIPTOR_SET_LAYOUT,
    doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDescriptorSetLayout.html>"
);
handle_nondispatchable!(
    DescriptorPool,
    DESCRIPTOR_POOL,
    doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDescriptorPool.html>"
);
handle_nondispatchable!(
Fence,
FENCE,
doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkFence.html>"
);
handle_nondispatchable!(
Semaphore,
SEMAPHORE,
doc =
"<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkSemaphore.html>"
);
handle_nondispatchable!(
Event,
EVENT,
doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkEvent.html>"
);
handle_nondispatchable!(
QueryPool,
QUERY_POOL,
doc =
"<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkQueryPool.html>"
);
handle_nondispatchable!(
    Framebuffer,
    FRAMEBUFFER,
    doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkFramebuffer.html>"
);
handle_nondispatchable!(
RenderPass,
RENDER_PASS,
doc =
"<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkRenderPass.html>"
);
handle_nondispatchable!(
    PipelineCache,
    PIPELINE_CACHE,
    doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPipelineCache.html>"
);
handle_nondispatchable!(
    ObjectTableNVX,
    OBJECT_TABLE_NVX,
    doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkObjectTableNVX.html>"
);
handle_nondispatchable!(
    IndirectCommandsLayoutNVX,
    INDIRECT_COMMANDS_LAYOUT_NVX,
    doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkIndirectCommandsLayoutNVX.html>"
);
handle_nondispatchable!(
    DescriptorUpdateTemplate,
    DESCRIPTOR_UPDATE_TEMPLATE,
    doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDescriptorUpdateTemplate.html>"
);
handle_nondispatchable!(
    SamplerYcbcrConversion,
    SAMPLER_YCBCR_CONVERSION,
    doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkSamplerYcbcrConversion.html>"
);
handle_nondispatchable!(
    ValidationCacheEXT,
    VALIDATION_CACHE_EXT,
    doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkValidationCacheEXT.html>"
);
handle_nondispatchable!(
    AccelerationStructureNV,
    ACCELERATION_STRUCTURE_NV,
    doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkAccelerationStructureNV.html>"
);
handle_nondispatchable!(
DisplayKHR,
DISPLAY_KHR,
doc =
"<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDisplayKHR.html>"
);
handle_nondispatchable!(
    DisplayModeKHR,
    DISPLAY_MODE_KHR,
    doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDisplayModeKHR.html>"
);
handle_nondispatchable!(
SurfaceKHR,
SURFACE_KHR,
doc =
"<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkSurfaceKHR.html>"
);
handle_nondispatchable!(
    SwapchainKHR,
    SWAPCHAIN_KHR,
    doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkSwapchainKHR.html>"
);
handle_nondispatchable!(
    DebugReportCallbackEXT,
    DEBUG_REPORT_CALLBACK_EXT,
    doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDebugReportCallbackEXT.html>"
);
handle_nondispatchable!(
    DebugUtilsMessengerEXT,
    DEBUG_UTILS_MESSENGER_EXT,
    doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDebugUtilsMessengerEXT.html>"
);
#[allow(non_camel_case_types)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/PFN_vkInternalAllocationNotification.html>"]
pub type PFN_vkInternalAllocationNotification = Option<
unsafe extern "system" fn(
p_user_data: *mut c_void,
size: usize,
allocation_type: InternalAllocationType,
allocation_scope: SystemAllocationScope,
) -> c_void,
>;
#[allow(non_camel_case_types)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/PFN_vkInternalFreeNotification.html>"]
pub type PFN_vkInternalFreeNotification = Option<
unsafe extern "system" fn(
p_user_data: *mut c_void,
size: usize,
allocation_type: InternalAllocationType,
allocation_scope: SystemAllocationScope,
) -> c_void,
>;
#[allow(non_camel_case_types)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/PFN_vkReallocationFunction.html>"]
pub type PFN_vkReallocationFunction = Option<
unsafe extern "system" fn(
p_user_data: *mut c_void,
p_original: *mut c_void,
size: usize,
alignment: usize,
allocation_scope: SystemAllocationScope,
) -> *mut c_void,
>;
#[allow(non_camel_case_types)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/PFN_vkAllocationFunction.html>"]
pub type PFN_vkAllocationFunction = Option<
unsafe extern "system" fn(
p_user_data: *mut c_void,
size: usize,
alignment: usize,
allocation_scope: SystemAllocationScope,
) -> *mut c_void,
>;
#[allow(non_camel_case_types)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/PFN_vkFreeFunction.html>"]
pub type PFN_vkFreeFunction =
Option<unsafe extern "system" fn(p_user_data: *mut c_void, p_memory: *mut c_void) -> c_void>;
#[allow(non_camel_case_types)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/PFN_vkVoidFunction.html>"]
pub type PFN_vkVoidFunction = Option<unsafe extern "system" fn() -> c_void>;
#[allow(non_camel_case_types)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/PFN_vkDebugReportCallbackEXT.html>"]
pub type PFN_vkDebugReportCallbackEXT = Option<
unsafe extern "system" fn(
flags: DebugReportFlagsEXT,
object_type: DebugReportObjectTypeEXT,
object: u64,
location: usize,
message_code: i32,
p_layer_prefix: *const c_char,
p_message: *const c_char,
p_user_data: *mut c_void,
) -> Bool32,
>;
#[allow(non_camel_case_types)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/PFN_vkDebugUtilsMessengerCallbackEXT.html>"]
pub type PFN_vkDebugUtilsMessengerCallbackEXT = Option<
unsafe extern "system" fn(
message_severity: DebugUtilsMessageSeverityFlagsEXT,
message_types: DebugUtilsMessageTypeFlagsEXT,
p_callback_data: *const DebugUtilsMessengerCallbackDataEXT,
p_user_data: *mut c_void,
) -> Bool32,
>;
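// Illustrative sketch (not part of the generated bindings): a do-nothing debug
// callback matching the `PFN_vkDebugUtilsMessengerCallbackEXT` alias above.
// `FALSE` is the `Bool32` value meaning "do not abort the call", defined
// elsewhere in this file; everything else here is an arbitrary example.
#[cfg(test)]
mod debug_utils_callback_example {
    use super::*;
    unsafe extern "system" fn noop_callback(
        _message_severity: DebugUtilsMessageSeverityFlagsEXT,
        _message_types: DebugUtilsMessageTypeFlagsEXT,
        _p_callback_data: *const DebugUtilsMessengerCallbackDataEXT,
        _p_user_data: *mut c_void,
    ) -> Bool32 {
        FALSE
    }
    #[test]
    fn callback_matches_expected_signature() {
        // The function item coerces to the raw function pointer wrapped by the alias.
        let _callback: PFN_vkDebugUtilsMessengerCallbackEXT = Some(noop_callback);
    }
}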
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkBaseOutStructure.html>"]
pub struct BaseOutStructure {
pub s_type: StructureType,
pub p_next: *mut BaseOutStructure,
}
impl ::std::default::Default for BaseOutStructure {
fn default() -> BaseOutStructure {
BaseOutStructure {
s_type: unsafe { ::std::mem::zeroed() },
p_next: ::std::ptr::null_mut(),
}
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkBaseInStructure.html>"]
pub struct BaseInStructure {
pub s_type: StructureType,
pub p_next: *const BaseInStructure,
}
impl ::std::default::Default for BaseInStructure {
fn default() -> BaseInStructure {
BaseInStructure {
s_type: unsafe { ::std::mem::zeroed() },
p_next: ::std::ptr::null(),
}
}
}
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkOffset2D.html>"]
pub struct Offset2D {
pub x: i32,
pub y: i32,
}
impl Offset2D {
pub fn builder<'a>() -> Offset2DBuilder<'a> {
Offset2DBuilder {
inner: Offset2D::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct Offset2DBuilder<'a> {
inner: Offset2D,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for Offset2DBuilder<'a> {
type Target = Offset2D;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for Offset2DBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> Offset2DBuilder<'a> {
pub fn x(mut self, x: i32) -> Offset2DBuilder<'a> {
self.inner.x = x;
self
}
pub fn y(mut self, y: i32) -> Offset2DBuilder<'a> {
self.inner.y = y;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> Offset2D {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkOffset3D.html>"]
pub struct Offset3D {
pub x: i32,
pub y: i32,
pub z: i32,
}
impl Offset3D {
pub fn builder<'a>() -> Offset3DBuilder<'a> {
Offset3DBuilder {
inner: Offset3D::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct Offset3DBuilder<'a> {
inner: Offset3D,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for Offset3DBuilder<'a> {
type Target = Offset3D;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for Offset3DBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> Offset3DBuilder<'a> {
pub fn x(mut self, x: i32) -> Offset3DBuilder<'a> {
self.inner.x = x;
self
}
pub fn y(mut self, y: i32) -> Offset3DBuilder<'a> {
self.inner.y = y;
self
}
pub fn z(mut self, z: i32) -> Offset3DBuilder<'a> {
self.inner.z = z;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> Offset3D {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkExtent2D.html>"]
pub struct Extent2D {
pub width: u32,
pub height: u32,
}
impl Extent2D {
pub fn builder<'a>() -> Extent2DBuilder<'a> {
Extent2DBuilder {
inner: Extent2D::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct Extent2DBuilder<'a> {
inner: Extent2D,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for Extent2DBuilder<'a> {
type Target = Extent2D;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for Extent2DBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> Extent2DBuilder<'a> {
pub fn width(mut self, width: u32) -> Extent2DBuilder<'a> {
self.inner.width = width;
self
}
pub fn height(mut self, height: u32) -> Extent2DBuilder<'a> {
self.inner.height = height;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> Extent2D {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Default, Debug, PartialEq, Eq, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkExtent3D.html>"]
pub struct Extent3D {
pub width: u32,
pub height: u32,
pub depth: u32,
}
impl Extent3D {
pub fn builder<'a>() -> Extent3DBuilder<'a> {
Extent3DBuilder {
inner: Extent3D::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct Extent3DBuilder<'a> {
inner: Extent3D,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for Extent3DBuilder<'a> {
type Target = Extent3D;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for Extent3DBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> Extent3DBuilder<'a> {
pub fn width(mut self, width: u32) -> Extent3DBuilder<'a> {
self.inner.width = width;
self
}
pub fn height(mut self, height: u32) -> Extent3DBuilder<'a> {
self.inner.height = height;
self
}
pub fn depth(mut self, depth: u32) -> Extent3DBuilder<'a> {
self.inner.depth = depth;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> Extent3D {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkViewport.html>"]
pub struct Viewport {
pub x: f32,
pub y: f32,
pub width: f32,
pub height: f32,
pub min_depth: f32,
pub max_depth: f32,
}
impl Viewport {
pub fn builder<'a>() -> ViewportBuilder<'a> {
ViewportBuilder {
inner: Viewport::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ViewportBuilder<'a> {
inner: Viewport,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for ViewportBuilder<'a> {
type Target = Viewport;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ViewportBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ViewportBuilder<'a> {
pub fn x(mut self, x: f32) -> ViewportBuilder<'a> {
self.inner.x = x;
self
}
pub fn y(mut self, y: f32) -> ViewportBuilder<'a> {
self.inner.y = y;
self
}
pub fn width(mut self, width: f32) -> ViewportBuilder<'a> {
self.inner.width = width;
self
}
pub fn height(mut self, height: f32) -> ViewportBuilder<'a> {
self.inner.height = height;
self
}
pub fn min_depth(mut self, min_depth: f32) -> ViewportBuilder<'a> {
self.inner.min_depth = min_depth;
self
}
pub fn max_depth(mut self, max_depth: f32) -> ViewportBuilder<'a> {
self.inner.max_depth = max_depth;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> Viewport {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkRect2D.html>"]
pub struct Rect2D {
pub offset: Offset2D,
pub extent: Extent2D,
}
impl Rect2D {
pub fn builder<'a>() -> Rect2DBuilder<'a> {
Rect2DBuilder {
inner: Rect2D::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct Rect2DBuilder<'a> {
inner: Rect2D,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for Rect2DBuilder<'a> {
type Target = Rect2D;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for Rect2DBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> Rect2DBuilder<'a> {
pub fn offset(mut self, offset: Offset2D) -> Rect2DBuilder<'a> {
self.inner.offset = offset;
self
}
pub fn extent(mut self, extent: Extent2D) -> Rect2DBuilder<'a> {
self.inner.extent = extent;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> Rect2D {
self.inner
}
}
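// Illustrative sketch (not part of the generated bindings): composing the
// builders above into a `Rect2D`. The concrete dimensions are arbitrary
// example values.
#[cfg(test)]
mod rect2d_builder_example {
    use super::*;
    #[test]
    fn composes_offset_and_extent() {
        let rect = Rect2D::builder()
            .offset(Offset2D::builder().x(0).y(0).build())
            .extent(Extent2D::builder().width(640).height(480).build())
            .build();
        assert_eq!(rect.extent.width, 640);
        assert_eq!(rect.offset.y, 0);
    }
}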
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkClearRect.html>"]
pub struct ClearRect {
pub rect: Rect2D,
pub base_array_layer: u32,
pub layer_count: u32,
}
impl ClearRect {
pub fn builder<'a>() -> ClearRectBuilder<'a> {
ClearRectBuilder {
inner: ClearRect::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ClearRectBuilder<'a> {
inner: ClearRect,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for ClearRectBuilder<'a> {
type Target = ClearRect;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ClearRectBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ClearRectBuilder<'a> {
pub fn rect(mut self, rect: Rect2D) -> ClearRectBuilder<'a> {
self.inner.rect = rect;
self
}
pub fn base_array_layer(mut self, base_array_layer: u32) -> ClearRectBuilder<'a> {
self.inner.base_array_layer = base_array_layer;
self
}
pub fn layer_count(mut self, layer_count: u32) -> ClearRectBuilder<'a> {
self.inner.layer_count = layer_count;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ClearRect {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkComponentMapping.html>"]
pub struct ComponentMapping {
pub r: ComponentSwizzle,
pub g: ComponentSwizzle,
pub b: ComponentSwizzle,
pub a: ComponentSwizzle,
}
impl ComponentMapping {
pub fn builder<'a>() -> ComponentMappingBuilder<'a> {
ComponentMappingBuilder {
inner: ComponentMapping::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ComponentMappingBuilder<'a> {
inner: ComponentMapping,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for ComponentMappingBuilder<'a> {
type Target = ComponentMapping;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ComponentMappingBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ComponentMappingBuilder<'a> {
pub fn r(mut self, r: ComponentSwizzle) -> ComponentMappingBuilder<'a> {
self.inner.r = r;
self
}
pub fn g(mut self, g: ComponentSwizzle) -> ComponentMappingBuilder<'a> {
self.inner.g = g;
self
}
pub fn b(mut self, b: ComponentSwizzle) -> ComponentMappingBuilder<'a> {
self.inner.b = b;
self
}
pub fn a(mut self, a: ComponentSwizzle) -> ComponentMappingBuilder<'a> {
self.inner.a = a;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ComponentMapping {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDeviceProperties.html>"]
pub struct PhysicalDeviceProperties {
pub api_version: u32,
pub driver_version: u32,
pub vendor_id: u32,
pub device_id: u32,
pub device_type: PhysicalDeviceType,
pub device_name: [c_char; MAX_PHYSICAL_DEVICE_NAME_SIZE],
pub pipeline_cache_uuid: [u8; UUID_SIZE],
pub limits: PhysicalDeviceLimits,
pub sparse_properties: PhysicalDeviceSparseProperties,
}
impl fmt::Debug for PhysicalDeviceProperties {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
fmt.debug_struct("PhysicalDeviceProperties")
.field("api_version", &self.api_version)
.field("driver_version", &self.driver_version)
.field("vendor_id", &self.vendor_id)
.field("device_id", &self.device_id)
.field("device_type", &self.device_type)
.field("device_name", &unsafe {
::std::ffi::CStr::from_ptr(self.device_name.as_ptr() as *const c_char)
})
.field("pipeline_cache_uuid", &self.pipeline_cache_uuid)
.field("limits", &self.limits)
.field("sparse_properties", &self.sparse_properties)
.finish()
}
}
impl ::std::default::Default for PhysicalDeviceProperties {
fn default() -> PhysicalDeviceProperties {
PhysicalDeviceProperties {
api_version: u32::default(),
driver_version: u32::default(),
vendor_id: u32::default(),
device_id: u32::default(),
device_type: PhysicalDeviceType::default(),
device_name: unsafe { ::std::mem::zeroed() },
pipeline_cache_uuid: unsafe { ::std::mem::zeroed() },
limits: PhysicalDeviceLimits::default(),
sparse_properties: PhysicalDeviceSparseProperties::default(),
}
}
}
impl PhysicalDeviceProperties {
pub fn builder<'a>() -> PhysicalDevicePropertiesBuilder<'a> {
PhysicalDevicePropertiesBuilder {
inner: PhysicalDeviceProperties::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDevicePropertiesBuilder<'a> {
inner: PhysicalDeviceProperties,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for PhysicalDevicePropertiesBuilder<'a> {
type Target = PhysicalDeviceProperties;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDevicePropertiesBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDevicePropertiesBuilder<'a> {
pub fn api_version(mut self, api_version: u32) -> PhysicalDevicePropertiesBuilder<'a> {
self.inner.api_version = api_version;
self
}
pub fn driver_version(mut self, driver_version: u32) -> PhysicalDevicePropertiesBuilder<'a> {
self.inner.driver_version = driver_version;
self
}
pub fn vendor_id(mut self, vendor_id: u32) -> PhysicalDevicePropertiesBuilder<'a> {
self.inner.vendor_id = vendor_id;
self
}
pub fn device_id(mut self, device_id: u32) -> PhysicalDevicePropertiesBuilder<'a> {
self.inner.device_id = device_id;
self
}
pub fn device_type(
mut self,
device_type: PhysicalDeviceType,
) -> PhysicalDevicePropertiesBuilder<'a> {
self.inner.device_type = device_type;
self
}
pub fn device_name(
mut self,
device_name: [c_char; MAX_PHYSICAL_DEVICE_NAME_SIZE],
) -> PhysicalDevicePropertiesBuilder<'a> {
self.inner.device_name = device_name;
self
}
pub fn pipeline_cache_uuid(
mut self,
pipeline_cache_uuid: [u8; UUID_SIZE],
) -> PhysicalDevicePropertiesBuilder<'a> {
self.inner.pipeline_cache_uuid = pipeline_cache_uuid;
self
}
pub fn limits(mut self, limits: PhysicalDeviceLimits) -> PhysicalDevicePropertiesBuilder<'a> {
self.inner.limits = limits;
self
}
pub fn sparse_properties(
mut self,
sparse_properties: PhysicalDeviceSparseProperties,
) -> PhysicalDevicePropertiesBuilder<'a> {
self.inner.sparse_properties = sparse_properties;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDeviceProperties {
self.inner
}
}
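// Illustrative sketch (not part of the generated bindings): `device_name` is a
// fixed-size, NUL-terminated C string, which is why the `Debug` impl above goes
// through `CStr` instead of printing the raw array. A default-initialized
// struct yields an empty name.
#[cfg(test)]
mod physical_device_properties_example {
    use super::*;
    #[test]
    fn device_name_reads_as_cstr() {
        let props = PhysicalDeviceProperties::default();
        // Safe here because `default()` zeroes the array, so a NUL terminator exists.
        let name = unsafe { ::std::ffi::CStr::from_ptr(props.device_name.as_ptr()) };
        assert!(name.to_bytes().is_empty());
    }
}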
#[repr(C)]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkExtensionProperties.html>"]
pub struct ExtensionProperties {
pub extension_name: [c_char; MAX_EXTENSION_NAME_SIZE],
pub spec_version: u32,
}
impl fmt::Debug for ExtensionProperties {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
fmt.debug_struct("ExtensionProperties")
.field("extension_name", &unsafe {
::std::ffi::CStr::from_ptr(self.extension_name.as_ptr() as *const c_char)
})
.field("spec_version", &self.spec_version)
.finish()
}
}
impl ::std::default::Default for ExtensionProperties {
fn default() -> ExtensionProperties {
ExtensionProperties {
extension_name: unsafe { ::std::mem::zeroed() },
spec_version: u32::default(),
}
}
}
impl ExtensionProperties {
pub fn builder<'a>() -> ExtensionPropertiesBuilder<'a> {
ExtensionPropertiesBuilder {
inner: ExtensionProperties::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ExtensionPropertiesBuilder<'a> {
inner: ExtensionProperties,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for ExtensionPropertiesBuilder<'a> {
type Target = ExtensionProperties;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ExtensionPropertiesBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ExtensionPropertiesBuilder<'a> {
pub fn extension_name(
mut self,
extension_name: [c_char; MAX_EXTENSION_NAME_SIZE],
) -> ExtensionPropertiesBuilder<'a> {
self.inner.extension_name = extension_name;
self
}
pub fn spec_version(mut self, spec_version: u32) -> ExtensionPropertiesBuilder<'a> {
self.inner.spec_version = spec_version;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ExtensionProperties {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkLayerProperties.html>"]
pub struct LayerProperties {
pub layer_name: [c_char; MAX_EXTENSION_NAME_SIZE],
pub spec_version: u32,
pub implementation_version: u32,
pub description: [c_char; MAX_DESCRIPTION_SIZE],
}
impl fmt::Debug for LayerProperties {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
fmt.debug_struct("LayerProperties")
.field("layer_name", &unsafe {
::std::ffi::CStr::from_ptr(self.layer_name.as_ptr() as *const c_char)
})
.field("spec_version", &self.spec_version)
.field("implementation_version", &self.implementation_version)
.field("description", &unsafe {
::std::ffi::CStr::from_ptr(self.description.as_ptr() as *const c_char)
})
.finish()
}
}
impl ::std::default::Default for LayerProperties {
fn default() -> LayerProperties {
LayerProperties {
layer_name: unsafe { ::std::mem::zeroed() },
spec_version: u32::default(),
implementation_version: u32::default(),
description: unsafe { ::std::mem::zeroed() },
}
}
}
impl LayerProperties {
pub fn builder<'a>() -> LayerPropertiesBuilder<'a> {
LayerPropertiesBuilder {
inner: LayerProperties::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct LayerPropertiesBuilder<'a> {
inner: LayerProperties,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for LayerPropertiesBuilder<'a> {
type Target = LayerProperties;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for LayerPropertiesBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> LayerPropertiesBuilder<'a> {
pub fn layer_name(
mut self,
layer_name: [c_char; MAX_EXTENSION_NAME_SIZE],
) -> LayerPropertiesBuilder<'a> {
self.inner.layer_name = layer_name;
self
}
pub fn spec_version(mut self, spec_version: u32) -> LayerPropertiesBuilder<'a> {
self.inner.spec_version = spec_version;
self
}
pub fn implementation_version(
mut self,
implementation_version: u32,
) -> LayerPropertiesBuilder<'a> {
self.inner.implementation_version = implementation_version;
self
}
pub fn description(
mut self,
description: [c_char; MAX_DESCRIPTION_SIZE],
) -> LayerPropertiesBuilder<'a> {
self.inner.description = description;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> LayerProperties {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkApplicationInfo.html>"]
pub struct ApplicationInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub p_application_name: *const c_char,
pub application_version: u32,
pub p_engine_name: *const c_char,
pub engine_version: u32,
pub api_version: u32,
}
impl ::std::default::Default for ApplicationInfo {
fn default() -> ApplicationInfo {
ApplicationInfo {
s_type: StructureType::APPLICATION_INFO,
p_next: ::std::ptr::null(),
p_application_name: ::std::ptr::null(),
application_version: u32::default(),
p_engine_name: ::std::ptr::null(),
engine_version: u32::default(),
api_version: u32::default(),
}
}
}
impl ApplicationInfo {
pub fn builder<'a>() -> ApplicationInfoBuilder<'a> {
ApplicationInfoBuilder {
inner: ApplicationInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ApplicationInfoBuilder<'a> {
inner: ApplicationInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsApplicationInfo {}
impl<'a> ::std::ops::Deref for ApplicationInfoBuilder<'a> {
type Target = ApplicationInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ApplicationInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ApplicationInfoBuilder<'a> {
pub fn application_name(
mut self,
application_name: &'a ::std::ffi::CStr,
) -> ApplicationInfoBuilder<'a> {
self.inner.p_application_name = application_name.as_ptr();
self
}
pub fn application_version(mut self, application_version: u32) -> ApplicationInfoBuilder<'a> {
self.inner.application_version = application_version;
self
}
pub fn engine_name(mut self, engine_name: &'a ::std::ffi::CStr) -> ApplicationInfoBuilder<'a> {
self.inner.p_engine_name = engine_name.as_ptr();
self
}
pub fn engine_version(mut self, engine_version: u32) -> ApplicationInfoBuilder<'a> {
self.inner.engine_version = engine_version;
self
}
pub fn api_version(mut self, api_version: u32) -> ApplicationInfoBuilder<'a> {
self.inner.api_version = api_version;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsApplicationInfo>(
mut self,
next: &'a mut T,
) -> ApplicationInfoBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ApplicationInfo {
self.inner
}
}
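// Illustrative sketch (not part of the generated bindings): filling out an
// `ApplicationInfo` with the builder above. The application and engine names
// are arbitrary examples; the `CString`s must outlive the raw pointers stored
// in the returned struct.
#[cfg(test)]
mod application_info_builder_example {
    use super::*;
    use std::ffi::CString;
    #[test]
    fn builds_application_info() {
        let app_name = CString::new("example-app").unwrap();
        let engine_name = CString::new("example-engine").unwrap();
        let info = ApplicationInfo::builder()
            .application_name(&app_name)
            .engine_name(&engine_name)
            .application_version(vk_make_version!(0, 1, 0))
            .api_version(vk_make_version!(1, 1, 0))
            .build();
        assert_eq!(vk_version_major!(info.api_version), 1);
        assert_eq!(vk_version_minor!(info.api_version), 1);
    }
}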
#[repr(C)]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkAllocationCallbacks.html>"]
pub struct AllocationCallbacks {
pub p_user_data: *mut c_void,
pub pfn_allocation: PFN_vkAllocationFunction,
pub pfn_reallocation: PFN_vkReallocationFunction,
pub pfn_free: PFN_vkFreeFunction,
pub pfn_internal_allocation: PFN_vkInternalAllocationNotification,
pub pfn_internal_free: PFN_vkInternalFreeNotification,
}
impl fmt::Debug for AllocationCallbacks {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
fmt.debug_struct("AllocationCallbacks")
.field("p_user_data", &self.p_user_data)
.field(
"pfn_allocation",
&(self.pfn_allocation.map(|x| x as *const ())),
)
.field(
"pfn_reallocation",
&(self.pfn_reallocation.map(|x| x as *const ())),
)
.field("pfn_free", &(self.pfn_free.map(|x| x as *const ())))
.field(
"pfn_internal_allocation",
&(self.pfn_internal_allocation.map(|x| x as *const ())),
)
.field(
"pfn_internal_free",
&(self.pfn_internal_free.map(|x| x as *const ())),
)
.finish()
}
}
impl ::std::default::Default for AllocationCallbacks {
fn default() -> AllocationCallbacks {
AllocationCallbacks {
p_user_data: ::std::ptr::null_mut(),
pfn_allocation: PFN_vkAllocationFunction::default(),
pfn_reallocation: PFN_vkReallocationFunction::default(),
pfn_free: PFN_vkFreeFunction::default(),
pfn_internal_allocation: PFN_vkInternalAllocationNotification::default(),
pfn_internal_free: PFN_vkInternalFreeNotification::default(),
}
}
}
impl AllocationCallbacks {
pub fn builder<'a>() -> AllocationCallbacksBuilder<'a> {
AllocationCallbacksBuilder {
inner: AllocationCallbacks::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct AllocationCallbacksBuilder<'a> {
inner: AllocationCallbacks,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for AllocationCallbacksBuilder<'a> {
type Target = AllocationCallbacks;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for AllocationCallbacksBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> AllocationCallbacksBuilder<'a> {
pub fn user_data(mut self, user_data: *mut c_void) -> AllocationCallbacksBuilder<'a> {
self.inner.p_user_data = user_data;
self
}
pub fn pfn_allocation(
mut self,
pfn_allocation: PFN_vkAllocationFunction,
) -> AllocationCallbacksBuilder<'a> {
self.inner.pfn_allocation = pfn_allocation;
self
}
pub fn pfn_reallocation(
mut self,
pfn_reallocation: PFN_vkReallocationFunction,
) -> AllocationCallbacksBuilder<'a> {
self.inner.pfn_reallocation = pfn_reallocation;
self
}
pub fn pfn_free(mut self, pfn_free: PFN_vkFreeFunction) -> AllocationCallbacksBuilder<'a> {
self.inner.pfn_free = pfn_free;
self
}
pub fn pfn_internal_allocation(
mut self,
pfn_internal_allocation: PFN_vkInternalAllocationNotification,
) -> AllocationCallbacksBuilder<'a> {
self.inner.pfn_internal_allocation = pfn_internal_allocation;
self
}
pub fn pfn_internal_free(
mut self,
pfn_internal_free: PFN_vkInternalFreeNotification,
) -> AllocationCallbacksBuilder<'a> {
self.inner.pfn_internal_free = pfn_internal_free;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> AllocationCallbacks {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDeviceQueueCreateInfo.html>"]
pub struct DeviceQueueCreateInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub flags: DeviceQueueCreateFlags,
pub queue_family_index: u32,
pub queue_count: u32,
pub p_queue_priorities: *const f32,
}
impl ::std::default::Default for DeviceQueueCreateInfo {
fn default() -> DeviceQueueCreateInfo {
DeviceQueueCreateInfo {
s_type: StructureType::DEVICE_QUEUE_CREATE_INFO,
p_next: ::std::ptr::null(),
flags: DeviceQueueCreateFlags::default(),
queue_family_index: u32::default(),
queue_count: u32::default(),
p_queue_priorities: ::std::ptr::null(),
}
}
}
impl DeviceQueueCreateInfo {
pub fn builder<'a>() -> DeviceQueueCreateInfoBuilder<'a> {
DeviceQueueCreateInfoBuilder {
inner: DeviceQueueCreateInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct DeviceQueueCreateInfoBuilder<'a> {
inner: DeviceQueueCreateInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsDeviceQueueCreateInfo {}
impl<'a> ::std::ops::Deref for DeviceQueueCreateInfoBuilder<'a> {
type Target = DeviceQueueCreateInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for DeviceQueueCreateInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> DeviceQueueCreateInfoBuilder<'a> {
pub fn flags(mut self, flags: DeviceQueueCreateFlags) -> DeviceQueueCreateInfoBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn queue_family_index(
mut self,
queue_family_index: u32,
) -> DeviceQueueCreateInfoBuilder<'a> {
self.inner.queue_family_index = queue_family_index;
self
}
pub fn queue_priorities(
mut self,
queue_priorities: &'a [f32],
) -> DeviceQueueCreateInfoBuilder<'a> {
self.inner.queue_count = queue_priorities.len() as _;
self.inner.p_queue_priorities = queue_priorities.as_ptr();
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsDeviceQueueCreateInfo>(
mut self,
next: &'a mut T,
) -> DeviceQueueCreateInfoBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> DeviceQueueCreateInfo {
self.inner
}
}
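// Illustrative sketch (not part of the generated bindings): `queue_priorities`
// sets both `queue_count` and `p_queue_priorities` from a single slice, so the
// two fields cannot get out of sync. The priority value is an arbitrary example.
#[cfg(test)]
mod device_queue_create_info_example {
    use super::*;
    #[test]
    fn queue_priorities_sets_count_and_pointer() {
        let priorities = [1.0f32];
        let info = DeviceQueueCreateInfo::builder()
            .queue_family_index(0)
            .queue_priorities(&priorities)
            .build();
        assert_eq!(info.queue_count, 1);
        assert_eq!(info.p_queue_priorities, priorities.as_ptr());
    }
}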
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDeviceCreateInfo.html>"]
pub struct DeviceCreateInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub flags: DeviceCreateFlags,
pub queue_create_info_count: u32,
pub p_queue_create_infos: *const DeviceQueueCreateInfo,
pub enabled_layer_count: u32,
pub pp_enabled_layer_names: *const *const c_char,
pub enabled_extension_count: u32,
pub pp_enabled_extension_names: *const *const c_char,
pub p_enabled_features: *const PhysicalDeviceFeatures,
}
impl ::std::default::Default for DeviceCreateInfo {
fn default() -> DeviceCreateInfo {
DeviceCreateInfo {
s_type: StructureType::DEVICE_CREATE_INFO,
p_next: ::std::ptr::null(),
flags: DeviceCreateFlags::default(),
queue_create_info_count: u32::default(),
p_queue_create_infos: ::std::ptr::null(),
enabled_layer_count: u32::default(),
pp_enabled_layer_names: ::std::ptr::null(),
enabled_extension_count: u32::default(),
pp_enabled_extension_names: ::std::ptr::null(),
p_enabled_features: ::std::ptr::null(),
}
}
}
impl DeviceCreateInfo {
pub fn builder<'a>() -> DeviceCreateInfoBuilder<'a> {
DeviceCreateInfoBuilder {
inner: DeviceCreateInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct DeviceCreateInfoBuilder<'a> {
inner: DeviceCreateInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsDeviceCreateInfo {}
impl<'a> ::std::ops::Deref for DeviceCreateInfoBuilder<'a> {
type Target = DeviceCreateInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for DeviceCreateInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> DeviceCreateInfoBuilder<'a> {
pub fn flags(mut self, flags: DeviceCreateFlags) -> DeviceCreateInfoBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn queue_create_infos(
mut self,
queue_create_infos: &'a [DeviceQueueCreateInfo],
) -> DeviceCreateInfoBuilder<'a> {
self.inner.queue_create_info_count = queue_create_infos.len() as _;
self.inner.p_queue_create_infos = queue_create_infos.as_ptr();
self
}
pub fn enabled_layer_names(
mut self,
enabled_layer_names: &'a [*const c_char],
) -> DeviceCreateInfoBuilder<'a> {
self.inner.pp_enabled_layer_names = enabled_layer_names.as_ptr();
self.inner.enabled_layer_count = enabled_layer_names.len() as _;
self
}
pub fn enabled_extension_names(
mut self,
enabled_extension_names: &'a [*const c_char],
) -> DeviceCreateInfoBuilder<'a> {
self.inner.pp_enabled_extension_names = enabled_extension_names.as_ptr();
self.inner.enabled_extension_count = enabled_extension_names.len() as _;
self
}
pub fn enabled_features(
mut self,
enabled_features: &'a PhysicalDeviceFeatures,
) -> DeviceCreateInfoBuilder<'a> {
self.inner.p_enabled_features = enabled_features;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsDeviceCreateInfo>(
mut self,
next: &'a mut T,
) -> DeviceCreateInfoBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> DeviceCreateInfo {
self.inner
}
}
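// Illustrative sketch (not part of the generated bindings): how `push_next`
// splices an extension struct into the `p_next` chain. `DummyExt` is a
// hypothetical stand-in with the required `s_type`/`p_next` layout; real
// extension structs generated elsewhere in this file already implement
// `ExtendsDeviceCreateInfo`.
#[cfg(test)]
mod device_create_info_push_next_example {
    use super::*;
    #[repr(C)]
    struct DummyExt {
        s_type: StructureType,
        p_next: *mut c_void,
    }
    unsafe impl ExtendsDeviceCreateInfo for DummyExt {}
    #[test]
    fn push_next_prepends_extension_struct() {
        let mut ext = DummyExt {
            s_type: unsafe { ::std::mem::zeroed() },
            p_next: ::std::ptr::null_mut(),
        };
        let info = DeviceCreateInfo::builder().push_next(&mut ext).build();
        // The root's p_next now points at the extension struct.
        assert_eq!(info.p_next, &ext as *const DummyExt as *const c_void);
    }
}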
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkInstanceCreateInfo.html>"]
pub struct InstanceCreateInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub flags: InstanceCreateFlags,
pub p_application_info: *const ApplicationInfo,
pub enabled_layer_count: u32,
pub pp_enabled_layer_names: *const *const c_char,
pub enabled_extension_count: u32,
pub pp_enabled_extension_names: *const *const c_char,
}
impl ::std::default::Default for InstanceCreateInfo {
fn default() -> InstanceCreateInfo {
InstanceCreateInfo {
s_type: StructureType::INSTANCE_CREATE_INFO,
p_next: ::std::ptr::null(),
flags: InstanceCreateFlags::default(),
p_application_info: ::std::ptr::null(),
enabled_layer_count: u32::default(),
pp_enabled_layer_names: ::std::ptr::null(),
enabled_extension_count: u32::default(),
pp_enabled_extension_names: ::std::ptr::null(),
}
}
}
impl InstanceCreateInfo {
pub fn builder<'a>() -> InstanceCreateInfoBuilder<'a> {
InstanceCreateInfoBuilder {
inner: InstanceCreateInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct InstanceCreateInfoBuilder<'a> {
inner: InstanceCreateInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsInstanceCreateInfo {}
impl<'a> ::std::ops::Deref for InstanceCreateInfoBuilder<'a> {
type Target = InstanceCreateInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for InstanceCreateInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> InstanceCreateInfoBuilder<'a> {
pub fn flags(mut self, flags: InstanceCreateFlags) -> InstanceCreateInfoBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn application_info(
mut self,
application_info: &'a ApplicationInfo,
) -> InstanceCreateInfoBuilder<'a> {
self.inner.p_application_info = application_info;
self
}
pub fn enabled_layer_names(
mut self,
enabled_layer_names: &'a [*const c_char],
) -> InstanceCreateInfoBuilder<'a> {
self.inner.pp_enabled_layer_names = enabled_layer_names.as_ptr();
self.inner.enabled_layer_count = enabled_layer_names.len() as _;
self
}
pub fn enabled_extension_names(
mut self,
enabled_extension_names: &'a [*const c_char],
) -> InstanceCreateInfoBuilder<'a> {
self.inner.pp_enabled_extension_names = enabled_extension_names.as_ptr();
self.inner.enabled_extension_count = enabled_extension_names.len() as _;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsInstanceCreateInfo>(
mut self,
next: &'a mut T,
) -> InstanceCreateInfoBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> InstanceCreateInfo {
self.inner
}
}
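// Illustrative sketch (not part of the generated bindings): wiring an
// `ApplicationInfo` and a layer name list into `InstanceCreateInfo`. The layer
// name is a hypothetical example; the `CString` and the pointer array must
// outlive the struct that borrows them.
#[cfg(test)]
mod instance_create_info_example {
    use super::*;
    use std::ffi::CString;
    #[test]
    fn builds_instance_create_info() {
        let layer = CString::new("VK_LAYER_KHRONOS_validation").unwrap();
        let layer_names = [layer.as_ptr()];
        let app_info = ApplicationInfo::builder()
            .api_version(vk_make_version!(1, 1, 0))
            .build();
        let info = InstanceCreateInfo::builder()
            .application_info(&app_info)
            .enabled_layer_names(&layer_names)
            .build();
        assert_eq!(info.enabled_layer_count, 1);
        assert_eq!(info.p_application_info, &app_info as *const ApplicationInfo);
    }
}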
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkQueueFamilyProperties.html>"]
pub struct QueueFamilyProperties {
pub queue_flags: QueueFlags,
pub queue_count: u32,
pub timestamp_valid_bits: u32,
pub min_image_transfer_granularity: Extent3D,
}
impl QueueFamilyProperties {
pub fn builder<'a>() -> QueueFamilyPropertiesBuilder<'a> {
QueueFamilyPropertiesBuilder {
inner: QueueFamilyProperties::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct QueueFamilyPropertiesBuilder<'a> {
inner: QueueFamilyProperties,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for QueueFamilyPropertiesBuilder<'a> {
type Target = QueueFamilyProperties;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for QueueFamilyPropertiesBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> QueueFamilyPropertiesBuilder<'a> {
pub fn queue_flags(mut self, queue_flags: QueueFlags) -> QueueFamilyPropertiesBuilder<'a> {
self.inner.queue_flags = queue_flags;
self
}
pub fn queue_count(mut self, queue_count: u32) -> QueueFamilyPropertiesBuilder<'a> {
self.inner.queue_count = queue_count;
self
}
pub fn timestamp_valid_bits(
mut self,
timestamp_valid_bits: u32,
) -> QueueFamilyPropertiesBuilder<'a> {
self.inner.timestamp_valid_bits = timestamp_valid_bits;
self
}
pub fn min_image_transfer_granularity(
mut self,
min_image_transfer_granularity: Extent3D,
) -> QueueFamilyPropertiesBuilder<'a> {
self.inner.min_image_transfer_granularity = min_image_transfer_granularity;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> QueueFamilyProperties {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDeviceMemoryProperties.html>"]
pub struct PhysicalDeviceMemoryProperties {
pub memory_type_count: u32,
pub memory_types: [MemoryType; MAX_MEMORY_TYPES],
pub memory_heap_count: u32,
pub memory_heaps: [MemoryHeap; MAX_MEMORY_HEAPS],
}
impl ::std::default::Default for PhysicalDeviceMemoryProperties {
fn default() -> PhysicalDeviceMemoryProperties {
PhysicalDeviceMemoryProperties {
memory_type_count: u32::default(),
memory_types: unsafe { ::std::mem::zeroed() },
memory_heap_count: u32::default(),
memory_heaps: unsafe { ::std::mem::zeroed() },
}
}
}
impl PhysicalDeviceMemoryProperties {
pub fn builder<'a>() -> PhysicalDeviceMemoryPropertiesBuilder<'a> {
PhysicalDeviceMemoryPropertiesBuilder {
inner: PhysicalDeviceMemoryProperties::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDeviceMemoryPropertiesBuilder<'a> {
inner: PhysicalDeviceMemoryProperties,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for PhysicalDeviceMemoryPropertiesBuilder<'a> {
type Target = PhysicalDeviceMemoryProperties;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceMemoryPropertiesBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDeviceMemoryPropertiesBuilder<'a> {
pub fn memory_type_count(
mut self,
memory_type_count: u32,
) -> PhysicalDeviceMemoryPropertiesBuilder<'a> {
self.inner.memory_type_count = memory_type_count;
self
}
pub fn memory_types(
mut self,
memory_types: [MemoryType; MAX_MEMORY_TYPES],
) -> PhysicalDeviceMemoryPropertiesBuilder<'a> {
self.inner.memory_types = memory_types;
self
}
pub fn memory_heap_count(
mut self,
memory_heap_count: u32,
) -> PhysicalDeviceMemoryPropertiesBuilder<'a> {
self.inner.memory_heap_count = memory_heap_count;
self
}
pub fn memory_heaps(
mut self,
memory_heaps: [MemoryHeap; MAX_MEMORY_HEAPS],
) -> PhysicalDeviceMemoryPropertiesBuilder<'a> {
self.inner.memory_heaps = memory_heaps;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDeviceMemoryProperties {
self.inner
}
}
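// Illustrative sketch, not part of the generated bindings: the usual search for
// a memory type index that is both allowed by a resource's `memory_type_bits`
// mask and carries the requested property flags. Assumes `MemoryPropertyFlags`
// behaves like the other flag wrappers in this module.
#[allow(dead_code)]
fn example_find_memory_type_index(
    props: &PhysicalDeviceMemoryProperties,
    memory_type_bits: u32,
    required: MemoryPropertyFlags,
) -> Option<u32> {
    (0..props.memory_type_count).find(|&i| {
        // Bit `i` of `memory_type_bits` marks memory type `i` as usable.
        let allowed = memory_type_bits & (1u32 << i) != 0;
        allowed && props.memory_types[i as usize].property_flags.contains(required)
    })
}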
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkMemoryAllocateInfo.html>"]
pub struct MemoryAllocateInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub allocation_size: DeviceSize,
pub memory_type_index: u32,
}
impl ::std::default::Default for MemoryAllocateInfo {
fn default() -> MemoryAllocateInfo {
MemoryAllocateInfo {
s_type: StructureType::MEMORY_ALLOCATE_INFO,
p_next: ::std::ptr::null(),
allocation_size: DeviceSize::default(),
memory_type_index: u32::default(),
}
}
}
impl MemoryAllocateInfo {
pub fn builder<'a>() -> MemoryAllocateInfoBuilder<'a> {
MemoryAllocateInfoBuilder {
inner: MemoryAllocateInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct MemoryAllocateInfoBuilder<'a> {
inner: MemoryAllocateInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsMemoryAllocateInfo {}
impl<'a> ::std::ops::Deref for MemoryAllocateInfoBuilder<'a> {
type Target = MemoryAllocateInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for MemoryAllocateInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> MemoryAllocateInfoBuilder<'a> {
pub fn allocation_size(mut self, allocation_size: DeviceSize) -> MemoryAllocateInfoBuilder<'a> {
self.inner.allocation_size = allocation_size;
self
}
pub fn memory_type_index(mut self, memory_type_index: u32) -> MemoryAllocateInfoBuilder<'a> {
self.inner.memory_type_index = memory_type_index;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsMemoryAllocateInfo>(
mut self,
next: &'a mut T,
) -> MemoryAllocateInfoBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> MemoryAllocateInfo {
self.inner
}
}
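// Illustrative sketch, not part of the generated bindings: a basic allocation
// request built with the fluent setters, plus the `push_next` call that threads
// an extension struct into the `p_next` chain. Which concrete structs implement
// `ExtendsMemoryAllocateInfo` is declared elsewhere in this module; the generic
// bound below only assumes the trait itself.
#[allow(dead_code)]
fn example_memory_allocate_info<'a, T: ExtendsMemoryAllocateInfo>(
    memory_type_index: u32,
    size: DeviceSize,
    extension: &'a mut T,
) -> MemoryAllocateInfoBuilder<'a> {
    MemoryAllocateInfo::builder()
        .allocation_size(size)
        .memory_type_index(memory_type_index)
        // The extension ends up between the root struct and whatever was
        // already chained on `p_next`.
        .push_next(extension)
}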
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkMemoryRequirements.html>"]
pub struct MemoryRequirements {
pub size: DeviceSize,
pub alignment: DeviceSize,
pub memory_type_bits: u32,
}
impl MemoryRequirements {
pub fn builder<'a>() -> MemoryRequirementsBuilder<'a> {
MemoryRequirementsBuilder {
inner: MemoryRequirements::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct MemoryRequirementsBuilder<'a> {
inner: MemoryRequirements,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for MemoryRequirementsBuilder<'a> {
type Target = MemoryRequirements;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for MemoryRequirementsBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> MemoryRequirementsBuilder<'a> {
pub fn size(mut self, size: DeviceSize) -> MemoryRequirementsBuilder<'a> {
self.inner.size = size;
self
}
pub fn alignment(mut self, alignment: DeviceSize) -> MemoryRequirementsBuilder<'a> {
self.inner.alignment = alignment;
self
}
pub fn memory_type_bits(mut self, memory_type_bits: u32) -> MemoryRequirementsBuilder<'a> {
self.inner.memory_type_bits = memory_type_bits;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> MemoryRequirements {
self.inner
}
}
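// Illustrative sketch, not part of the generated bindings: rounding a
// suballocation offset up to the `alignment` reported in `MemoryRequirements`.
// Vulkan alignments are powers of two, which is what the bit trick relies on.
#[allow(dead_code)]
fn example_align_up(offset: DeviceSize, requirements: &MemoryRequirements) -> DeviceSize {
    // Guard against a zero alignment so the subtraction below cannot underflow.
    let alignment = requirements.alignment.max(1);
    (offset + alignment - 1) & !(alignment - 1)
}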
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkSparseImageFormatProperties.html>"]
pub struct SparseImageFormatProperties {
pub aspect_mask: ImageAspectFlags,
pub image_granularity: Extent3D,
pub flags: SparseImageFormatFlags,
}
impl SparseImageFormatProperties {
pub fn builder<'a>() -> SparseImageFormatPropertiesBuilder<'a> {
SparseImageFormatPropertiesBuilder {
inner: SparseImageFormatProperties::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct SparseImageFormatPropertiesBuilder<'a> {
inner: SparseImageFormatProperties,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for SparseImageFormatPropertiesBuilder<'a> {
type Target = SparseImageFormatProperties;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for SparseImageFormatPropertiesBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> SparseImageFormatPropertiesBuilder<'a> {
pub fn aspect_mask(
mut self,
aspect_mask: ImageAspectFlags,
) -> SparseImageFormatPropertiesBuilder<'a> {
self.inner.aspect_mask = aspect_mask;
self
}
pub fn image_granularity(
mut self,
image_granularity: Extent3D,
) -> SparseImageFormatPropertiesBuilder<'a> {
self.inner.image_granularity = image_granularity;
self
}
pub fn flags(
mut self,
flags: SparseImageFormatFlags,
) -> SparseImageFormatPropertiesBuilder<'a> {
self.inner.flags = flags;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> SparseImageFormatProperties {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkSparseImageMemoryRequirements.html>"]
pub struct SparseImageMemoryRequirements {
pub format_properties: SparseImageFormatProperties,
pub image_mip_tail_first_lod: u32,
pub image_mip_tail_size: DeviceSize,
pub image_mip_tail_offset: DeviceSize,
pub image_mip_tail_stride: DeviceSize,
}
impl SparseImageMemoryRequirements {
pub fn builder<'a>() -> SparseImageMemoryRequirementsBuilder<'a> {
SparseImageMemoryRequirementsBuilder {
inner: SparseImageMemoryRequirements::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct SparseImageMemoryRequirementsBuilder<'a> {
inner: SparseImageMemoryRequirements,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for SparseImageMemoryRequirementsBuilder<'a> {
type Target = SparseImageMemoryRequirements;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for SparseImageMemoryRequirementsBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> SparseImageMemoryRequirementsBuilder<'a> {
pub fn format_properties(
mut self,
format_properties: SparseImageFormatProperties,
) -> SparseImageMemoryRequirementsBuilder<'a> {
self.inner.format_properties = format_properties;
self
}
pub fn image_mip_tail_first_lod(
mut self,
image_mip_tail_first_lod: u32,
) -> SparseImageMemoryRequirementsBuilder<'a> {
self.inner.image_mip_tail_first_lod = image_mip_tail_first_lod;
self
}
pub fn image_mip_tail_size(
mut self,
image_mip_tail_size: DeviceSize,
) -> SparseImageMemoryRequirementsBuilder<'a> {
self.inner.image_mip_tail_size = image_mip_tail_size;
self
}
pub fn image_mip_tail_offset(
mut self,
image_mip_tail_offset: DeviceSize,
) -> SparseImageMemoryRequirementsBuilder<'a> {
self.inner.image_mip_tail_offset = image_mip_tail_offset;
self
}
pub fn image_mip_tail_stride(
mut self,
image_mip_tail_stride: DeviceSize,
) -> SparseImageMemoryRequirementsBuilder<'a> {
self.inner.image_mip_tail_stride = image_mip_tail_stride;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> SparseImageMemoryRequirements {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkMemoryType.html>"]
pub struct MemoryType {
pub property_flags: MemoryPropertyFlags,
pub heap_index: u32,
}
impl MemoryType {
pub fn builder<'a>() -> MemoryTypeBuilder<'a> {
MemoryTypeBuilder {
inner: MemoryType::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct MemoryTypeBuilder<'a> {
inner: MemoryType,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for MemoryTypeBuilder<'a> {
type Target = MemoryType;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for MemoryTypeBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> MemoryTypeBuilder<'a> {
pub fn property_flags(mut self, property_flags: MemoryPropertyFlags) -> MemoryTypeBuilder<'a> {
self.inner.property_flags = property_flags;
self
}
pub fn heap_index(mut self, heap_index: u32) -> MemoryTypeBuilder<'a> {
self.inner.heap_index = heap_index;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> MemoryType {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkMemoryHeap.html>"]
pub struct MemoryHeap {
pub size: DeviceSize,
pub flags: MemoryHeapFlags,
}
impl MemoryHeap {
pub fn builder<'a>() -> MemoryHeapBuilder<'a> {
MemoryHeapBuilder {
inner: MemoryHeap::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct MemoryHeapBuilder<'a> {
inner: MemoryHeap,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for MemoryHeapBuilder<'a> {
type Target = MemoryHeap;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for MemoryHeapBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> MemoryHeapBuilder<'a> {
pub fn size(mut self, size: DeviceSize) -> MemoryHeapBuilder<'a> {
self.inner.size = size;
self
}
pub fn flags(mut self, flags: MemoryHeapFlags) -> MemoryHeapBuilder<'a> {
self.inner.flags = flags;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> MemoryHeap {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkMappedMemoryRange.html>"]
pub struct MappedMemoryRange {
pub s_type: StructureType,
pub p_next: *const c_void,
pub memory: DeviceMemory,
pub offset: DeviceSize,
pub size: DeviceSize,
}
impl ::std::default::Default for MappedMemoryRange {
fn default() -> MappedMemoryRange {
MappedMemoryRange {
s_type: StructureType::MAPPED_MEMORY_RANGE,
p_next: ::std::ptr::null(),
memory: DeviceMemory::default(),
offset: DeviceSize::default(),
size: DeviceSize::default(),
}
}
}
impl MappedMemoryRange {
pub fn builder<'a>() -> MappedMemoryRangeBuilder<'a> {
MappedMemoryRangeBuilder {
inner: MappedMemoryRange::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct MappedMemoryRangeBuilder<'a> {
inner: MappedMemoryRange,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsMappedMemoryRange {}
impl<'a> ::std::ops::Deref for MappedMemoryRangeBuilder<'a> {
type Target = MappedMemoryRange;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for MappedMemoryRangeBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> MappedMemoryRangeBuilder<'a> {
pub fn memory(mut self, memory: DeviceMemory) -> MappedMemoryRangeBuilder<'a> {
self.inner.memory = memory;
self
}
pub fn offset(mut self, offset: DeviceSize) -> MappedMemoryRangeBuilder<'a> {
self.inner.offset = offset;
self
}
pub fn size(mut self, size: DeviceSize) -> MappedMemoryRangeBuilder<'a> {
self.inner.size = size;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsMappedMemoryRange>(
mut self,
next: &'a mut T,
) -> MappedMemoryRangeBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> MappedMemoryRange {
self.inner
}
}
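// Illustrative sketch, not part of the generated bindings: a range covering an
// entire mapped allocation, as handed to `vkFlushMappedMemoryRanges` or
// `vkInvalidateMappedMemoryRanges`. Assumes the `WHOLE_SIZE` constant is
// defined elsewhere in this module.
#[allow(dead_code)]
fn example_whole_mapped_range<'a>(memory: DeviceMemory) -> MappedMemoryRangeBuilder<'a> {
    MappedMemoryRange::builder()
        .memory(memory)
        .offset(0)
        .size(WHOLE_SIZE)
}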
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkFormatProperties.html>"]
pub struct FormatProperties {
pub linear_tiling_features: FormatFeatureFlags,
pub optimal_tiling_features: FormatFeatureFlags,
pub buffer_features: FormatFeatureFlags,
}
impl FormatProperties {
pub fn builder<'a>() -> FormatPropertiesBuilder<'a> {
FormatPropertiesBuilder {
inner: FormatProperties::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct FormatPropertiesBuilder<'a> {
inner: FormatProperties,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for FormatPropertiesBuilder<'a> {
type Target = FormatProperties;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for FormatPropertiesBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> FormatPropertiesBuilder<'a> {
pub fn linear_tiling_features(
mut self,
linear_tiling_features: FormatFeatureFlags,
) -> FormatPropertiesBuilder<'a> {
self.inner.linear_tiling_features = linear_tiling_features;
self
}
pub fn optimal_tiling_features(
mut self,
optimal_tiling_features: FormatFeatureFlags,
) -> FormatPropertiesBuilder<'a> {
self.inner.optimal_tiling_features = optimal_tiling_features;
self
}
pub fn buffer_features(
mut self,
buffer_features: FormatFeatureFlags,
) -> FormatPropertiesBuilder<'a> {
self.inner.buffer_features = buffer_features;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> FormatProperties {
self.inner
}
}
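// Illustrative sketch, not part of the generated bindings: checking whether a
// format offers a set of features for the tiling an image will use. Assumes
// `ImageTiling::LINEAR` is defined elsewhere in this module and that the flag
// wrapper provides `contains`, as in the rest of these bindings.
#[allow(dead_code)]
fn example_format_supports(
    props: &FormatProperties,
    tiling: ImageTiling,
    features: FormatFeatureFlags,
) -> bool {
    let available = if tiling == ImageTiling::LINEAR {
        props.linear_tiling_features
    } else {
        props.optimal_tiling_features
    };
    available.contains(features)
}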
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkImageFormatProperties.html>"]
pub struct ImageFormatProperties {
pub max_extent: Extent3D,
pub max_mip_levels: u32,
pub max_array_layers: u32,
pub sample_counts: SampleCountFlags,
pub max_resource_size: DeviceSize,
}
impl ImageFormatProperties {
pub fn builder<'a>() -> ImageFormatPropertiesBuilder<'a> {
ImageFormatPropertiesBuilder {
inner: ImageFormatProperties::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ImageFormatPropertiesBuilder<'a> {
inner: ImageFormatProperties,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for ImageFormatPropertiesBuilder<'a> {
type Target = ImageFormatProperties;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ImageFormatPropertiesBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ImageFormatPropertiesBuilder<'a> {
pub fn max_extent(mut self, max_extent: Extent3D) -> ImageFormatPropertiesBuilder<'a> {
self.inner.max_extent = max_extent;
self
}
pub fn max_mip_levels(mut self, max_mip_levels: u32) -> ImageFormatPropertiesBuilder<'a> {
self.inner.max_mip_levels = max_mip_levels;
self
}
pub fn max_array_layers(mut self, max_array_layers: u32) -> ImageFormatPropertiesBuilder<'a> {
self.inner.max_array_layers = max_array_layers;
self
}
pub fn sample_counts(
mut self,
sample_counts: SampleCountFlags,
) -> ImageFormatPropertiesBuilder<'a> {
self.inner.sample_counts = sample_counts;
self
}
pub fn max_resource_size(
mut self,
max_resource_size: DeviceSize,
) -> ImageFormatPropertiesBuilder<'a> {
self.inner.max_resource_size = max_resource_size;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ImageFormatProperties {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDescriptorBufferInfo.html>"]
pub struct DescriptorBufferInfo {
pub buffer: Buffer,
pub offset: DeviceSize,
pub range: DeviceSize,
}
impl DescriptorBufferInfo {
pub fn builder<'a>() -> DescriptorBufferInfoBuilder<'a> {
DescriptorBufferInfoBuilder {
inner: DescriptorBufferInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct DescriptorBufferInfoBuilder<'a> {
inner: DescriptorBufferInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for DescriptorBufferInfoBuilder<'a> {
type Target = DescriptorBufferInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for DescriptorBufferInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> DescriptorBufferInfoBuilder<'a> {
pub fn buffer(mut self, buffer: Buffer) -> DescriptorBufferInfoBuilder<'a> {
self.inner.buffer = buffer;
self
}
pub fn offset(mut self, offset: DeviceSize) -> DescriptorBufferInfoBuilder<'a> {
self.inner.offset = offset;
self
}
pub fn range(mut self, range: DeviceSize) -> DescriptorBufferInfoBuilder<'a> {
self.inner.range = range;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> DescriptorBufferInfo {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDescriptorImageInfo.html>"]
pub struct DescriptorImageInfo {
pub sampler: Sampler,
pub image_view: ImageView,
pub image_layout: ImageLayout,
}
impl DescriptorImageInfo {
pub fn builder<'a>() -> DescriptorImageInfoBuilder<'a> {
DescriptorImageInfoBuilder {
inner: DescriptorImageInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct DescriptorImageInfoBuilder<'a> {
inner: DescriptorImageInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for DescriptorImageInfoBuilder<'a> {
type Target = DescriptorImageInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for DescriptorImageInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> DescriptorImageInfoBuilder<'a> {
pub fn sampler(mut self, sampler: Sampler) -> DescriptorImageInfoBuilder<'a> {
self.inner.sampler = sampler;
self
}
pub fn image_view(mut self, image_view: ImageView) -> DescriptorImageInfoBuilder<'a> {
self.inner.image_view = image_view;
self
}
pub fn image_layout(mut self, image_layout: ImageLayout) -> DescriptorImageInfoBuilder<'a> {
self.inner.image_layout = image_layout;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> DescriptorImageInfo {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkWriteDescriptorSet.html>"]
pub struct WriteDescriptorSet {
pub s_type: StructureType,
pub p_next: *const c_void,
pub dst_set: DescriptorSet,
pub dst_binding: u32,
pub dst_array_element: u32,
pub descriptor_count: u32,
pub descriptor_type: DescriptorType,
pub p_image_info: *const DescriptorImageInfo,
pub p_buffer_info: *const DescriptorBufferInfo,
pub p_texel_buffer_view: *const BufferView,
}
impl ::std::default::Default for WriteDescriptorSet {
fn default() -> WriteDescriptorSet {
WriteDescriptorSet {
s_type: StructureType::WRITE_DESCRIPTOR_SET,
p_next: ::std::ptr::null(),
dst_set: DescriptorSet::default(),
dst_binding: u32::default(),
dst_array_element: u32::default(),
descriptor_count: u32::default(),
descriptor_type: DescriptorType::default(),
p_image_info: ::std::ptr::null(),
p_buffer_info: ::std::ptr::null(),
p_texel_buffer_view: ::std::ptr::null(),
}
}
}
impl WriteDescriptorSet {
pub fn builder<'a>() -> WriteDescriptorSetBuilder<'a> {
WriteDescriptorSetBuilder {
inner: WriteDescriptorSet::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct WriteDescriptorSetBuilder<'a> {
inner: WriteDescriptorSet,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsWriteDescriptorSet {}
impl<'a> ::std::ops::Deref for WriteDescriptorSetBuilder<'a> {
type Target = WriteDescriptorSet;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for WriteDescriptorSetBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> WriteDescriptorSetBuilder<'a> {
pub fn dst_set(mut self, dst_set: DescriptorSet) -> WriteDescriptorSetBuilder<'a> {
self.inner.dst_set = dst_set;
self
}
pub fn dst_binding(mut self, dst_binding: u32) -> WriteDescriptorSetBuilder<'a> {
self.inner.dst_binding = dst_binding;
self
}
pub fn dst_array_element(mut self, dst_array_element: u32) -> WriteDescriptorSetBuilder<'a> {
self.inner.dst_array_element = dst_array_element;
self
}
pub fn descriptor_type(
mut self,
descriptor_type: DescriptorType,
) -> WriteDescriptorSetBuilder<'a> {
self.inner.descriptor_type = descriptor_type;
self
}
pub fn image_info(
mut self,
image_info: &'a [DescriptorImageInfo],
) -> WriteDescriptorSetBuilder<'a> {
self.inner.descriptor_count = image_info.len() as _;
self.inner.p_image_info = image_info.as_ptr();
self
}
pub fn buffer_info(
mut self,
buffer_info: &'a [DescriptorBufferInfo],
) -> WriteDescriptorSetBuilder<'a> {
self.inner.descriptor_count = buffer_info.len() as _;
self.inner.p_buffer_info = buffer_info.as_ptr();
self
}
pub fn texel_buffer_view(
mut self,
texel_buffer_view: &'a [BufferView],
) -> WriteDescriptorSetBuilder<'a> {
self.inner.descriptor_count = texel_buffer_view.len() as _;
self.inner.p_texel_buffer_view = texel_buffer_view.as_ptr();
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsWriteDescriptorSet>(
mut self,
next: &'a mut T,
) -> WriteDescriptorSetBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> WriteDescriptorSet {
self.inner
}
}
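// Illustrative sketch, not part of the generated bindings: a uniform-buffer
// descriptor write. Note that `buffer_info` derives `descriptor_count` from the
// slice length, so the slice must outlive the builder until the write is
// submitted. A matching element could be built with
// `DescriptorBufferInfo::builder().buffer(buf).offset(0).range(WHOLE_SIZE).build()`.
// Assumes `DescriptorType::UNIFORM_BUFFER` is defined elsewhere in this module.
#[allow(dead_code)]
fn example_uniform_buffer_write<'a>(
    dst_set: DescriptorSet,
    binding: u32,
    buffer_info: &'a [DescriptorBufferInfo],
) -> WriteDescriptorSetBuilder<'a> {
    WriteDescriptorSet::builder()
        .dst_set(dst_set)
        .dst_binding(binding)
        .dst_array_element(0)
        .descriptor_type(DescriptorType::UNIFORM_BUFFER)
        .buffer_info(buffer_info)
}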
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkCopyDescriptorSet.html>"]
pub struct CopyDescriptorSet {
pub s_type: StructureType,
pub p_next: *const c_void,
pub src_set: DescriptorSet,
pub src_binding: u32,
pub src_array_element: u32,
pub dst_set: DescriptorSet,
pub dst_binding: u32,
pub dst_array_element: u32,
pub descriptor_count: u32,
}
impl ::std::default::Default for CopyDescriptorSet {
fn default() -> CopyDescriptorSet {
CopyDescriptorSet {
s_type: StructureType::COPY_DESCRIPTOR_SET,
p_next: ::std::ptr::null(),
src_set: DescriptorSet::default(),
src_binding: u32::default(),
src_array_element: u32::default(),
dst_set: DescriptorSet::default(),
dst_binding: u32::default(),
dst_array_element: u32::default(),
descriptor_count: u32::default(),
}
}
}
impl CopyDescriptorSet {
pub fn builder<'a>() -> CopyDescriptorSetBuilder<'a> {
CopyDescriptorSetBuilder {
inner: CopyDescriptorSet::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct CopyDescriptorSetBuilder<'a> {
inner: CopyDescriptorSet,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsCopyDescriptorSet {}
impl<'a> ::std::ops::Deref for CopyDescriptorSetBuilder<'a> {
type Target = CopyDescriptorSet;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for CopyDescriptorSetBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> CopyDescriptorSetBuilder<'a> {
pub fn src_set(mut self, src_set: DescriptorSet) -> CopyDescriptorSetBuilder<'a> {
self.inner.src_set = src_set;
self
}
pub fn src_binding(mut self, src_binding: u32) -> CopyDescriptorSetBuilder<'a> {
self.inner.src_binding = src_binding;
self
}
pub fn src_array_element(mut self, src_array_element: u32) -> CopyDescriptorSetBuilder<'a> {
self.inner.src_array_element = src_array_element;
self
}
pub fn dst_set(mut self, dst_set: DescriptorSet) -> CopyDescriptorSetBuilder<'a> {
self.inner.dst_set = dst_set;
self
}
pub fn dst_binding(mut self, dst_binding: u32) -> CopyDescriptorSetBuilder<'a> {
self.inner.dst_binding = dst_binding;
self
}
pub fn dst_array_element(mut self, dst_array_element: u32) -> CopyDescriptorSetBuilder<'a> {
self.inner.dst_array_element = dst_array_element;
self
}
pub fn descriptor_count(mut self, descriptor_count: u32) -> CopyDescriptorSetBuilder<'a> {
self.inner.descriptor_count = descriptor_count;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsCopyDescriptorSet>(
mut self,
next: &'a mut T,
) -> CopyDescriptorSetBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> CopyDescriptorSet {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkBufferCreateInfo.html>"]
pub struct BufferCreateInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub flags: BufferCreateFlags,
pub size: DeviceSize,
pub usage: BufferUsageFlags,
pub sharing_mode: SharingMode,
pub queue_family_index_count: u32,
pub p_queue_family_indices: *const u32,
}
impl ::std::default::Default for BufferCreateInfo {
fn default() -> BufferCreateInfo {
BufferCreateInfo {
s_type: StructureType::BUFFER_CREATE_INFO,
p_next: ::std::ptr::null(),
flags: BufferCreateFlags::default(),
size: DeviceSize::default(),
usage: BufferUsageFlags::default(),
sharing_mode: SharingMode::default(),
queue_family_index_count: u32::default(),
p_queue_family_indices: ::std::ptr::null(),
}
}
}
impl BufferCreateInfo {
pub fn builder<'a>() -> BufferCreateInfoBuilder<'a> {
BufferCreateInfoBuilder {
inner: BufferCreateInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct BufferCreateInfoBuilder<'a> {
inner: BufferCreateInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsBufferCreateInfo {}
impl<'a> ::std::ops::Deref for BufferCreateInfoBuilder<'a> {
type Target = BufferCreateInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for BufferCreateInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> BufferCreateInfoBuilder<'a> {
pub fn flags(mut self, flags: BufferCreateFlags) -> BufferCreateInfoBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn size(mut self, size: DeviceSize) -> BufferCreateInfoBuilder<'a> {
self.inner.size = size;
self
}
pub fn usage(mut self, usage: BufferUsageFlags) -> BufferCreateInfoBuilder<'a> {
self.inner.usage = usage;
self
}
pub fn sharing_mode(mut self, sharing_mode: SharingMode) -> BufferCreateInfoBuilder<'a> {
self.inner.sharing_mode = sharing_mode;
self
}
pub fn queue_family_indices(
mut self,
queue_family_indices: &'a [u32],
) -> BufferCreateInfoBuilder<'a> {
self.inner.queue_family_index_count = queue_family_indices.len() as _;
self.inner.p_queue_family_indices = queue_family_indices.as_ptr();
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsBufferCreateInfo>(
mut self,
next: &'a mut T,
) -> BufferCreateInfoBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> BufferCreateInfo {
self.inner
}
}
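// Illustrative sketch, not part of the generated bindings: an exclusive-mode
// vertex buffer. `queue_family_indices` only matters for
// `SharingMode::CONCURRENT`, so it is omitted here. Assumes
// `BufferUsageFlags::VERTEX_BUFFER` and `SharingMode::EXCLUSIVE` are defined
// elsewhere in this module.
#[allow(dead_code)]
fn example_vertex_buffer_create_info<'a>(size: DeviceSize) -> BufferCreateInfoBuilder<'a> {
    BufferCreateInfo::builder()
        .size(size)
        .usage(BufferUsageFlags::VERTEX_BUFFER)
        .sharing_mode(SharingMode::EXCLUSIVE)
}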
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkBufferViewCreateInfo.html>"]
pub struct BufferViewCreateInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub flags: BufferViewCreateFlags,
pub buffer: Buffer,
pub format: Format,
pub offset: DeviceSize,
pub range: DeviceSize,
}
impl ::std::default::Default for BufferViewCreateInfo {
fn default() -> BufferViewCreateInfo {
BufferViewCreateInfo {
s_type: StructureType::BUFFER_VIEW_CREATE_INFO,
p_next: ::std::ptr::null(),
flags: BufferViewCreateFlags::default(),
buffer: Buffer::default(),
format: Format::default(),
offset: DeviceSize::default(),
range: DeviceSize::default(),
}
}
}
impl BufferViewCreateInfo {
pub fn builder<'a>() -> BufferViewCreateInfoBuilder<'a> {
BufferViewCreateInfoBuilder {
inner: BufferViewCreateInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct BufferViewCreateInfoBuilder<'a> {
inner: BufferViewCreateInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsBufferViewCreateInfo {}
impl<'a> ::std::ops::Deref for BufferViewCreateInfoBuilder<'a> {
type Target = BufferViewCreateInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for BufferViewCreateInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> BufferViewCreateInfoBuilder<'a> {
pub fn flags(mut self, flags: BufferViewCreateFlags) -> BufferViewCreateInfoBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn buffer(mut self, buffer: Buffer) -> BufferViewCreateInfoBuilder<'a> {
self.inner.buffer = buffer;
self
}
pub fn format(mut self, format: Format) -> BufferViewCreateInfoBuilder<'a> {
self.inner.format = format;
self
}
pub fn offset(mut self, offset: DeviceSize) -> BufferViewCreateInfoBuilder<'a> {
self.inner.offset = offset;
self
}
pub fn range(mut self, range: DeviceSize) -> BufferViewCreateInfoBuilder<'a> {
self.inner.range = range;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsBufferViewCreateInfo>(
mut self,
next: &'a mut T,
) -> BufferViewCreateInfoBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> BufferViewCreateInfo {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkImageSubresource.html>"]
pub struct ImageSubresource {
pub aspect_mask: ImageAspectFlags,
pub mip_level: u32,
pub array_layer: u32,
}
impl ImageSubresource {
pub fn builder<'a>() -> ImageSubresourceBuilder<'a> {
ImageSubresourceBuilder {
inner: ImageSubresource::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ImageSubresourceBuilder<'a> {
inner: ImageSubresource,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for ImageSubresourceBuilder<'a> {
type Target = ImageSubresource;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ImageSubresourceBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ImageSubresourceBuilder<'a> {
pub fn aspect_mask(mut self, aspect_mask: ImageAspectFlags) -> ImageSubresourceBuilder<'a> {
self.inner.aspect_mask = aspect_mask;
self
}
pub fn mip_level(mut self, mip_level: u32) -> ImageSubresourceBuilder<'a> {
self.inner.mip_level = mip_level;
self
}
pub fn array_layer(mut self, array_layer: u32) -> ImageSubresourceBuilder<'a> {
self.inner.array_layer = array_layer;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ImageSubresource {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkImageSubresourceLayers.html>"]
pub struct ImageSubresourceLayers {
pub aspect_mask: ImageAspectFlags,
pub mip_level: u32,
pub base_array_layer: u32,
pub layer_count: u32,
}
impl ImageSubresourceLayers {
pub fn builder<'a>() -> ImageSubresourceLayersBuilder<'a> {
ImageSubresourceLayersBuilder {
inner: ImageSubresourceLayers::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ImageSubresourceLayersBuilder<'a> {
inner: ImageSubresourceLayers,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for ImageSubresourceLayersBuilder<'a> {
type Target = ImageSubresourceLayers;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ImageSubresourceLayersBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ImageSubresourceLayersBuilder<'a> {
pub fn aspect_mask(
mut self,
aspect_mask: ImageAspectFlags,
) -> ImageSubresourceLayersBuilder<'a> {
self.inner.aspect_mask = aspect_mask;
self
}
pub fn mip_level(mut self, mip_level: u32) -> ImageSubresourceLayersBuilder<'a> {
self.inner.mip_level = mip_level;
self
}
pub fn base_array_layer(mut self, base_array_layer: u32) -> ImageSubresourceLayersBuilder<'a> {
self.inner.base_array_layer = base_array_layer;
self
}
pub fn layer_count(mut self, layer_count: u32) -> ImageSubresourceLayersBuilder<'a> {
self.inner.layer_count = layer_count;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ImageSubresourceLayers {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkImageSubresourceRange.html>"]
pub struct ImageSubresourceRange {
pub aspect_mask: ImageAspectFlags,
pub base_mip_level: u32,
pub level_count: u32,
pub base_array_layer: u32,
pub layer_count: u32,
}
impl ImageSubresourceRange {
pub fn builder<'a>() -> ImageSubresourceRangeBuilder<'a> {
ImageSubresourceRangeBuilder {
inner: ImageSubresourceRange::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ImageSubresourceRangeBuilder<'a> {
inner: ImageSubresourceRange,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for ImageSubresourceRangeBuilder<'a> {
type Target = ImageSubresourceRange;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ImageSubresourceRangeBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ImageSubresourceRangeBuilder<'a> {
pub fn aspect_mask(
mut self,
aspect_mask: ImageAspectFlags,
) -> ImageSubresourceRangeBuilder<'a> {
self.inner.aspect_mask = aspect_mask;
self
}
pub fn base_mip_level(mut self, base_mip_level: u32) -> ImageSubresourceRangeBuilder<'a> {
self.inner.base_mip_level = base_mip_level;
self
}
pub fn level_count(mut self, level_count: u32) -> ImageSubresourceRangeBuilder<'a> {
self.inner.level_count = level_count;
self
}
pub fn base_array_layer(mut self, base_array_layer: u32) -> ImageSubresourceRangeBuilder<'a> {
self.inner.base_array_layer = base_array_layer;
self
}
pub fn layer_count(mut self, layer_count: u32) -> ImageSubresourceRangeBuilder<'a> {
self.inner.layer_count = layer_count;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ImageSubresourceRange {
self.inner
}
}
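// Illustrative sketch, not part of the generated bindings: a range covering
// every mip level and array layer of a color image. Assumes
// `ImageAspectFlags::COLOR`, `REMAINING_MIP_LEVELS`, and
// `REMAINING_ARRAY_LAYERS` are defined elsewhere in this module.
#[allow(dead_code)]
fn example_full_color_range<'a>() -> ImageSubresourceRangeBuilder<'a> {
    ImageSubresourceRange::builder()
        .aspect_mask(ImageAspectFlags::COLOR)
        .base_mip_level(0)
        .level_count(REMAINING_MIP_LEVELS)
        .base_array_layer(0)
        .layer_count(REMAINING_ARRAY_LAYERS)
}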
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkMemoryBarrier.html>"]
pub struct MemoryBarrier {
pub s_type: StructureType,
pub p_next: *const c_void,
pub src_access_mask: AccessFlags,
pub dst_access_mask: AccessFlags,
}
impl ::std::default::Default for MemoryBarrier {
fn default() -> MemoryBarrier {
MemoryBarrier {
s_type: StructureType::MEMORY_BARRIER,
p_next: ::std::ptr::null(),
src_access_mask: AccessFlags::default(),
dst_access_mask: AccessFlags::default(),
}
}
}
impl MemoryBarrier {
pub fn builder<'a>() -> MemoryBarrierBuilder<'a> {
MemoryBarrierBuilder {
inner: MemoryBarrier::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct MemoryBarrierBuilder<'a> {
inner: MemoryBarrier,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsMemoryBarrier {}
impl<'a> ::std::ops::Deref for MemoryBarrierBuilder<'a> {
type Target = MemoryBarrier;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for MemoryBarrierBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> MemoryBarrierBuilder<'a> {
pub fn src_access_mask(mut self, src_access_mask: AccessFlags) -> MemoryBarrierBuilder<'a> {
self.inner.src_access_mask = src_access_mask;
self
}
pub fn dst_access_mask(mut self, dst_access_mask: AccessFlags) -> MemoryBarrierBuilder<'a> {
self.inner.dst_access_mask = dst_access_mask;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsMemoryBarrier>(
mut self,
next: &'a mut T,
) -> MemoryBarrierBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> MemoryBarrier {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkBufferMemoryBarrier.html>"]
pub struct BufferMemoryBarrier {
pub s_type: StructureType,
pub p_next: *const c_void,
pub src_access_mask: AccessFlags,
pub dst_access_mask: AccessFlags,
pub src_queue_family_index: u32,
pub dst_queue_family_index: u32,
pub buffer: Buffer,
pub offset: DeviceSize,
pub size: DeviceSize,
}
impl ::std::default::Default for BufferMemoryBarrier {
fn default() -> BufferMemoryBarrier {
BufferMemoryBarrier {
s_type: StructureType::BUFFER_MEMORY_BARRIER,
p_next: ::std::ptr::null(),
src_access_mask: AccessFlags::default(),
dst_access_mask: AccessFlags::default(),
src_queue_family_index: u32::default(),
dst_queue_family_index: u32::default(),
buffer: Buffer::default(),
offset: DeviceSize::default(),
size: DeviceSize::default(),
}
}
}
impl BufferMemoryBarrier {
pub fn builder<'a>() -> BufferMemoryBarrierBuilder<'a> {
BufferMemoryBarrierBuilder {
inner: BufferMemoryBarrier::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct BufferMemoryBarrierBuilder<'a> {
inner: BufferMemoryBarrier,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsBufferMemoryBarrier {}
impl<'a> ::std::ops::Deref for BufferMemoryBarrierBuilder<'a> {
type Target = BufferMemoryBarrier;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for BufferMemoryBarrierBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> BufferMemoryBarrierBuilder<'a> {
pub fn src_access_mask(
mut self,
src_access_mask: AccessFlags,
) -> BufferMemoryBarrierBuilder<'a> {
self.inner.src_access_mask = src_access_mask;
self
}
pub fn dst_access_mask(
mut self,
dst_access_mask: AccessFlags,
) -> BufferMemoryBarrierBuilder<'a> {
self.inner.dst_access_mask = dst_access_mask;
self
}
pub fn src_queue_family_index(
mut self,
src_queue_family_index: u32,
) -> BufferMemoryBarrierBuilder<'a> {
self.inner.src_queue_family_index = src_queue_family_index;
self
}
pub fn dst_queue_family_index(
mut self,
dst_queue_family_index: u32,
) -> BufferMemoryBarrierBuilder<'a> {
self.inner.dst_queue_family_index = dst_queue_family_index;
self
}
pub fn buffer(mut self, buffer: Buffer) -> BufferMemoryBarrierBuilder<'a> {
self.inner.buffer = buffer;
self
}
pub fn offset(mut self, offset: DeviceSize) -> BufferMemoryBarrierBuilder<'a> {
self.inner.offset = offset;
self
}
pub fn size(mut self, size: DeviceSize) -> BufferMemoryBarrierBuilder<'a> {
self.inner.size = size;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsBufferMemoryBarrier>(
mut self,
next: &'a mut T,
) -> BufferMemoryBarrierBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> BufferMemoryBarrier {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkImageMemoryBarrier.html>"]
pub struct ImageMemoryBarrier {
pub s_type: StructureType,
pub p_next: *const c_void,
pub src_access_mask: AccessFlags,
pub dst_access_mask: AccessFlags,
pub old_layout: ImageLayout,
pub new_layout: ImageLayout,
pub src_queue_family_index: u32,
pub dst_queue_family_index: u32,
pub image: Image,
pub subresource_range: ImageSubresourceRange,
}
impl ::std::default::Default for ImageMemoryBarrier {
fn default() -> ImageMemoryBarrier {
ImageMemoryBarrier {
s_type: StructureType::IMAGE_MEMORY_BARRIER,
p_next: ::std::ptr::null(),
src_access_mask: AccessFlags::default(),
dst_access_mask: AccessFlags::default(),
old_layout: ImageLayout::default(),
new_layout: ImageLayout::default(),
src_queue_family_index: u32::default(),
dst_queue_family_index: u32::default(),
image: Image::default(),
subresource_range: ImageSubresourceRange::default(),
}
}
}
impl ImageMemoryBarrier {
pub fn builder<'a>() -> ImageMemoryBarrierBuilder<'a> {
ImageMemoryBarrierBuilder {
inner: ImageMemoryBarrier::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ImageMemoryBarrierBuilder<'a> {
inner: ImageMemoryBarrier,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsImageMemoryBarrier {}
impl<'a> ::std::ops::Deref for ImageMemoryBarrierBuilder<'a> {
type Target = ImageMemoryBarrier;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ImageMemoryBarrierBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ImageMemoryBarrierBuilder<'a> {
pub fn src_access_mask(
mut self,
src_access_mask: AccessFlags,
) -> ImageMemoryBarrierBuilder<'a> {
self.inner.src_access_mask = src_access_mask;
self
}
pub fn dst_access_mask(
mut self,
dst_access_mask: AccessFlags,
) -> ImageMemoryBarrierBuilder<'a> {
self.inner.dst_access_mask = dst_access_mask;
self
}
pub fn old_layout(mut self, old_layout: ImageLayout) -> ImageMemoryBarrierBuilder<'a> {
self.inner.old_layout = old_layout;
self
}
pub fn new_layout(mut self, new_layout: ImageLayout) -> ImageMemoryBarrierBuilder<'a> {
self.inner.new_layout = new_layout;
self
}
pub fn src_queue_family_index(
mut self,
src_queue_family_index: u32,
) -> ImageMemoryBarrierBuilder<'a> {
self.inner.src_queue_family_index = src_queue_family_index;
self
}
pub fn dst_queue_family_index(
mut self,
dst_queue_family_index: u32,
) -> ImageMemoryBarrierBuilder<'a> {
self.inner.dst_queue_family_index = dst_queue_family_index;
self
}
pub fn image(mut self, image: Image) -> ImageMemoryBarrierBuilder<'a> {
self.inner.image = image;
self
}
pub fn subresource_range(
mut self,
subresource_range: ImageSubresourceRange,
) -> ImageMemoryBarrierBuilder<'a> {
self.inner.subresource_range = subresource_range;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsImageMemoryBarrier>(
mut self,
next: &'a mut T,
) -> ImageMemoryBarrierBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ImageMemoryBarrier {
self.inner
}
}
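// Illustrative sketch, not part of the generated bindings: a layout transition
// from UNDEFINED to TRANSFER_DST_OPTIMAL with no queue family ownership
// transfer. Assumes `AccessFlags::TRANSFER_WRITE`, the `ImageLayout` variants
// used here, and `QUEUE_FAMILY_IGNORED` are defined elsewhere in this module.
#[allow(dead_code)]
fn example_transfer_dst_barrier<'a>(
    image: Image,
    subresource_range: ImageSubresourceRange,
) -> ImageMemoryBarrierBuilder<'a> {
    ImageMemoryBarrier::builder()
        .src_access_mask(AccessFlags::empty())
        .dst_access_mask(AccessFlags::TRANSFER_WRITE)
        .old_layout(ImageLayout::UNDEFINED)
        .new_layout(ImageLayout::TRANSFER_DST_OPTIMAL)
        .src_queue_family_index(QUEUE_FAMILY_IGNORED)
        .dst_queue_family_index(QUEUE_FAMILY_IGNORED)
        .image(image)
        .subresource_range(subresource_range)
}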
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkImageCreateInfo.html>"]
pub struct ImageCreateInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub flags: ImageCreateFlags,
pub image_type: ImageType,
pub format: Format,
pub extent: Extent3D,
pub mip_levels: u32,
pub array_layers: u32,
pub samples: SampleCountFlags,
pub tiling: ImageTiling,
pub usage: ImageUsageFlags,
pub sharing_mode: SharingMode,
pub queue_family_index_count: u32,
pub p_queue_family_indices: *const u32,
pub initial_layout: ImageLayout,
}
impl ::std::default::Default for ImageCreateInfo {
fn default() -> ImageCreateInfo {
ImageCreateInfo {
s_type: StructureType::IMAGE_CREATE_INFO,
p_next: ::std::ptr::null(),
flags: ImageCreateFlags::default(),
image_type: ImageType::default(),
format: Format::default(),
extent: Extent3D::default(),
mip_levels: u32::default(),
array_layers: u32::default(),
samples: SampleCountFlags::default(),
tiling: ImageTiling::default(),
usage: ImageUsageFlags::default(),
sharing_mode: SharingMode::default(),
queue_family_index_count: u32::default(),
p_queue_family_indices: ::std::ptr::null(),
initial_layout: ImageLayout::default(),
}
}
}
impl ImageCreateInfo {
pub fn builder<'a>() -> ImageCreateInfoBuilder<'a> {
ImageCreateInfoBuilder {
inner: ImageCreateInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ImageCreateInfoBuilder<'a> {
inner: ImageCreateInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsImageCreateInfo {}
impl<'a> ::std::ops::Deref for ImageCreateInfoBuilder<'a> {
type Target = ImageCreateInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ImageCreateInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ImageCreateInfoBuilder<'a> {
pub fn flags(mut self, flags: ImageCreateFlags) -> ImageCreateInfoBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn image_type(mut self, image_type: ImageType) -> ImageCreateInfoBuilder<'a> {
self.inner.image_type = image_type;
self
}
pub fn format(mut self, format: Format) -> ImageCreateInfoBuilder<'a> {
self.inner.format = format;
self
}
pub fn extent(mut self, extent: Extent3D) -> ImageCreateInfoBuilder<'a> {
self.inner.extent = extent;
self
}
pub fn mip_levels(mut self, mip_levels: u32) -> ImageCreateInfoBuilder<'a> {
self.inner.mip_levels = mip_levels;
self
}
pub fn array_layers(mut self, array_layers: u32) -> ImageCreateInfoBuilder<'a> {
self.inner.array_layers = array_layers;
self
}
pub fn samples(mut self, samples: SampleCountFlags) -> ImageCreateInfoBuilder<'a> {
self.inner.samples = samples;
self
}
pub fn tiling(mut self, tiling: ImageTiling) -> ImageCreateInfoBuilder<'a> {
self.inner.tiling = tiling;
self
}
pub fn usage(mut self, usage: ImageUsageFlags) -> ImageCreateInfoBuilder<'a> {
self.inner.usage = usage;
self
}
pub fn sharing_mode(mut self, sharing_mode: SharingMode) -> ImageCreateInfoBuilder<'a> {
self.inner.sharing_mode = sharing_mode;
self
}
pub fn queue_family_indices(
mut self,
queue_family_indices: &'a [u32],
) -> ImageCreateInfoBuilder<'a> {
self.inner.queue_family_index_count = queue_family_indices.len() as _;
self.inner.p_queue_family_indices = queue_family_indices.as_ptr();
self
}
pub fn initial_layout(mut self, initial_layout: ImageLayout) -> ImageCreateInfoBuilder<'a> {
self.inner.initial_layout = initial_layout;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsImageCreateInfo>(
mut self,
next: &'a mut T,
) -> ImageCreateInfoBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ImageCreateInfo {
self.inner
}
}
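// Illustrative sketch, not part of the generated bindings: a single-sampled 2D
// image intended to be copied into and then sampled. Assumes the `ImageType`,
// `Format`, `SampleCountFlags`, `ImageTiling`, `ImageUsageFlags`,
// `SharingMode`, and `ImageLayout` values named below are defined elsewhere in
// this module.
#[allow(dead_code)]
fn example_sampled_image_create_info<'a>(extent: Extent3D) -> ImageCreateInfoBuilder<'a> {
    ImageCreateInfo::builder()
        .image_type(ImageType::TYPE_2D)
        .format(Format::R8G8B8A8_UNORM)
        .extent(extent)
        .mip_levels(1)
        .array_layers(1)
        .samples(SampleCountFlags::TYPE_1)
        .tiling(ImageTiling::OPTIMAL)
        .usage(ImageUsageFlags::TRANSFER_DST | ImageUsageFlags::SAMPLED)
        .sharing_mode(SharingMode::EXCLUSIVE)
        .initial_layout(ImageLayout::UNDEFINED)
}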
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkSubresourceLayout.html>"]
pub struct SubresourceLayout {
pub offset: DeviceSize,
pub size: DeviceSize,
pub row_pitch: DeviceSize,
pub array_pitch: DeviceSize,
pub depth_pitch: DeviceSize,
}
impl SubresourceLayout {
pub fn builder<'a>() -> SubresourceLayoutBuilder<'a> {
SubresourceLayoutBuilder {
inner: SubresourceLayout::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct SubresourceLayoutBuilder<'a> {
inner: SubresourceLayout,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for SubresourceLayoutBuilder<'a> {
type Target = SubresourceLayout;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for SubresourceLayoutBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> SubresourceLayoutBuilder<'a> {
pub fn offset(mut self, offset: DeviceSize) -> SubresourceLayoutBuilder<'a> {
self.inner.offset = offset;
self
}
pub fn size(mut self, size: DeviceSize) -> SubresourceLayoutBuilder<'a> {
self.inner.size = size;
self
}
pub fn row_pitch(mut self, row_pitch: DeviceSize) -> SubresourceLayoutBuilder<'a> {
self.inner.row_pitch = row_pitch;
self
}
pub fn array_pitch(mut self, array_pitch: DeviceSize) -> SubresourceLayoutBuilder<'a> {
self.inner.array_pitch = array_pitch;
self
}
pub fn depth_pitch(mut self, depth_pitch: DeviceSize) -> SubresourceLayoutBuilder<'a> {
self.inner.depth_pitch = depth_pitch;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> SubresourceLayout {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkImageViewCreateInfo.html>"]
pub struct ImageViewCreateInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub flags: ImageViewCreateFlags,
pub image: Image,
pub view_type: ImageViewType,
pub format: Format,
pub components: ComponentMapping,
pub subresource_range: ImageSubresourceRange,
}
impl ::std::default::Default for ImageViewCreateInfo {
fn default() -> ImageViewCreateInfo {
ImageViewCreateInfo {
s_type: StructureType::IMAGE_VIEW_CREATE_INFO,
p_next: ::std::ptr::null(),
flags: ImageViewCreateFlags::default(),
image: Image::default(),
view_type: ImageViewType::default(),
format: Format::default(),
components: ComponentMapping::default(),
subresource_range: ImageSubresourceRange::default(),
}
}
}
impl ImageViewCreateInfo {
pub fn builder<'a>() -> ImageViewCreateInfoBuilder<'a> {
ImageViewCreateInfoBuilder {
inner: ImageViewCreateInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ImageViewCreateInfoBuilder<'a> {
inner: ImageViewCreateInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsImageViewCreateInfo {}
impl<'a> ::std::ops::Deref for ImageViewCreateInfoBuilder<'a> {
type Target = ImageViewCreateInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ImageViewCreateInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ImageViewCreateInfoBuilder<'a> {
pub fn flags(mut self, flags: ImageViewCreateFlags) -> ImageViewCreateInfoBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn image(mut self, image: Image) -> ImageViewCreateInfoBuilder<'a> {
self.inner.image = image;
self
}
pub fn view_type(mut self, view_type: ImageViewType) -> ImageViewCreateInfoBuilder<'a> {
self.inner.view_type = view_type;
self
}
pub fn format(mut self, format: Format) -> ImageViewCreateInfoBuilder<'a> {
self.inner.format = format;
self
}
pub fn components(mut self, components: ComponentMapping) -> ImageViewCreateInfoBuilder<'a> {
self.inner.components = components;
self
}
pub fn subresource_range(
mut self,
subresource_range: ImageSubresourceRange,
) -> ImageViewCreateInfoBuilder<'a> {
self.inner.subresource_range = subresource_range;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsImageViewCreateInfo>(
mut self,
next: &'a mut T,
) -> ImageViewCreateInfoBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ImageViewCreateInfo {
self.inner
}
}
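// Illustrative sketch of the ImageViewCreateInfo builder above: a 2D color view over
// the first mip level and array layer. `Image::default()` is a null handle standing
// in for a real VkImage; the format is an assumption for demonstration.
#[cfg(test)]
mod image_view_create_info_builder_example {
    use super::*;
    #[test]
    fn builds_a_basic_image_view_create_info() {
        let view_info = ImageViewCreateInfo::builder()
            .image(Image::default())
            .view_type(ImageViewType::TYPE_2D)
            .format(Format::R8G8B8A8_UNORM)
            .components(ComponentMapping {
                r: ComponentSwizzle::IDENTITY,
                g: ComponentSwizzle::IDENTITY,
                b: ComponentSwizzle::IDENTITY,
                a: ComponentSwizzle::IDENTITY,
            })
            .subresource_range(ImageSubresourceRange {
                aspect_mask: ImageAspectFlags::COLOR,
                base_mip_level: 0,
                level_count: 1,
                base_array_layer: 0,
                layer_count: 1,
            })
            .build();
        assert_eq!(view_info.subresource_range.layer_count, 1);
    }
}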
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkBufferCopy.html>"]
pub struct BufferCopy {
pub src_offset: DeviceSize,
pub dst_offset: DeviceSize,
pub size: DeviceSize,
}
impl BufferCopy {
pub fn builder<'a>() -> BufferCopyBuilder<'a> {
BufferCopyBuilder {
inner: BufferCopy::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct BufferCopyBuilder<'a> {
inner: BufferCopy,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for BufferCopyBuilder<'a> {
type Target = BufferCopy;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for BufferCopyBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> BufferCopyBuilder<'a> {
pub fn src_offset(mut self, src_offset: DeviceSize) -> BufferCopyBuilder<'a> {
self.inner.src_offset = src_offset;
self
}
pub fn dst_offset(mut self, dst_offset: DeviceSize) -> BufferCopyBuilder<'a> {
self.inner.dst_offset = dst_offset;
self
}
pub fn size(mut self, size: DeviceSize) -> BufferCopyBuilder<'a> {
self.inner.size = size;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> BufferCopy {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkSparseMemoryBind.html>"]
pub struct SparseMemoryBind {
pub resource_offset: DeviceSize,
pub size: DeviceSize,
pub memory: DeviceMemory,
pub memory_offset: DeviceSize,
pub flags: SparseMemoryBindFlags,
}
impl SparseMemoryBind {
pub fn builder<'a>() -> SparseMemoryBindBuilder<'a> {
SparseMemoryBindBuilder {
inner: SparseMemoryBind::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct SparseMemoryBindBuilder<'a> {
inner: SparseMemoryBind,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for SparseMemoryBindBuilder<'a> {
type Target = SparseMemoryBind;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for SparseMemoryBindBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> SparseMemoryBindBuilder<'a> {
pub fn resource_offset(mut self, resource_offset: DeviceSize) -> SparseMemoryBindBuilder<'a> {
self.inner.resource_offset = resource_offset;
self
}
pub fn size(mut self, size: DeviceSize) -> SparseMemoryBindBuilder<'a> {
self.inner.size = size;
self
}
pub fn memory(mut self, memory: DeviceMemory) -> SparseMemoryBindBuilder<'a> {
self.inner.memory = memory;
self
}
pub fn memory_offset(mut self, memory_offset: DeviceSize) -> SparseMemoryBindBuilder<'a> {
self.inner.memory_offset = memory_offset;
self
}
pub fn flags(mut self, flags: SparseMemoryBindFlags) -> SparseMemoryBindBuilder<'a> {
self.inner.flags = flags;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> SparseMemoryBind {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkSparseImageMemoryBind.html>"]
pub struct SparseImageMemoryBind {
pub subresource: ImageSubresource,
pub offset: Offset3D,
pub extent: Extent3D,
pub memory: DeviceMemory,
pub memory_offset: DeviceSize,
pub flags: SparseMemoryBindFlags,
}
impl SparseImageMemoryBind {
pub fn builder<'a>() -> SparseImageMemoryBindBuilder<'a> {
SparseImageMemoryBindBuilder {
inner: SparseImageMemoryBind::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct SparseImageMemoryBindBuilder<'a> {
inner: SparseImageMemoryBind,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for SparseImageMemoryBindBuilder<'a> {
type Target = SparseImageMemoryBind;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for SparseImageMemoryBindBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> SparseImageMemoryBindBuilder<'a> {
pub fn subresource(
mut self,
subresource: ImageSubresource,
) -> SparseImageMemoryBindBuilder<'a> {
self.inner.subresource = subresource;
self
}
pub fn offset(mut self, offset: Offset3D) -> SparseImageMemoryBindBuilder<'a> {
self.inner.offset = offset;
self
}
pub fn extent(mut self, extent: Extent3D) -> SparseImageMemoryBindBuilder<'a> {
self.inner.extent = extent;
self
}
pub fn memory(mut self, memory: DeviceMemory) -> SparseImageMemoryBindBuilder<'a> {
self.inner.memory = memory;
self
}
pub fn memory_offset(mut self, memory_offset: DeviceSize) -> SparseImageMemoryBindBuilder<'a> {
self.inner.memory_offset = memory_offset;
self
}
pub fn flags(mut self, flags: SparseMemoryBindFlags) -> SparseImageMemoryBindBuilder<'a> {
self.inner.flags = flags;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> SparseImageMemoryBind {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkSparseBufferMemoryBindInfo.html>"]
pub struct SparseBufferMemoryBindInfo {
pub buffer: Buffer,
pub bind_count: u32,
pub p_binds: *const SparseMemoryBind,
}
impl ::std::default::Default for SparseBufferMemoryBindInfo {
fn default() -> SparseBufferMemoryBindInfo {
SparseBufferMemoryBindInfo {
buffer: Buffer::default(),
bind_count: u32::default(),
p_binds: ::std::ptr::null(),
}
}
}
impl SparseBufferMemoryBindInfo {
pub fn builder<'a>() -> SparseBufferMemoryBindInfoBuilder<'a> {
SparseBufferMemoryBindInfoBuilder {
inner: SparseBufferMemoryBindInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct SparseBufferMemoryBindInfoBuilder<'a> {
inner: SparseBufferMemoryBindInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for SparseBufferMemoryBindInfoBuilder<'a> {
type Target = SparseBufferMemoryBindInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for SparseBufferMemoryBindInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> SparseBufferMemoryBindInfoBuilder<'a> {
pub fn buffer(mut self, buffer: Buffer) -> SparseBufferMemoryBindInfoBuilder<'a> {
self.inner.buffer = buffer;
self
}
pub fn binds(mut self, binds: &'a [SparseMemoryBind]) -> SparseBufferMemoryBindInfoBuilder<'a> {
self.inner.bind_count = binds.len() as _;
self.inner.p_binds = binds.as_ptr();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> SparseBufferMemoryBindInfo {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkSparseImageOpaqueMemoryBindInfo.html>"]
pub struct SparseImageOpaqueMemoryBindInfo {
pub image: Image,
pub bind_count: u32,
pub p_binds: *const SparseMemoryBind,
}
impl ::std::default::Default for SparseImageOpaqueMemoryBindInfo {
fn default() -> SparseImageOpaqueMemoryBindInfo {
SparseImageOpaqueMemoryBindInfo {
image: Image::default(),
bind_count: u32::default(),
p_binds: ::std::ptr::null(),
}
}
}
impl SparseImageOpaqueMemoryBindInfo {
pub fn builder<'a>() -> SparseImageOpaqueMemoryBindInfoBuilder<'a> {
SparseImageOpaqueMemoryBindInfoBuilder {
inner: SparseImageOpaqueMemoryBindInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct SparseImageOpaqueMemoryBindInfoBuilder<'a> {
inner: SparseImageOpaqueMemoryBindInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for SparseImageOpaqueMemoryBindInfoBuilder<'a> {
type Target = SparseImageOpaqueMemoryBindInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for SparseImageOpaqueMemoryBindInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> SparseImageOpaqueMemoryBindInfoBuilder<'a> {
pub fn image(mut self, image: Image) -> SparseImageOpaqueMemoryBindInfoBuilder<'a> {
self.inner.image = image;
self
}
pub fn binds(
mut self,
binds: &'a [SparseMemoryBind],
) -> SparseImageOpaqueMemoryBindInfoBuilder<'a> {
self.inner.bind_count = binds.len() as _;
self.inner.p_binds = binds.as_ptr();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> SparseImageOpaqueMemoryBindInfo {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkSparseImageMemoryBindInfo.html>"]
pub struct SparseImageMemoryBindInfo {
pub image: Image,
pub bind_count: u32,
pub p_binds: *const SparseImageMemoryBind,
}
impl ::std::default::Default for SparseImageMemoryBindInfo {
fn default() -> SparseImageMemoryBindInfo {
SparseImageMemoryBindInfo {
image: Image::default(),
bind_count: u32::default(),
p_binds: ::std::ptr::null(),
}
}
}
impl SparseImageMemoryBindInfo {
pub fn builder<'a>() -> SparseImageMemoryBindInfoBuilder<'a> {
SparseImageMemoryBindInfoBuilder {
inner: SparseImageMemoryBindInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct SparseImageMemoryBindInfoBuilder<'a> {
inner: SparseImageMemoryBindInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for SparseImageMemoryBindInfoBuilder<'a> {
type Target = SparseImageMemoryBindInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for SparseImageMemoryBindInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> SparseImageMemoryBindInfoBuilder<'a> {
pub fn image(mut self, image: Image) -> SparseImageMemoryBindInfoBuilder<'a> {
self.inner.image = image;
self
}
pub fn binds(
mut self,
binds: &'a [SparseImageMemoryBind],
) -> SparseImageMemoryBindInfoBuilder<'a> {
self.inner.bind_count = binds.len() as _;
self.inner.p_binds = binds.as_ptr();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> SparseImageMemoryBindInfo {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkBindSparseInfo.html>"]
pub struct BindSparseInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub wait_semaphore_count: u32,
pub p_wait_semaphores: *const Semaphore,
pub buffer_bind_count: u32,
pub p_buffer_binds: *const SparseBufferMemoryBindInfo,
pub image_opaque_bind_count: u32,
pub p_image_opaque_binds: *const SparseImageOpaqueMemoryBindInfo,
pub image_bind_count: u32,
pub p_image_binds: *const SparseImageMemoryBindInfo,
pub signal_semaphore_count: u32,
pub p_signal_semaphores: *const Semaphore,
}
impl ::std::default::Default for BindSparseInfo {
fn default() -> BindSparseInfo {
BindSparseInfo {
s_type: StructureType::BIND_SPARSE_INFO,
p_next: ::std::ptr::null(),
wait_semaphore_count: u32::default(),
p_wait_semaphores: ::std::ptr::null(),
buffer_bind_count: u32::default(),
p_buffer_binds: ::std::ptr::null(),
image_opaque_bind_count: u32::default(),
p_image_opaque_binds: ::std::ptr::null(),
image_bind_count: u32::default(),
p_image_binds: ::std::ptr::null(),
signal_semaphore_count: u32::default(),
p_signal_semaphores: ::std::ptr::null(),
}
}
}
impl BindSparseInfo {
pub fn builder<'a>() -> BindSparseInfoBuilder<'a> {
BindSparseInfoBuilder {
inner: BindSparseInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct BindSparseInfoBuilder<'a> {
inner: BindSparseInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsBindSparseInfo {}
impl<'a> ::std::ops::Deref for BindSparseInfoBuilder<'a> {
type Target = BindSparseInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for BindSparseInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> BindSparseInfoBuilder<'a> {
pub fn wait_semaphores(
mut self,
wait_semaphores: &'a [Semaphore],
) -> BindSparseInfoBuilder<'a> {
self.inner.wait_semaphore_count = wait_semaphores.len() as _;
self.inner.p_wait_semaphores = wait_semaphores.as_ptr();
self
}
pub fn buffer_binds(
mut self,
buffer_binds: &'a [SparseBufferMemoryBindInfo],
) -> BindSparseInfoBuilder<'a> {
self.inner.buffer_bind_count = buffer_binds.len() as _;
self.inner.p_buffer_binds = buffer_binds.as_ptr();
self
}
pub fn image_opaque_binds(
mut self,
image_opaque_binds: &'a [SparseImageOpaqueMemoryBindInfo],
) -> BindSparseInfoBuilder<'a> {
self.inner.image_opaque_bind_count = image_opaque_binds.len() as _;
self.inner.p_image_opaque_binds = image_opaque_binds.as_ptr();
self
}
pub fn image_binds(
mut self,
image_binds: &'a [SparseImageMemoryBindInfo],
) -> BindSparseInfoBuilder<'a> {
self.inner.image_bind_count = image_binds.len() as _;
self.inner.p_image_binds = image_binds.as_ptr();
self
}
pub fn signal_semaphores(
mut self,
signal_semaphores: &'a [Semaphore],
) -> BindSparseInfoBuilder<'a> {
self.inner.signal_semaphore_count = signal_semaphores.len() as _;
self.inner.p_signal_semaphores = signal_semaphores.as_ptr();
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsBindSparseInfo>(
mut self,
next: &'a mut T,
) -> BindSparseInfoBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> BindSparseInfo {
self.inner
}
}
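// Illustrative sketch of the sparse-binding builders above: one SparseMemoryBind
// feeding a SparseBufferMemoryBindInfo, which in turn feeds BindSparseInfo. The
// null Buffer/DeviceMemory handles and the 64 KiB size are placeholders; real code
// uses handles returned by the device and sizes from the memory requirements.
#[cfg(test)]
mod bind_sparse_info_builder_example {
    use super::*;
    #[test]
    fn builds_a_buffer_sparse_bind() {
        let bind = SparseMemoryBind::builder()
            .resource_offset(0)
            .size(65536)
            .memory(DeviceMemory::default())
            .memory_offset(0)
            .build();
        let binds = [bind];
        let buffer_bind = SparseBufferMemoryBindInfo::builder()
            .buffer(Buffer::default())
            .binds(&binds)
            .build();
        let buffer_binds = [buffer_bind];
        let info = BindSparseInfo::builder().buffer_binds(&buffer_binds).build();
        assert_eq!(buffer_bind.bind_count, 1);
        assert_eq!(info.buffer_bind_count, 1);
        // Semaphore arrays were not set, so their counts stay at the default of zero.
        assert_eq!(info.wait_semaphore_count, 0);
    }
}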
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkImageCopy.html>"]
pub struct ImageCopy {
pub src_subresource: ImageSubresourceLayers,
pub src_offset: Offset3D,
pub dst_subresource: ImageSubresourceLayers,
pub dst_offset: Offset3D,
pub extent: Extent3D,
}
impl ImageCopy {
pub fn builder<'a>() -> ImageCopyBuilder<'a> {
ImageCopyBuilder {
inner: ImageCopy::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ImageCopyBuilder<'a> {
inner: ImageCopy,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for ImageCopyBuilder<'a> {
type Target = ImageCopy;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ImageCopyBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ImageCopyBuilder<'a> {
pub fn src_subresource(
mut self,
src_subresource: ImageSubresourceLayers,
) -> ImageCopyBuilder<'a> {
self.inner.src_subresource = src_subresource;
self
}
pub fn src_offset(mut self, src_offset: Offset3D) -> ImageCopyBuilder<'a> {
self.inner.src_offset = src_offset;
self
}
pub fn dst_subresource(
mut self,
dst_subresource: ImageSubresourceLayers,
) -> ImageCopyBuilder<'a> {
self.inner.dst_subresource = dst_subresource;
self
}
pub fn dst_offset(mut self, dst_offset: Offset3D) -> ImageCopyBuilder<'a> {
self.inner.dst_offset = dst_offset;
self
}
pub fn extent(mut self, extent: Extent3D) -> ImageCopyBuilder<'a> {
self.inner.extent = extent;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ImageCopy {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkImageBlit.html>"]
pub struct ImageBlit {
pub src_subresource: ImageSubresourceLayers,
pub src_offsets: [Offset3D; 2],
pub dst_subresource: ImageSubresourceLayers,
pub dst_offsets: [Offset3D; 2],
}
impl ::std::default::Default for ImageBlit {
fn default() -> ImageBlit {
ImageBlit {
src_subresource: ImageSubresourceLayers::default(),
src_offsets: unsafe { ::std::mem::zeroed() },
dst_subresource: ImageSubresourceLayers::default(),
dst_offsets: unsafe { ::std::mem::zeroed() },
}
}
}
impl ImageBlit {
pub fn builder<'a>() -> ImageBlitBuilder<'a> {
ImageBlitBuilder {
inner: ImageBlit::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ImageBlitBuilder<'a> {
inner: ImageBlit,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for ImageBlitBuilder<'a> {
type Target = ImageBlit;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ImageBlitBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ImageBlitBuilder<'a> {
pub fn src_subresource(
mut self,
src_subresource: ImageSubresourceLayers,
) -> ImageBlitBuilder<'a> {
self.inner.src_subresource = src_subresource;
self
}
pub fn src_offsets(mut self, src_offsets: [Offset3D; 2]) -> ImageBlitBuilder<'a> {
self.inner.src_offsets = src_offsets;
self
}
pub fn dst_subresource(
mut self,
dst_subresource: ImageSubresourceLayers,
) -> ImageBlitBuilder<'a> {
self.inner.dst_subresource = dst_subresource;
self
}
pub fn dst_offsets(mut self, dst_offsets: [Offset3D; 2]) -> ImageBlitBuilder<'a> {
self.inner.dst_offsets = dst_offsets;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ImageBlit {
self.inner
}
}
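// Illustrative sketch of the ImageBlit builder above: the two Offset3D entries in
// src_offsets/dst_offsets are the opposite corners of each region, so a 64x64
// source region scaled up to 128x128 looks like the example below. The concrete
// sizes are assumptions for demonstration.
#[cfg(test)]
mod image_blit_builder_example {
    use super::*;
    #[test]
    fn builds_a_scaling_blit_region() {
        let color_layer = ImageSubresourceLayers {
            aspect_mask: ImageAspectFlags::COLOR,
            mip_level: 0,
            base_array_layer: 0,
            layer_count: 1,
        };
        let blit = ImageBlit::builder()
            .src_subresource(color_layer)
            .src_offsets([
                Offset3D { x: 0, y: 0, z: 0 },
                Offset3D { x: 64, y: 64, z: 1 },
            ])
            .dst_subresource(color_layer)
            .dst_offsets([
                Offset3D { x: 0, y: 0, z: 0 },
                Offset3D { x: 128, y: 128, z: 1 },
            ])
            .build();
        assert_eq!(blit.src_offsets[1].x, 64);
        assert_eq!(blit.dst_offsets[1].x, 128);
    }
}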
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkBufferImageCopy.html>"]
pub struct BufferImageCopy {
pub buffer_offset: DeviceSize,
pub buffer_row_length: u32,
pub buffer_image_height: u32,
pub image_subresource: ImageSubresourceLayers,
pub image_offset: Offset3D,
pub image_extent: Extent3D,
}
impl BufferImageCopy {
pub fn builder<'a>() -> BufferImageCopyBuilder<'a> {
BufferImageCopyBuilder {
inner: BufferImageCopy::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct BufferImageCopyBuilder<'a> {
inner: BufferImageCopy,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for BufferImageCopyBuilder<'a> {
type Target = BufferImageCopy;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for BufferImageCopyBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> BufferImageCopyBuilder<'a> {
pub fn buffer_offset(mut self, buffer_offset: DeviceSize) -> BufferImageCopyBuilder<'a> {
self.inner.buffer_offset = buffer_offset;
self
}
pub fn buffer_row_length(mut self, buffer_row_length: u32) -> BufferImageCopyBuilder<'a> {
self.inner.buffer_row_length = buffer_row_length;
self
}
pub fn buffer_image_height(mut self, buffer_image_height: u32) -> BufferImageCopyBuilder<'a> {
self.inner.buffer_image_height = buffer_image_height;
self
}
pub fn image_subresource(
mut self,
image_subresource: ImageSubresourceLayers,
) -> BufferImageCopyBuilder<'a> {
self.inner.image_subresource = image_subresource;
self
}
pub fn image_offset(mut self, image_offset: Offset3D) -> BufferImageCopyBuilder<'a> {
self.inner.image_offset = image_offset;
self
}
pub fn image_extent(mut self, image_extent: Extent3D) -> BufferImageCopyBuilder<'a> {
self.inner.image_extent = image_extent;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> BufferImageCopy {
self.inner
}
}
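// Illustrative sketch of the BufferImageCopy builder above: a full-image upload
// region. buffer_row_length and buffer_image_height of zero mean the buffer data is
// tightly packed according to image_extent; the 256x256 color image is an assumed
// example.
#[cfg(test)]
mod buffer_image_copy_builder_example {
    use super::*;
    #[test]
    fn builds_a_tightly_packed_copy_region() {
        let region = BufferImageCopy::builder()
            .buffer_offset(0)
            .buffer_row_length(0)
            .buffer_image_height(0)
            .image_subresource(ImageSubresourceLayers {
                aspect_mask: ImageAspectFlags::COLOR,
                mip_level: 0,
                base_array_layer: 0,
                layer_count: 1,
            })
            .image_offset(Offset3D { x: 0, y: 0, z: 0 })
            .image_extent(Extent3D {
                width: 256,
                height: 256,
                depth: 1,
            })
            .build();
        assert_eq!(region.image_extent.width, 256);
        assert_eq!(region.buffer_row_length, 0);
    }
}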
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkImageResolve.html>"]
pub struct ImageResolve {
pub src_subresource: ImageSubresourceLayers,
pub src_offset: Offset3D,
pub dst_subresource: ImageSubresourceLayers,
pub dst_offset: Offset3D,
pub extent: Extent3D,
}
impl ImageResolve {
pub fn builder<'a>() -> ImageResolveBuilder<'a> {
ImageResolveBuilder {
inner: ImageResolve::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ImageResolveBuilder<'a> {
inner: ImageResolve,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for ImageResolveBuilder<'a> {
type Target = ImageResolve;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ImageResolveBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ImageResolveBuilder<'a> {
pub fn src_subresource(
mut self,
src_subresource: ImageSubresourceLayers,
) -> ImageResolveBuilder<'a> {
self.inner.src_subresource = src_subresource;
self
}
pub fn src_offset(mut self, src_offset: Offset3D) -> ImageResolveBuilder<'a> {
self.inner.src_offset = src_offset;
self
}
pub fn dst_subresource(
mut self,
dst_subresource: ImageSubresourceLayers,
) -> ImageResolveBuilder<'a> {
self.inner.dst_subresource = dst_subresource;
self
}
pub fn dst_offset(mut self, dst_offset: Offset3D) -> ImageResolveBuilder<'a> {
self.inner.dst_offset = dst_offset;
self
}
pub fn extent(mut self, extent: Extent3D) -> ImageResolveBuilder<'a> {
self.inner.extent = extent;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ImageResolve {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkShaderModuleCreateInfo.html>"]
pub struct ShaderModuleCreateInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub flags: ShaderModuleCreateFlags,
pub code_size: usize,
pub p_code: *const u32,
}
impl ::std::default::Default for ShaderModuleCreateInfo {
fn default() -> ShaderModuleCreateInfo {
ShaderModuleCreateInfo {
s_type: StructureType::SHADER_MODULE_CREATE_INFO,
p_next: ::std::ptr::null(),
flags: ShaderModuleCreateFlags::default(),
code_size: usize::default(),
p_code: ::std::ptr::null(),
}
}
}
impl ShaderModuleCreateInfo {
pub fn builder<'a>() -> ShaderModuleCreateInfoBuilder<'a> {
ShaderModuleCreateInfoBuilder {
inner: ShaderModuleCreateInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ShaderModuleCreateInfoBuilder<'a> {
inner: ShaderModuleCreateInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsShaderModuleCreateInfo {}
impl<'a> ::std::ops::Deref for ShaderModuleCreateInfoBuilder<'a> {
type Target = ShaderModuleCreateInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ShaderModuleCreateInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ShaderModuleCreateInfoBuilder<'a> {
pub fn flags(mut self, flags: ShaderModuleCreateFlags) -> ShaderModuleCreateInfoBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn code(mut self, code: &'a [u32]) -> ShaderModuleCreateInfoBuilder<'a> {
// `code_size` is measured in bytes, while the slice length counts 32-bit
// SPIR-V words, hence the factor of four.
self.inner.code_size = code.len() * 4;
self.inner.p_code = code.as_ptr() as *const u32;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsShaderModuleCreateInfo>(
mut self,
next: &'a mut T,
) -> ShaderModuleCreateInfoBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ShaderModuleCreateInfo {
self.inner
}
}
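// Illustrative sketch of the ShaderModuleCreateInfo builder above: `code` takes the
// SPIR-V as a `&[u32]` word slice and records `code_size` in bytes (length * 4).
// The words below are dummy placeholders, not a valid SPIR-V module.
#[cfg(test)]
mod shader_module_create_info_builder_example {
    use super::*;
    #[test]
    fn code_size_is_recorded_in_bytes() {
        let words: [u32; 3] = [0x0723_0203, 0, 0];
        let info = ShaderModuleCreateInfo::builder().code(&words).build();
        assert_eq!(info.code_size, words.len() * 4);
        assert!(!info.p_code.is_null());
    }
}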
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDescriptorSetLayoutBinding.html>"]
pub struct DescriptorSetLayoutBinding {
pub binding: u32,
pub descriptor_type: DescriptorType,
pub descriptor_count: u32,
pub stage_flags: ShaderStageFlags,
pub p_immutable_samplers: *const Sampler,
}
impl ::std::default::Default for DescriptorSetLayoutBinding {
fn default() -> DescriptorSetLayoutBinding {
DescriptorSetLayoutBinding {
binding: u32::default(),
descriptor_type: DescriptorType::default(),
descriptor_count: u32::default(),
stage_flags: ShaderStageFlags::default(),
p_immutable_samplers: ::std::ptr::null(),
}
}
}
impl DescriptorSetLayoutBinding {
pub fn builder<'a>() -> DescriptorSetLayoutBindingBuilder<'a> {
DescriptorSetLayoutBindingBuilder {
inner: DescriptorSetLayoutBinding::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct DescriptorSetLayoutBindingBuilder<'a> {
inner: DescriptorSetLayoutBinding,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for DescriptorSetLayoutBindingBuilder<'a> {
type Target = DescriptorSetLayoutBinding;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for DescriptorSetLayoutBindingBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> DescriptorSetLayoutBindingBuilder<'a> {
pub fn binding(mut self, binding: u32) -> DescriptorSetLayoutBindingBuilder<'a> {
self.inner.binding = binding;
self
}
pub fn descriptor_type(
mut self,
descriptor_type: DescriptorType,
) -> DescriptorSetLayoutBindingBuilder<'a> {
self.inner.descriptor_type = descriptor_type;
self
}
pub fn descriptor_count(
mut self,
descriptor_count: u32,
) -> DescriptorSetLayoutBindingBuilder<'a> {
self.inner.descriptor_count = descriptor_count;
self
}
pub fn stage_flags(
mut self,
stage_flags: ShaderStageFlags,
) -> DescriptorSetLayoutBindingBuilder<'a> {
self.inner.stage_flags = stage_flags;
self
}
pub fn immutable_samplers(
mut self,
immutable_samplers: &'a [Sampler],
) -> DescriptorSetLayoutBindingBuilder<'a> {
// The slice length overwrites `descriptor_count`, since the Vulkan spec requires
// `p_immutable_samplers` to point to exactly `descriptor_count` sampler handles.
self.inner.descriptor_count = immutable_samplers.len() as _;
self.inner.p_immutable_samplers = immutable_samplers.as_ptr();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> DescriptorSetLayoutBinding {
self.inner
}
}
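// Illustrative sketch of the DescriptorSetLayoutBinding builder above. Note the
// interaction between `descriptor_count` and `immutable_samplers`: the slice setter
// overwrites the count with the slice length. The null Sampler handles and the
// binding numbers are placeholders for demonstration.
#[cfg(test)]
mod descriptor_set_layout_binding_builder_example {
    use super::*;
    #[test]
    fn immutable_samplers_overwrite_descriptor_count() {
        let uniform_binding = DescriptorSetLayoutBinding::builder()
            .binding(0)
            .descriptor_type(DescriptorType::UNIFORM_BUFFER)
            .descriptor_count(1)
            .stage_flags(ShaderStageFlags::VERTEX)
            .build();
        assert_eq!(uniform_binding.descriptor_count, 1);
        let samplers = [Sampler::default(); 2];
        let sampler_binding = DescriptorSetLayoutBinding::builder()
            .binding(1)
            .descriptor_type(DescriptorType::SAMPLER)
            .stage_flags(ShaderStageFlags::FRAGMENT)
            .immutable_samplers(&samplers)
            .build();
        assert_eq!(sampler_binding.descriptor_count, 2);
    }
}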
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDescriptorSetLayoutCreateInfo.html>"]
pub struct DescriptorSetLayoutCreateInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub flags: DescriptorSetLayoutCreateFlags,
pub binding_count: u32,
pub p_bindings: *const DescriptorSetLayoutBinding,
}
impl ::std::default::Default for DescriptorSetLayoutCreateInfo {
fn default() -> DescriptorSetLayoutCreateInfo {
DescriptorSetLayoutCreateInfo {
s_type: StructureType::DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
p_next: ::std::ptr::null(),
flags: DescriptorSetLayoutCreateFlags::default(),
binding_count: u32::default(),
p_bindings: ::std::ptr::null(),
}
}
}
impl DescriptorSetLayoutCreateInfo {
pub fn builder<'a>() -> DescriptorSetLayoutCreateInfoBuilder<'a> {
DescriptorSetLayoutCreateInfoBuilder {
inner: DescriptorSetLayoutCreateInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct DescriptorSetLayoutCreateInfoBuilder<'a> {
inner: DescriptorSetLayoutCreateInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsDescriptorSetLayoutCreateInfo {}
impl<'a> ::std::ops::Deref for DescriptorSetLayoutCreateInfoBuilder<'a> {
type Target = DescriptorSetLayoutCreateInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for DescriptorSetLayoutCreateInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> DescriptorSetLayoutCreateInfoBuilder<'a> {
pub fn flags(
mut self,
flags: DescriptorSetLayoutCreateFlags,
) -> DescriptorSetLayoutCreateInfoBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn bindings(
mut self,
bindings: &'a [DescriptorSetLayoutBinding],
) -> DescriptorSetLayoutCreateInfoBuilder<'a> {
self.inner.binding_count = bindings.len() as _;
self.inner.p_bindings = bindings.as_ptr();
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsDescriptorSetLayoutCreateInfo>(
mut self,
next: &'a mut T,
) -> DescriptorSetLayoutCreateInfoBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> DescriptorSetLayoutCreateInfo {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDescriptorPoolSize.html>"]
pub struct DescriptorPoolSize {
pub ty: DescriptorType,
pub descriptor_count: u32,
}
impl DescriptorPoolSize {
pub fn builder<'a>() -> DescriptorPoolSizeBuilder<'a> {
DescriptorPoolSizeBuilder {
inner: DescriptorPoolSize::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct DescriptorPoolSizeBuilder<'a> {
inner: DescriptorPoolSize,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for DescriptorPoolSizeBuilder<'a> {
type Target = DescriptorPoolSize;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for DescriptorPoolSizeBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> DescriptorPoolSizeBuilder<'a> {
pub fn ty(mut self, ty: DescriptorType) -> DescriptorPoolSizeBuilder<'a> {
self.inner.ty = ty;
self
}
pub fn descriptor_count(mut self, descriptor_count: u32) -> DescriptorPoolSizeBuilder<'a> {
self.inner.descriptor_count = descriptor_count;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> DescriptorPoolSize {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDescriptorPoolCreateInfo.html>"]
pub struct DescriptorPoolCreateInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub flags: DescriptorPoolCreateFlags,
pub max_sets: u32,
pub pool_size_count: u32,
pub p_pool_sizes: *const DescriptorPoolSize,
}
impl ::std::default::Default for DescriptorPoolCreateInfo {
fn default() -> DescriptorPoolCreateInfo {
DescriptorPoolCreateInfo {
s_type: StructureType::DESCRIPTOR_POOL_CREATE_INFO,
p_next: ::std::ptr::null(),
flags: DescriptorPoolCreateFlags::default(),
max_sets: u32::default(),
pool_size_count: u32::default(),
p_pool_sizes: ::std::ptr::null(),
}
}
}
impl DescriptorPoolCreateInfo {
pub fn builder<'a>() -> DescriptorPoolCreateInfoBuilder<'a> {
DescriptorPoolCreateInfoBuilder {
inner: DescriptorPoolCreateInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct DescriptorPoolCreateInfoBuilder<'a> {
inner: DescriptorPoolCreateInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsDescriptorPoolCreateInfo {}
impl<'a> ::std::ops::Deref for DescriptorPoolCreateInfoBuilder<'a> {
type Target = DescriptorPoolCreateInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for DescriptorPoolCreateInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> DescriptorPoolCreateInfoBuilder<'a> {
pub fn flags(
mut self,
flags: DescriptorPoolCreateFlags,
) -> DescriptorPoolCreateInfoBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn max_sets(mut self, max_sets: u32) -> DescriptorPoolCreateInfoBuilder<'a> {
self.inner.max_sets = max_sets;
self
}
pub fn pool_sizes(
mut self,
pool_sizes: &'a [DescriptorPoolSize],
) -> DescriptorPoolCreateInfoBuilder<'a> {
self.inner.pool_size_count = pool_sizes.len() as _;
self.inner.p_pool_sizes = pool_sizes.as_ptr();
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsDescriptorPoolCreateInfo>(
mut self,
next: &'a mut T,
) -> DescriptorPoolCreateInfoBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> DescriptorPoolCreateInfo {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDescriptorSetAllocateInfo.html>"]
pub struct DescriptorSetAllocateInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub descriptor_pool: DescriptorPool,
pub descriptor_set_count: u32,
pub p_set_layouts: *const DescriptorSetLayout,
}
impl ::std::default::Default for DescriptorSetAllocateInfo {
fn default() -> DescriptorSetAllocateInfo {
DescriptorSetAllocateInfo {
s_type: StructureType::DESCRIPTOR_SET_ALLOCATE_INFO,
p_next: ::std::ptr::null(),
descriptor_pool: DescriptorPool::default(),
descriptor_set_count: u32::default(),
p_set_layouts: ::std::ptr::null(),
}
}
}
impl DescriptorSetAllocateInfo {
pub fn builder<'a>() -> DescriptorSetAllocateInfoBuilder<'a> {
DescriptorSetAllocateInfoBuilder {
inner: DescriptorSetAllocateInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct DescriptorSetAllocateInfoBuilder<'a> {
inner: DescriptorSetAllocateInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsDescriptorSetAllocateInfo {}
impl<'a> ::std::ops::Deref for DescriptorSetAllocateInfoBuilder<'a> {
type Target = DescriptorSetAllocateInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for DescriptorSetAllocateInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> DescriptorSetAllocateInfoBuilder<'a> {
pub fn descriptor_pool(
mut self,
descriptor_pool: DescriptorPool,
) -> DescriptorSetAllocateInfoBuilder<'a> {
self.inner.descriptor_pool = descriptor_pool;
self
}
pub fn set_layouts(
mut self,
set_layouts: &'a [DescriptorSetLayout],
) -> DescriptorSetAllocateInfoBuilder<'a> {
self.inner.descriptor_set_count = set_layouts.len() as _;
self.inner.p_set_layouts = set_layouts.as_ptr();
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsDescriptorSetAllocateInfo>(
mut self,
next: &'a mut T,
) -> DescriptorSetAllocateInfoBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> DescriptorSetAllocateInfo {
self.inner
}
}
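// Illustrative sketch of the descriptor pool and allocation builders above. The
// pool sizes, set count, and the null DescriptorPool/DescriptorSetLayout handles
// are placeholders; real code uses handles created on the device.
#[cfg(test)]
mod descriptor_pool_and_allocate_builder_example {
    use super::*;
    #[test]
    fn builds_pool_and_allocate_infos() {
        let pool_sizes = [
            DescriptorPoolSize::builder()
                .ty(DescriptorType::UNIFORM_BUFFER)
                .descriptor_count(4)
                .build(),
            DescriptorPoolSize::builder()
                .ty(DescriptorType::COMBINED_IMAGE_SAMPLER)
                .descriptor_count(4)
                .build(),
        ];
        let pool_info = DescriptorPoolCreateInfo::builder()
            .max_sets(4)
            .pool_sizes(&pool_sizes)
            .build();
        assert_eq!(pool_info.pool_size_count, 2);
        let set_layouts = [DescriptorSetLayout::default(); 2];
        let alloc_info = DescriptorSetAllocateInfo::builder()
            .descriptor_pool(DescriptorPool::default())
            .set_layouts(&set_layouts)
            .build();
        // descriptor_set_count follows the number of layouts passed in.
        assert_eq!(alloc_info.descriptor_set_count, 2);
    }
}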
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkSpecializationMapEntry.html>"]
pub struct SpecializationMapEntry {
pub constant_id: u32,
pub offset: u32,
pub size: usize,
}
impl SpecializationMapEntry {
pub fn builder<'a>() -> SpecializationMapEntryBuilder<'a> {
SpecializationMapEntryBuilder {
inner: SpecializationMapEntry::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct SpecializationMapEntryBuilder<'a> {
inner: SpecializationMapEntry,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for SpecializationMapEntryBuilder<'a> {
type Target = SpecializationMapEntry;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for SpecializationMapEntryBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> SpecializationMapEntryBuilder<'a> {
pub fn constant_id(mut self, constant_id: u32) -> SpecializationMapEntryBuilder<'a> {
self.inner.constant_id = constant_id;
self
}
pub fn offset(mut self, offset: u32) -> SpecializationMapEntryBuilder<'a> {
self.inner.offset = offset;
self
}
pub fn size(mut self, size: usize) -> SpecializationMapEntryBuilder<'a> {
self.inner.size = size;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> SpecializationMapEntry {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkSpecializationInfo.html>"]
pub struct SpecializationInfo {
pub map_entry_count: u32,
pub p_map_entries: *const SpecializationMapEntry,
pub data_size: usize,
pub p_data: *const c_void,
}
impl ::std::default::Default for SpecializationInfo {
fn default() -> SpecializationInfo {
SpecializationInfo {
map_entry_count: u32::default(),
p_map_entries: ::std::ptr::null(),
data_size: usize::default(),
p_data: ::std::ptr::null(),
}
}
}
impl SpecializationInfo {
pub fn builder<'a>() -> SpecializationInfoBuilder<'a> {
SpecializationInfoBuilder {
inner: SpecializationInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct SpecializationInfoBuilder<'a> {
inner: SpecializationInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for SpecializationInfoBuilder<'a> {
type Target = SpecializationInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for SpecializationInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> SpecializationInfoBuilder<'a> {
pub fn map_entries(
mut self,
map_entries: &'a [SpecializationMapEntry],
) -> SpecializationInfoBuilder<'a> {
self.inner.map_entry_count = map_entries.len() as _;
self.inner.p_map_entries = map_entries.as_ptr();
self
}
pub fn data(mut self, data: &'a [u8]) -> SpecializationInfoBuilder<'a> {
self.inner.data_size = data.len() as _;
self.inner.p_data = data.as_ptr() as *const c_void;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> SpecializationInfo {
self.inner
}
}
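// Illustrative sketch of the specialization builders above: one map entry that maps
// constant_id 0 to the first four bytes of the data blob. The u32 value 1 is an
// arbitrary assumption standing in for a real specialization constant.
#[cfg(test)]
mod specialization_info_builder_example {
    use super::*;
    #[test]
    fn builds_a_single_entry_specialization() {
        let entries = [SpecializationMapEntry::builder()
            .constant_id(0)
            .offset(0)
            .size(::std::mem::size_of::<u32>())
            .build()];
        let data: [u8; 4] = 1u32.to_ne_bytes();
        let info = SpecializationInfo::builder()
            .map_entries(&entries)
            .data(&data)
            .build();
        assert_eq!(info.map_entry_count, 1);
        assert_eq!(info.data_size, 4);
    }
}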
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPipelineShaderStageCreateInfo.html>"]
pub struct PipelineShaderStageCreateInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub flags: PipelineShaderStageCreateFlags,
pub stage: ShaderStageFlags,
pub module: ShaderModule,
pub p_name: *const c_char,
pub p_specialization_info: *const SpecializationInfo,
}
impl ::std::default::Default for PipelineShaderStageCreateInfo {
fn default() -> PipelineShaderStageCreateInfo {
PipelineShaderStageCreateInfo {
s_type: StructureType::PIPELINE_SHADER_STAGE_CREATE_INFO,
p_next: ::std::ptr::null(),
flags: PipelineShaderStageCreateFlags::default(),
stage: ShaderStageFlags::default(),
module: ShaderModule::default(),
p_name: ::std::ptr::null(),
p_specialization_info: ::std::ptr::null(),
}
}
}
impl PipelineShaderStageCreateInfo {
pub fn builder<'a>() -> PipelineShaderStageCreateInfoBuilder<'a> {
PipelineShaderStageCreateInfoBuilder {
inner: PipelineShaderStageCreateInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PipelineShaderStageCreateInfoBuilder<'a> {
inner: PipelineShaderStageCreateInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsPipelineShaderStageCreateInfo {}
impl<'a> ::std::ops::Deref for PipelineShaderStageCreateInfoBuilder<'a> {
type Target = PipelineShaderStageCreateInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PipelineShaderStageCreateInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PipelineShaderStageCreateInfoBuilder<'a> {
pub fn flags(
mut self,
flags: PipelineShaderStageCreateFlags,
) -> PipelineShaderStageCreateInfoBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn stage(mut self, stage: ShaderStageFlags) -> PipelineShaderStageCreateInfoBuilder<'a> {
self.inner.stage = stage;
self
}
pub fn module(mut self, module: ShaderModule) -> PipelineShaderStageCreateInfoBuilder<'a> {
self.inner.module = module;
self
}
pub fn name(mut self, name: &'a ::std::ffi::CStr) -> PipelineShaderStageCreateInfoBuilder<'a> {
self.inner.p_name = name.as_ptr();
self
}
pub fn specialization_info(
mut self,
specialization_info: &'a SpecializationInfo,
) -> PipelineShaderStageCreateInfoBuilder<'a> {
self.inner.p_specialization_info = specialization_info;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsPipelineShaderStageCreateInfo>(
mut self,
next: &'a mut T,
) -> PipelineShaderStageCreateInfoBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PipelineShaderStageCreateInfo {
self.inner
}
}
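// Illustrative sketch of the PipelineShaderStageCreateInfo builder above: a vertex
// stage with the conventional "main" entry point. `ShaderModule::default()` is a
// null handle standing in for a module created with vkCreateShaderModule.
#[cfg(test)]
mod pipeline_shader_stage_builder_example {
    use super::*;
    #[test]
    fn builds_a_vertex_stage() {
        let entry_point = ::std::ffi::CStr::from_bytes_with_nul(b"main\0").unwrap();
        let stage = PipelineShaderStageCreateInfo::builder()
            .stage(ShaderStageFlags::VERTEX)
            .module(ShaderModule::default())
            .name(entry_point)
            .build();
        // p_name now points into `entry_point`, which must outlive any use of `stage`.
        assert!(!stage.p_name.is_null());
    }
}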
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkComputePipelineCreateInfo.html>"]
pub struct ComputePipelineCreateInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub flags: PipelineCreateFlags,
pub stage: PipelineShaderStageCreateInfo,
pub layout: PipelineLayout,
pub base_pipeline_handle: Pipeline,
pub base_pipeline_index: i32,
}
impl ::std::default::Default for ComputePipelineCreateInfo {
fn default() -> ComputePipelineCreateInfo {
ComputePipelineCreateInfo {
s_type: StructureType::COMPUTE_PIPELINE_CREATE_INFO,
p_next: ::std::ptr::null(),
flags: PipelineCreateFlags::default(),
stage: PipelineShaderStageCreateInfo::default(),
layout: PipelineLayout::default(),
base_pipeline_handle: Pipeline::default(),
base_pipeline_index: i32::default(),
}
}
}
impl ComputePipelineCreateInfo {
pub fn builder<'a>() -> ComputePipelineCreateInfoBuilder<'a> {
ComputePipelineCreateInfoBuilder {
inner: ComputePipelineCreateInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ComputePipelineCreateInfoBuilder<'a> {
inner: ComputePipelineCreateInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsComputePipelineCreateInfo {}
impl<'a> ::std::ops::Deref for ComputePipelineCreateInfoBuilder<'a> {
type Target = ComputePipelineCreateInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ComputePipelineCreateInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ComputePipelineCreateInfoBuilder<'a> {
pub fn flags(mut self, flags: PipelineCreateFlags) -> ComputePipelineCreateInfoBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn stage(
mut self,
stage: PipelineShaderStageCreateInfo,
) -> ComputePipelineCreateInfoBuilder<'a> {
self.inner.stage = stage;
self
}
pub fn layout(mut self, layout: PipelineLayout) -> ComputePipelineCreateInfoBuilder<'a> {
self.inner.layout = layout;
self
}
pub fn base_pipeline_handle(
mut self,
base_pipeline_handle: Pipeline,
) -> ComputePipelineCreateInfoBuilder<'a> {
self.inner.base_pipeline_handle = base_pipeline_handle;
self
}
pub fn base_pipeline_index(
mut self,
base_pipeline_index: i32,
) -> ComputePipelineCreateInfoBuilder<'a> {
self.inner.base_pipeline_index = base_pipeline_index;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsComputePipelineCreateInfo>(
mut self,
next: &'a mut T,
) -> ComputePipelineCreateInfoBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ComputePipelineCreateInfo {
self.inner
}
}
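// Illustrative sketch of the ComputePipelineCreateInfo builder above, reusing a
// shader stage built with the stage builder. The null ShaderModule/PipelineLayout
// handles are placeholders; -1 for base_pipeline_index means no parent pipeline.
#[cfg(test)]
mod compute_pipeline_create_info_builder_example {
    use super::*;
    #[test]
    fn builds_a_compute_pipeline_create_info() {
        let entry_point = ::std::ffi::CStr::from_bytes_with_nul(b"main\0").unwrap();
        let stage = PipelineShaderStageCreateInfo::builder()
            .stage(ShaderStageFlags::COMPUTE)
            .module(ShaderModule::default())
            .name(entry_point)
            .build();
        let info = ComputePipelineCreateInfo::builder()
            .stage(stage)
            .layout(PipelineLayout::default())
            .base_pipeline_index(-1)
            .build();
        assert_eq!(info.base_pipeline_index, -1);
    }
}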
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkVertexInputBindingDescription.html>"]
pub struct VertexInputBindingDescription {
pub binding: u32,
pub stride: u32,
pub input_rate: VertexInputRate,
}
impl VertexInputBindingDescription {
pub fn builder<'a>() -> VertexInputBindingDescriptionBuilder<'a> {
VertexInputBindingDescriptionBuilder {
inner: VertexInputBindingDescription::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct VertexInputBindingDescriptionBuilder<'a> {
inner: VertexInputBindingDescription,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for VertexInputBindingDescriptionBuilder<'a> {
type Target = VertexInputBindingDescription;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for VertexInputBindingDescriptionBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> VertexInputBindingDescriptionBuilder<'a> {
pub fn binding(mut self, binding: u32) -> VertexInputBindingDescriptionBuilder<'a> {
self.inner.binding = binding;
self
}
pub fn stride(mut self, stride: u32) -> VertexInputBindingDescriptionBuilder<'a> {
self.inner.stride = stride;
self
}
pub fn input_rate(
mut self,
input_rate: VertexInputRate,
) -> VertexInputBindingDescriptionBuilder<'a> {
self.inner.input_rate = input_rate;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> VertexInputBindingDescription {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkVertexInputAttributeDescription.html>"]
pub struct VertexInputAttributeDescription {
pub location: u32,
pub binding: u32,
pub format: Format,
pub offset: u32,
}
impl VertexInputAttributeDescription {
pub fn builder<'a>() -> VertexInputAttributeDescriptionBuilder<'a> {
VertexInputAttributeDescriptionBuilder {
inner: VertexInputAttributeDescription::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct VertexInputAttributeDescriptionBuilder<'a> {
inner: VertexInputAttributeDescription,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for VertexInputAttributeDescriptionBuilder<'a> {
type Target = VertexInputAttributeDescription;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for VertexInputAttributeDescriptionBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> VertexInputAttributeDescriptionBuilder<'a> {
pub fn location(mut self, location: u32) -> VertexInputAttributeDescriptionBuilder<'a> {
self.inner.location = location;
self
}
pub fn binding(mut self, binding: u32) -> VertexInputAttributeDescriptionBuilder<'a> {
self.inner.binding = binding;
self
}
pub fn format(mut self, format: Format) -> VertexInputAttributeDescriptionBuilder<'a> {
self.inner.format = format;
self
}
pub fn offset(mut self, offset: u32) -> VertexInputAttributeDescriptionBuilder<'a> {
self.inner.offset = offset;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> VertexInputAttributeDescription {
self.inner
}
}
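// Sketch: describing one interleaved vertex buffer with a vec3 position and a vec2
// texture coordinate. The binding index, stride, formats, and offsets are illustrative
// values chosen for this example, not API defaults.
//
// let binding = VertexInputBindingDescription::builder()
//     .binding(0)
//     .stride(20) // 12 bytes position + 8 bytes uv
//     .input_rate(VertexInputRate::VERTEX)
//     .build();
// let attributes = [
//     VertexInputAttributeDescription::builder()
//         .location(0)
//         .binding(0)
//         .format(Format::R32G32B32_SFLOAT)
//         .offset(0)
//         .build(),
//     VertexInputAttributeDescription::builder()
//         .location(1)
//         .binding(0)
//         .format(Format::R32G32_SFLOAT)
//         .offset(12)
//         .build(),
// ];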
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPipelineVertexInputStateCreateInfo.html>"]
pub struct PipelineVertexInputStateCreateInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub flags: PipelineVertexInputStateCreateFlags,
pub vertex_binding_description_count: u32,
pub p_vertex_binding_descriptions: *const VertexInputBindingDescription,
pub vertex_attribute_description_count: u32,
pub p_vertex_attribute_descriptions: *const VertexInputAttributeDescription,
}
impl ::std::default::Default for PipelineVertexInputStateCreateInfo {
fn default() -> PipelineVertexInputStateCreateInfo {
PipelineVertexInputStateCreateInfo {
s_type: StructureType::PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,
p_next: ::std::ptr::null(),
flags: PipelineVertexInputStateCreateFlags::default(),
vertex_binding_description_count: u32::default(),
p_vertex_binding_descriptions: ::std::ptr::null(),
vertex_attribute_description_count: u32::default(),
p_vertex_attribute_descriptions: ::std::ptr::null(),
}
}
}
impl PipelineVertexInputStateCreateInfo {
pub fn builder<'a>() -> PipelineVertexInputStateCreateInfoBuilder<'a> {
PipelineVertexInputStateCreateInfoBuilder {
inner: PipelineVertexInputStateCreateInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PipelineVertexInputStateCreateInfoBuilder<'a> {
inner: PipelineVertexInputStateCreateInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsPipelineVertexInputStateCreateInfo {}
impl<'a> ::std::ops::Deref for PipelineVertexInputStateCreateInfoBuilder<'a> {
type Target = PipelineVertexInputStateCreateInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PipelineVertexInputStateCreateInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PipelineVertexInputStateCreateInfoBuilder<'a> {
pub fn flags(
mut self,
flags: PipelineVertexInputStateCreateFlags,
) -> PipelineVertexInputStateCreateInfoBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn vertex_binding_descriptions(
mut self,
vertex_binding_descriptions: &'a [VertexInputBindingDescription],
) -> PipelineVertexInputStateCreateInfoBuilder<'a> {
self.inner.vertex_binding_description_count = vertex_binding_descriptions.len() as _;
self.inner.p_vertex_binding_descriptions = vertex_binding_descriptions.as_ptr();
self
}
pub fn vertex_attribute_descriptions(
mut self,
vertex_attribute_descriptions: &'a [VertexInputAttributeDescription],
) -> PipelineVertexInputStateCreateInfoBuilder<'a> {
self.inner.vertex_attribute_description_count = vertex_attribute_descriptions.len() as _;
self.inner.p_vertex_attribute_descriptions = vertex_attribute_descriptions.as_ptr();
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsPipelineVertexInputStateCreateInfo>(
mut self,
next: &'a mut T,
) -> PipelineVertexInputStateCreateInfoBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PipelineVertexInputStateCreateInfo {
self.inner
}
}
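// Sketch: wiring the descriptions from the previous example into the vertex input
// state. Each slice setter stores both the element count and the pointer, so the
// backing arrays must outlive every use of the resulting struct.
//
// let bindings = [binding];
// let vertex_input_state = PipelineVertexInputStateCreateInfo::builder()
//     .vertex_binding_descriptions(&bindings)
//     .vertex_attribute_descriptions(&attributes)
//     .build();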
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPipelineInputAssemblyStateCreateInfo.html>"]
pub struct PipelineInputAssemblyStateCreateInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub flags: PipelineInputAssemblyStateCreateFlags,
pub topology: PrimitiveTopology,
pub primitive_restart_enable: Bool32,
}
impl ::std::default::Default for PipelineInputAssemblyStateCreateInfo {
fn default() -> PipelineInputAssemblyStateCreateInfo {
PipelineInputAssemblyStateCreateInfo {
s_type: StructureType::PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,
p_next: ::std::ptr::null(),
flags: PipelineInputAssemblyStateCreateFlags::default(),
topology: PrimitiveTopology::default(),
primitive_restart_enable: Bool32::default(),
}
}
}
impl PipelineInputAssemblyStateCreateInfo {
pub fn builder<'a>() -> PipelineInputAssemblyStateCreateInfoBuilder<'a> {
PipelineInputAssemblyStateCreateInfoBuilder {
inner: PipelineInputAssemblyStateCreateInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PipelineInputAssemblyStateCreateInfoBuilder<'a> {
inner: PipelineInputAssemblyStateCreateInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsPipelineInputAssemblyStateCreateInfo {}
impl<'a> ::std::ops::Deref for PipelineInputAssemblyStateCreateInfoBuilder<'a> {
type Target = PipelineInputAssemblyStateCreateInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PipelineInputAssemblyStateCreateInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PipelineInputAssemblyStateCreateInfoBuilder<'a> {
pub fn flags(
mut self,
flags: PipelineInputAssemblyStateCreateFlags,
) -> PipelineInputAssemblyStateCreateInfoBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn topology(
mut self,
topology: PrimitiveTopology,
) -> PipelineInputAssemblyStateCreateInfoBuilder<'a> {
self.inner.topology = topology;
self
}
pub fn primitive_restart_enable(
mut self,
primitive_restart_enable: bool,
) -> PipelineInputAssemblyStateCreateInfoBuilder<'a> {
self.inner.primitive_restart_enable = primitive_restart_enable.into();
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsPipelineInputAssemblyStateCreateInfo>(
mut self,
next: &'a mut T,
) -> PipelineInputAssemblyStateCreateInfoBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PipelineInputAssemblyStateCreateInfo {
self.inner
}
}
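// Sketch: the common triangle-list input assembly state with primitive restart
// disabled (illustrative values, not requirements).
//
// let input_assembly_state = PipelineInputAssemblyStateCreateInfo::builder()
//     .topology(PrimitiveTopology::TRIANGLE_LIST)
//     .primitive_restart_enable(false)
//     .build();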
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPipelineTessellationStateCreateInfo.html>"]
pub struct PipelineTessellationStateCreateInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub flags: PipelineTessellationStateCreateFlags,
pub patch_control_points: u32,
}
impl ::std::default::Default for PipelineTessellationStateCreateInfo {
fn default() -> PipelineTessellationStateCreateInfo {
PipelineTessellationStateCreateInfo {
s_type: StructureType::PIPELINE_TESSELLATION_STATE_CREATE_INFO,
p_next: ::std::ptr::null(),
flags: PipelineTessellationStateCreateFlags::default(),
patch_control_points: u32::default(),
}
}
}
impl PipelineTessellationStateCreateInfo {
pub fn builder<'a>() -> PipelineTessellationStateCreateInfoBuilder<'a> {
PipelineTessellationStateCreateInfoBuilder {
inner: PipelineTessellationStateCreateInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PipelineTessellationStateCreateInfoBuilder<'a> {
inner: PipelineTessellationStateCreateInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsPipelineTessellationStateCreateInfo {}
impl<'a> ::std::ops::Deref for PipelineTessellationStateCreateInfoBuilder<'a> {
type Target = PipelineTessellationStateCreateInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PipelineTessellationStateCreateInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PipelineTessellationStateCreateInfoBuilder<'a> {
pub fn flags(
mut self,
flags: PipelineTessellationStateCreateFlags,
) -> PipelineTessellationStateCreateInfoBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn patch_control_points(
mut self,
patch_control_points: u32,
) -> PipelineTessellationStateCreateInfoBuilder<'a> {
self.inner.patch_control_points = patch_control_points;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsPipelineTessellationStateCreateInfo>(
mut self,
next: &'a mut T,
) -> PipelineTessellationStateCreateInfoBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PipelineTessellationStateCreateInfo {
self.inner
}
}
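// Sketch: tessellation state for triangle patches; only meaningful when the pipeline
// also contains tessellation shader stages.
//
// let tessellation_state = PipelineTessellationStateCreateInfo::builder()
//     .patch_control_points(3)
//     .build();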
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPipelineViewportStateCreateInfo.html>"]
pub struct PipelineViewportStateCreateInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub flags: PipelineViewportStateCreateFlags,
pub viewport_count: u32,
pub p_viewports: *const Viewport,
pub scissor_count: u32,
pub p_scissors: *const Rect2D,
}
impl ::std::default::Default for PipelineViewportStateCreateInfo {
fn default() -> PipelineViewportStateCreateInfo {
PipelineViewportStateCreateInfo {
s_type: StructureType::PIPELINE_VIEWPORT_STATE_CREATE_INFO,
p_next: ::std::ptr::null(),
flags: PipelineViewportStateCreateFlags::default(),
viewport_count: u32::default(),
p_viewports: ::std::ptr::null(),
scissor_count: u32::default(),
p_scissors: ::std::ptr::null(),
}
}
}
impl PipelineViewportStateCreateInfo {
pub fn builder<'a>() -> PipelineViewportStateCreateInfoBuilder<'a> {
PipelineViewportStateCreateInfoBuilder {
inner: PipelineViewportStateCreateInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PipelineViewportStateCreateInfoBuilder<'a> {
inner: PipelineViewportStateCreateInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsPipelineViewportStateCreateInfo {}
impl<'a> ::std::ops::Deref for PipelineViewportStateCreateInfoBuilder<'a> {
type Target = PipelineViewportStateCreateInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PipelineViewportStateCreateInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PipelineViewportStateCreateInfoBuilder<'a> {
pub fn flags(
mut self,
flags: PipelineViewportStateCreateFlags,
) -> PipelineViewportStateCreateInfoBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn viewport_count(
mut self,
viewport_count: u32,
) -> PipelineViewportStateCreateInfoBuilder<'a> {
self.inner.viewport_count = viewport_count;
self
}
pub fn viewports(
mut self,
viewports: &'a [Viewport],
) -> PipelineViewportStateCreateInfoBuilder<'a> {
self.inner.viewport_count = viewports.len() as _;
self.inner.p_viewports = viewports.as_ptr();
self
}
pub fn scissor_count(
mut self,
scissor_count: u32,
) -> PipelineViewportStateCreateInfoBuilder<'a> {
self.inner.scissor_count = scissor_count;
self
}
pub fn scissors(
mut self,
scissors: &'a [Rect2D],
) -> PipelineViewportStateCreateInfoBuilder<'a> {
self.inner.scissor_count = scissors.len() as _;
self.inner.p_scissors = scissors.as_ptr();
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsPipelineViewportStateCreateInfo>(
mut self,
next: &'a mut T,
) -> PipelineViewportStateCreateInfoBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PipelineViewportStateCreateInfo {
self.inner
}
}
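// Sketch: a single full-framebuffer viewport and scissor. `extent` is a hypothetical
// `Extent2D` holding the swapchain image size; it is not defined in this file.
//
// let viewports = [Viewport {
//     x: 0.0,
//     y: 0.0,
//     width: extent.width as f32,
//     height: extent.height as f32,
//     min_depth: 0.0,
//     max_depth: 1.0,
// }];
// let scissors = [Rect2D {
//     offset: Offset2D { x: 0, y: 0 },
//     extent,
// }];
// let viewport_state = PipelineViewportStateCreateInfo::builder()
//     .viewports(&viewports)
//     .scissors(&scissors)
//     .build();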
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPipelineRasterizationStateCreateInfo.html>"]
pub struct PipelineRasterizationStateCreateInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub flags: PipelineRasterizationStateCreateFlags,
pub depth_clamp_enable: Bool32,
pub rasterizer_discard_enable: Bool32,
pub polygon_mode: PolygonMode,
pub cull_mode: CullModeFlags,
pub front_face: FrontFace,
pub depth_bias_enable: Bool32,
pub depth_bias_constant_factor: f32,
pub depth_bias_clamp: f32,
pub depth_bias_slope_factor: f32,
pub line_width: f32,
}
impl ::std::default::Default for PipelineRasterizationStateCreateInfo {
fn default() -> PipelineRasterizationStateCreateInfo {
PipelineRasterizationStateCreateInfo {
s_type: StructureType::PIPELINE_RASTERIZATION_STATE_CREATE_INFO,
p_next: ::std::ptr::null(),
flags: PipelineRasterizationStateCreateFlags::default(),
depth_clamp_enable: Bool32::default(),
rasterizer_discard_enable: Bool32::default(),
polygon_mode: PolygonMode::default(),
cull_mode: CullModeFlags::default(),
front_face: FrontFace::default(),
depth_bias_enable: Bool32::default(),
depth_bias_constant_factor: f32::default(),
depth_bias_clamp: f32::default(),
depth_bias_slope_factor: f32::default(),
line_width: f32::default(),
}
}
}
impl PipelineRasterizationStateCreateInfo {
pub fn builder<'a>() -> PipelineRasterizationStateCreateInfoBuilder<'a> {
PipelineRasterizationStateCreateInfoBuilder {
inner: PipelineRasterizationStateCreateInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PipelineRasterizationStateCreateInfoBuilder<'a> {
inner: PipelineRasterizationStateCreateInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsPipelineRasterizationStateCreateInfo {}
impl<'a> ::std::ops::Deref for PipelineRasterizationStateCreateInfoBuilder<'a> {
type Target = PipelineRasterizationStateCreateInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PipelineRasterizationStateCreateInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PipelineRasterizationStateCreateInfoBuilder<'a> {
pub fn flags(
mut self,
flags: PipelineRasterizationStateCreateFlags,
) -> PipelineRasterizationStateCreateInfoBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn depth_clamp_enable(
mut self,
depth_clamp_enable: bool,
) -> PipelineRasterizationStateCreateInfoBuilder<'a> {
self.inner.depth_clamp_enable = depth_clamp_enable.into();
self
}
pub fn rasterizer_discard_enable(
mut self,
rasterizer_discard_enable: bool,
) -> PipelineRasterizationStateCreateInfoBuilder<'a> {
self.inner.rasterizer_discard_enable = rasterizer_discard_enable.into();
self
}
pub fn polygon_mode(
mut self,
polygon_mode: PolygonMode,
) -> PipelineRasterizationStateCreateInfoBuilder<'a> {
self.inner.polygon_mode = polygon_mode;
self
}
pub fn cull_mode(
mut self,
cull_mode: CullModeFlags,
) -> PipelineRasterizationStateCreateInfoBuilder<'a> {
self.inner.cull_mode = cull_mode;
self
}
pub fn front_face(
mut self,
front_face: FrontFace,
) -> PipelineRasterizationStateCreateInfoBuilder<'a> {
self.inner.front_face = front_face;
self
}
pub fn depth_bias_enable(
mut self,
depth_bias_enable: bool,
) -> PipelineRasterizationStateCreateInfoBuilder<'a> {
self.inner.depth_bias_enable = depth_bias_enable.into();
self
}
pub fn depth_bias_constant_factor(
mut self,
depth_bias_constant_factor: f32,
) -> PipelineRasterizationStateCreateInfoBuilder<'a> {
self.inner.depth_bias_constant_factor = depth_bias_constant_factor;
self
}
pub fn depth_bias_clamp(
mut self,
depth_bias_clamp: f32,
) -> PipelineRasterizationStateCreateInfoBuilder<'a> {
self.inner.depth_bias_clamp = depth_bias_clamp;
self
}
pub fn depth_bias_slope_factor(
mut self,
depth_bias_slope_factor: f32,
) -> PipelineRasterizationStateCreateInfoBuilder<'a> {
self.inner.depth_bias_slope_factor = depth_bias_slope_factor;
self
}
pub fn line_width(
mut self,
line_width: f32,
) -> PipelineRasterizationStateCreateInfoBuilder<'a> {
self.inner.line_width = line_width;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsPipelineRasterizationStateCreateInfo>(
mut self,
next: &'a mut T,
) -> PipelineRasterizationStateCreateInfoBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PipelineRasterizationStateCreateInfo {
self.inner
}
}
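// Sketch: a typical filled-polygon rasterization state. `line_width` is set to 1.0
// explicitly because the spec requires 1.0 unless the wide-lines feature is enabled.
//
// let rasterization_state = PipelineRasterizationStateCreateInfo::builder()
//     .polygon_mode(PolygonMode::FILL)
//     .cull_mode(CullModeFlags::BACK)
//     .front_face(FrontFace::COUNTER_CLOCKWISE)
//     .line_width(1.0)
//     .build();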
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPipelineMultisampleStateCreateInfo.html>"]
pub struct PipelineMultisampleStateCreateInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub flags: PipelineMultisampleStateCreateFlags,
pub rasterization_samples: SampleCountFlags,
pub sample_shading_enable: Bool32,
pub min_sample_shading: f32,
pub p_sample_mask: *const SampleMask,
pub alpha_to_coverage_enable: Bool32,
pub alpha_to_one_enable: Bool32,
}
impl ::std::default::Default for PipelineMultisampleStateCreateInfo {
fn default() -> PipelineMultisampleStateCreateInfo {
PipelineMultisampleStateCreateInfo {
s_type: StructureType::PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,
p_next: ::std::ptr::null(),
flags: PipelineMultisampleStateCreateFlags::default(),
rasterization_samples: SampleCountFlags::default(),
sample_shading_enable: Bool32::default(),
min_sample_shading: f32::default(),
p_sample_mask: ::std::ptr::null(),
alpha_to_coverage_enable: Bool32::default(),
alpha_to_one_enable: Bool32::default(),
}
}
}
impl PipelineMultisampleStateCreateInfo {
pub fn builder<'a>() -> PipelineMultisampleStateCreateInfoBuilder<'a> {
PipelineMultisampleStateCreateInfoBuilder {
inner: PipelineMultisampleStateCreateInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PipelineMultisampleStateCreateInfoBuilder<'a> {
inner: PipelineMultisampleStateCreateInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsPipelineMultisampleStateCreateInfo {}
impl<'a> ::std::ops::Deref for PipelineMultisampleStateCreateInfoBuilder<'a> {
type Target = PipelineMultisampleStateCreateInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PipelineMultisampleStateCreateInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PipelineMultisampleStateCreateInfoBuilder<'a> {
pub fn flags(
mut self,
flags: PipelineMultisampleStateCreateFlags,
) -> PipelineMultisampleStateCreateInfoBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn rasterization_samples(
mut self,
rasterization_samples: SampleCountFlags,
) -> PipelineMultisampleStateCreateInfoBuilder<'a> {
self.inner.rasterization_samples = rasterization_samples;
self
}
pub fn sample_shading_enable(
mut self,
sample_shading_enable: bool,
) -> PipelineMultisampleStateCreateInfoBuilder<'a> {
self.inner.sample_shading_enable = sample_shading_enable.into();
self
}
pub fn min_sample_shading(
mut self,
min_sample_shading: f32,
) -> PipelineMultisampleStateCreateInfoBuilder<'a> {
self.inner.min_sample_shading = min_sample_shading;
self
}
pub fn sample_mask(
mut self,
sample_mask: &'a [SampleMask],
) -> PipelineMultisampleStateCreateInfoBuilder<'a> {
self.inner.p_sample_mask = sample_mask.as_ptr();
self
}
pub fn alpha_to_coverage_enable(
mut self,
alpha_to_coverage_enable: bool,
) -> PipelineMultisampleStateCreateInfoBuilder<'a> {
self.inner.alpha_to_coverage_enable = alpha_to_coverage_enable.into();
self
}
pub fn alpha_to_one_enable(
mut self,
alpha_to_one_enable: bool,
) -> PipelineMultisampleStateCreateInfoBuilder<'a> {
self.inner.alpha_to_one_enable = alpha_to_one_enable.into();
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsPipelineMultisampleStateCreateInfo>(
mut self,
next: &'a mut T,
) -> PipelineMultisampleStateCreateInfoBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PipelineMultisampleStateCreateInfo {
self.inner
}
}
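// Sketch: multisampling disabled (one sample per pixel). `p_sample_mask` is left
// null, which the spec treats as a mask with all bits set.
//
// let multisample_state = PipelineMultisampleStateCreateInfo::builder()
//     .rasterization_samples(SampleCountFlags::TYPE_1)
//     .build();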
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPipelineColorBlendAttachmentState.html>"]
pub struct PipelineColorBlendAttachmentState {
pub blend_enable: Bool32,
pub src_color_blend_factor: BlendFactor,
pub dst_color_blend_factor: BlendFactor,
pub color_blend_op: BlendOp,
pub src_alpha_blend_factor: BlendFactor,
pub dst_alpha_blend_factor: BlendFactor,
pub alpha_blend_op: BlendOp,
pub color_write_mask: ColorComponentFlags,
}
impl PipelineColorBlendAttachmentState {
pub fn builder<'a>() -> PipelineColorBlendAttachmentStateBuilder<'a> {
PipelineColorBlendAttachmentStateBuilder {
inner: PipelineColorBlendAttachmentState::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PipelineColorBlendAttachmentStateBuilder<'a> {
inner: PipelineColorBlendAttachmentState,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for PipelineColorBlendAttachmentStateBuilder<'a> {
type Target = PipelineColorBlendAttachmentState;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PipelineColorBlendAttachmentStateBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PipelineColorBlendAttachmentStateBuilder<'a> {
pub fn blend_enable(
mut self,
blend_enable: bool,
) -> PipelineColorBlendAttachmentStateBuilder<'a> {
self.inner.blend_enable = blend_enable.into();
self
}
pub fn src_color_blend_factor(
mut self,
src_color_blend_factor: BlendFactor,
) -> PipelineColorBlendAttachmentStateBuilder<'a> {
self.inner.src_color_blend_factor = src_color_blend_factor;
self
}
pub fn dst_color_blend_factor(
mut self,
dst_color_blend_factor: BlendFactor,
) -> PipelineColorBlendAttachmentStateBuilder<'a> {
self.inner.dst_color_blend_factor = dst_color_blend_factor;
self
}
pub fn color_blend_op(
mut self,
color_blend_op: BlendOp,
) -> PipelineColorBlendAttachmentStateBuilder<'a> {
self.inner.color_blend_op = color_blend_op;
self
}
pub fn src_alpha_blend_factor(
mut self,
src_alpha_blend_factor: BlendFactor,
) -> PipelineColorBlendAttachmentStateBuilder<'a> {
self.inner.src_alpha_blend_factor = src_alpha_blend_factor;
self
}
pub fn dst_alpha_blend_factor(
mut self,
dst_alpha_blend_factor: BlendFactor,
) -> PipelineColorBlendAttachmentStateBuilder<'a> {
self.inner.dst_alpha_blend_factor = dst_alpha_blend_factor;
self
}
pub fn alpha_blend_op(
mut self,
alpha_blend_op: BlendOp,
) -> PipelineColorBlendAttachmentStateBuilder<'a> {
self.inner.alpha_blend_op = alpha_blend_op;
self
}
pub fn color_write_mask(
mut self,
color_write_mask: ColorComponentFlags,
) -> PipelineColorBlendAttachmentStateBuilder<'a> {
self.inner.color_write_mask = color_write_mask;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PipelineColorBlendAttachmentState {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPipelineColorBlendStateCreateInfo.html>"]
pub struct PipelineColorBlendStateCreateInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub flags: PipelineColorBlendStateCreateFlags,
pub logic_op_enable: Bool32,
pub logic_op: LogicOp,
pub attachment_count: u32,
pub p_attachments: *const PipelineColorBlendAttachmentState,
pub blend_constants: [f32; 4],
}
impl ::std::default::Default for PipelineColorBlendStateCreateInfo {
fn default() -> PipelineColorBlendStateCreateInfo {
PipelineColorBlendStateCreateInfo {
s_type: StructureType::PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,
p_next: ::std::ptr::null(),
flags: PipelineColorBlendStateCreateFlags::default(),
logic_op_enable: Bool32::default(),
logic_op: LogicOp::default(),
attachment_count: u32::default(),
p_attachments: ::std::ptr::null(),
blend_constants: [f32::default(); 4],
}
}
}
impl PipelineColorBlendStateCreateInfo {
pub fn builder<'a>() -> PipelineColorBlendStateCreateInfoBuilder<'a> {
PipelineColorBlendStateCreateInfoBuilder {
inner: PipelineColorBlendStateCreateInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PipelineColorBlendStateCreateInfoBuilder<'a> {
inner: PipelineColorBlendStateCreateInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsPipelineColorBlendStateCreateInfo {}
impl<'a> ::std::ops::Deref for PipelineColorBlendStateCreateInfoBuilder<'a> {
type Target = PipelineColorBlendStateCreateInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PipelineColorBlendStateCreateInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PipelineColorBlendStateCreateInfoBuilder<'a> {
pub fn flags(
mut self,
flags: PipelineColorBlendStateCreateFlags,
) -> PipelineColorBlendStateCreateInfoBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn logic_op_enable(
mut self,
logic_op_enable: bool,
) -> PipelineColorBlendStateCreateInfoBuilder<'a> {
self.inner.logic_op_enable = logic_op_enable.into();
self
}
pub fn logic_op(mut self, logic_op: LogicOp) -> PipelineColorBlendStateCreateInfoBuilder<'a> {
self.inner.logic_op = logic_op;
self
}
pub fn attachments(
mut self,
attachments: &'a [PipelineColorBlendAttachmentState],
) -> PipelineColorBlendStateCreateInfoBuilder<'a> {
self.inner.attachment_count = attachments.len() as _;
self.inner.p_attachments = attachments.as_ptr();
self
}
pub fn blend_constants(
mut self,
blend_constants: [f32; 4],
) -> PipelineColorBlendStateCreateInfoBuilder<'a> {
self.inner.blend_constants = blend_constants;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsPipelineColorBlendStateCreateInfo>(
mut self,
next: &'a mut T,
) -> PipelineColorBlendStateCreateInfoBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PipelineColorBlendStateCreateInfo {
self.inner
}
}
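// Sketch: standard alpha blending for a single color attachment. The attachment state
// is a plain value (no `s_type`/`p_next`), so it can be built inline and kept in a
// local array that the create-info setter then borrows.
//
// let attachments = [PipelineColorBlendAttachmentState::builder()
//     .blend_enable(true)
//     .src_color_blend_factor(BlendFactor::SRC_ALPHA)
//     .dst_color_blend_factor(BlendFactor::ONE_MINUS_SRC_ALPHA)
//     .color_blend_op(BlendOp::ADD)
//     .src_alpha_blend_factor(BlendFactor::ONE)
//     .dst_alpha_blend_factor(BlendFactor::ZERO)
//     .alpha_blend_op(BlendOp::ADD)
//     .color_write_mask(
//         ColorComponentFlags::R
//             | ColorComponentFlags::G
//             | ColorComponentFlags::B
//             | ColorComponentFlags::A,
//     )
//     .build()];
// let color_blend_state = PipelineColorBlendStateCreateInfo::builder()
//     .attachments(&attachments)
//     .build();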
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPipelineDynamicStateCreateInfo.html>"]
pub struct PipelineDynamicStateCreateInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub flags: PipelineDynamicStateCreateFlags,
pub dynamic_state_count: u32,
pub p_dynamic_states: *const DynamicState,
}
impl ::std::default::Default for PipelineDynamicStateCreateInfo {
fn default() -> PipelineDynamicStateCreateInfo {
PipelineDynamicStateCreateInfo {
s_type: StructureType::PIPELINE_DYNAMIC_STATE_CREATE_INFO,
p_next: ::std::ptr::null(),
flags: PipelineDynamicStateCreateFlags::default(),
dynamic_state_count: u32::default(),
p_dynamic_states: ::std::ptr::null(),
}
}
}
impl PipelineDynamicStateCreateInfo {
pub fn builder<'a>() -> PipelineDynamicStateCreateInfoBuilder<'a> {
PipelineDynamicStateCreateInfoBuilder {
inner: PipelineDynamicStateCreateInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PipelineDynamicStateCreateInfoBuilder<'a> {
inner: PipelineDynamicStateCreateInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsPipelineDynamicStateCreateInfo {}
impl<'a> ::std::ops::Deref for PipelineDynamicStateCreateInfoBuilder<'a> {
type Target = PipelineDynamicStateCreateInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PipelineDynamicStateCreateInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PipelineDynamicStateCreateInfoBuilder<'a> {
pub fn flags(
mut self,
flags: PipelineDynamicStateCreateFlags,
) -> PipelineDynamicStateCreateInfoBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn dynamic_states(
mut self,
dynamic_states: &'a [DynamicState],
) -> PipelineDynamicStateCreateInfoBuilder<'a> {
self.inner.dynamic_state_count = dynamic_states.len() as _;
self.inner.p_dynamic_states = dynamic_states.as_ptr();
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsPipelineDynamicStateCreateInfo>(
mut self,
next: &'a mut T,
) -> PipelineDynamicStateCreateInfoBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PipelineDynamicStateCreateInfo {
self.inner
}
}
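// Sketch: marking viewport and scissor as dynamic so they are supplied at command
// recording time rather than baked into the pipeline.
//
// let dynamic_states = [DynamicState::VIEWPORT, DynamicState::SCISSOR];
// let dynamic_state = PipelineDynamicStateCreateInfo::builder()
//     .dynamic_states(&dynamic_states)
//     .build();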
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkStencilOpState.html>"]
pub struct StencilOpState {
pub fail_op: StencilOp,
pub pass_op: StencilOp,
pub depth_fail_op: StencilOp,
pub compare_op: CompareOp,
pub compare_mask: u32,
pub write_mask: u32,
pub reference: u32,
}
impl StencilOpState {
pub fn builder<'a>() -> StencilOpStateBuilder<'a> {
StencilOpStateBuilder {
inner: StencilOpState::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct StencilOpStateBuilder<'a> {
inner: StencilOpState,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for StencilOpStateBuilder<'a> {
type Target = StencilOpState;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for StencilOpStateBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> StencilOpStateBuilder<'a> {
pub fn fail_op(mut self, fail_op: StencilOp) -> StencilOpStateBuilder<'a> {
self.inner.fail_op = fail_op;
self
}
pub fn pass_op(mut self, pass_op: StencilOp) -> StencilOpStateBuilder<'a> {
self.inner.pass_op = pass_op;
self
}
pub fn depth_fail_op(mut self, depth_fail_op: StencilOp) -> StencilOpStateBuilder<'a> {
self.inner.depth_fail_op = depth_fail_op;
self
}
pub fn compare_op(mut self, compare_op: CompareOp) -> StencilOpStateBuilder<'a> {
self.inner.compare_op = compare_op;
self
}
pub fn compare_mask(mut self, compare_mask: u32) -> StencilOpStateBuilder<'a> {
self.inner.compare_mask = compare_mask;
self
}
pub fn write_mask(mut self, write_mask: u32) -> StencilOpStateBuilder<'a> {
self.inner.write_mask = write_mask;
self
}
pub fn reference(mut self, reference: u32) -> StencilOpStateBuilder<'a> {
self.inner.reference = reference;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> StencilOpState {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPipelineDepthStencilStateCreateInfo.html>"]
pub struct PipelineDepthStencilStateCreateInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub flags: PipelineDepthStencilStateCreateFlags,
pub depth_test_enable: Bool32,
pub depth_write_enable: Bool32,
pub depth_compare_op: CompareOp,
pub depth_bounds_test_enable: Bool32,
pub stencil_test_enable: Bool32,
pub front: StencilOpState,
pub back: StencilOpState,
pub min_depth_bounds: f32,
pub max_depth_bounds: f32,
}
impl ::std::default::Default for PipelineDepthStencilStateCreateInfo {
fn default() -> PipelineDepthStencilStateCreateInfo {
PipelineDepthStencilStateCreateInfo {
s_type: StructureType::PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO,
p_next: ::std::ptr::null(),
flags: PipelineDepthStencilStateCreateFlags::default(),
depth_test_enable: Bool32::default(),
depth_write_enable: Bool32::default(),
depth_compare_op: CompareOp::default(),
depth_bounds_test_enable: Bool32::default(),
stencil_test_enable: Bool32::default(),
front: StencilOpState::default(),
back: StencilOpState::default(),
min_depth_bounds: f32::default(),
max_depth_bounds: f32::default(),
}
}
}
impl PipelineDepthStencilStateCreateInfo {
pub fn builder<'a>() -> PipelineDepthStencilStateCreateInfoBuilder<'a> {
PipelineDepthStencilStateCreateInfoBuilder {
inner: PipelineDepthStencilStateCreateInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PipelineDepthStencilStateCreateInfoBuilder<'a> {
inner: PipelineDepthStencilStateCreateInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsPipelineDepthStencilStateCreateInfo {}
impl<'a> ::std::ops::Deref for PipelineDepthStencilStateCreateInfoBuilder<'a> {
type Target = PipelineDepthStencilStateCreateInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PipelineDepthStencilStateCreateInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PipelineDepthStencilStateCreateInfoBuilder<'a> {
pub fn flags(
mut self,
flags: PipelineDepthStencilStateCreateFlags,
) -> PipelineDepthStencilStateCreateInfoBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn depth_test_enable(
mut self,
depth_test_enable: bool,
) -> PipelineDepthStencilStateCreateInfoBuilder<'a> {
self.inner.depth_test_enable = depth_test_enable.into();
self
}
pub fn depth_write_enable(
mut self,
depth_write_enable: bool,
) -> PipelineDepthStencilStateCreateInfoBuilder<'a> {
self.inner.depth_write_enable = depth_write_enable.into();
self
}
pub fn depth_compare_op(
mut self,
depth_compare_op: CompareOp,
) -> PipelineDepthStencilStateCreateInfoBuilder<'a> {
self.inner.depth_compare_op = depth_compare_op;
self
}
pub fn depth_bounds_test_enable(
mut self,
depth_bounds_test_enable: bool,
) -> PipelineDepthStencilStateCreateInfoBuilder<'a> {
self.inner.depth_bounds_test_enable = depth_bounds_test_enable.into();
self
}
pub fn stencil_test_enable(
mut self,
stencil_test_enable: bool,
) -> PipelineDepthStencilStateCreateInfoBuilder<'a> {
self.inner.stencil_test_enable = stencil_test_enable.into();
self
}
pub fn front(
mut self,
front: StencilOpState,
) -> PipelineDepthStencilStateCreateInfoBuilder<'a> {
self.inner.front = front;
self
}
pub fn back(mut self, back: StencilOpState) -> PipelineDepthStencilStateCreateInfoBuilder<'a> {
self.inner.back = back;
self
}
pub fn min_depth_bounds(
mut self,
min_depth_bounds: f32,
) -> PipelineDepthStencilStateCreateInfoBuilder<'a> {
self.inner.min_depth_bounds = min_depth_bounds;
self
}
pub fn max_depth_bounds(
mut self,
max_depth_bounds: f32,
) -> PipelineDepthStencilStateCreateInfoBuilder<'a> {
self.inner.max_depth_bounds = max_depth_bounds;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsPipelineDepthStencilStateCreateInfo>(
mut self,
next: &'a mut T,
) -> PipelineDepthStencilStateCreateInfoBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PipelineDepthStencilStateCreateInfo {
self.inner
}
}
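// Sketch: depth testing and writing enabled, stencil testing disabled. The default
// `StencilOpState` is acceptable for `front`/`back` here because those fields are
// ignored while the stencil test is off.
//
// let depth_stencil_state = PipelineDepthStencilStateCreateInfo::builder()
//     .depth_test_enable(true)
//     .depth_write_enable(true)
//     .depth_compare_op(CompareOp::LESS_OR_EQUAL)
//     .build();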
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkGraphicsPipelineCreateInfo.html>"]
pub struct GraphicsPipelineCreateInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub flags: PipelineCreateFlags,
pub stage_count: u32,
pub p_stages: *const PipelineShaderStageCreateInfo,
pub p_vertex_input_state: *const PipelineVertexInputStateCreateInfo,
pub p_input_assembly_state: *const PipelineInputAssemblyStateCreateInfo,
pub p_tessellation_state: *const PipelineTessellationStateCreateInfo,
pub p_viewport_state: *const PipelineViewportStateCreateInfo,
pub p_rasterization_state: *const PipelineRasterizationStateCreateInfo,
pub p_multisample_state: *const PipelineMultisampleStateCreateInfo,
pub p_depth_stencil_state: *const PipelineDepthStencilStateCreateInfo,
pub p_color_blend_state: *const PipelineColorBlendStateCreateInfo,
pub p_dynamic_state: *const PipelineDynamicStateCreateInfo,
pub layout: PipelineLayout,
pub render_pass: RenderPass,
pub subpass: u32,
pub base_pipeline_handle: Pipeline,
pub base_pipeline_index: i32,
}
impl ::std::default::Default for GraphicsPipelineCreateInfo {
fn default() -> GraphicsPipelineCreateInfo {
GraphicsPipelineCreateInfo {
s_type: StructureType::GRAPHICS_PIPELINE_CREATE_INFO,
p_next: ::std::ptr::null(),
flags: PipelineCreateFlags::default(),
stage_count: u32::default(),
p_stages: ::std::ptr::null(),
p_vertex_input_state: ::std::ptr::null(),
p_input_assembly_state: ::std::ptr::null(),
p_tessellation_state: ::std::ptr::null(),
p_viewport_state: ::std::ptr::null(),
p_rasterization_state: ::std::ptr::null(),
p_multisample_state: ::std::ptr::null(),
p_depth_stencil_state: ::std::ptr::null(),
p_color_blend_state: ::std::ptr::null(),
p_dynamic_state: ::std::ptr::null(),
layout: PipelineLayout::default(),
render_pass: RenderPass::default(),
subpass: u32::default(),
base_pipeline_handle: Pipeline::default(),
base_pipeline_index: i32::default(),
}
}
}
impl GraphicsPipelineCreateInfo {
pub fn builder<'a>() -> GraphicsPipelineCreateInfoBuilder<'a> {
GraphicsPipelineCreateInfoBuilder {
inner: GraphicsPipelineCreateInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct GraphicsPipelineCreateInfoBuilder<'a> {
inner: GraphicsPipelineCreateInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsGraphicsPipelineCreateInfo {}
impl<'a> ::std::ops::Deref for GraphicsPipelineCreateInfoBuilder<'a> {
type Target = GraphicsPipelineCreateInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for GraphicsPipelineCreateInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> GraphicsPipelineCreateInfoBuilder<'a> {
pub fn flags(mut self, flags: PipelineCreateFlags) -> GraphicsPipelineCreateInfoBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn stages(
mut self,
stages: &'a [PipelineShaderStageCreateInfo],
) -> GraphicsPipelineCreateInfoBuilder<'a> {
self.inner.stage_count = stages.len() as _;
self.inner.p_stages = stages.as_ptr();
self
}
pub fn vertex_input_state(
mut self,
vertex_input_state: &'a PipelineVertexInputStateCreateInfo,
) -> GraphicsPipelineCreateInfoBuilder<'a> {
self.inner.p_vertex_input_state = vertex_input_state;
self
}
pub fn input_assembly_state(
mut self,
input_assembly_state: &'a PipelineInputAssemblyStateCreateInfo,
) -> GraphicsPipelineCreateInfoBuilder<'a> {
self.inner.p_input_assembly_state = input_assembly_state;
self
}
pub fn tessellation_state(
mut self,
tessellation_state: &'a PipelineTessellationStateCreateInfo,
) -> GraphicsPipelineCreateInfoBuilder<'a> {
self.inner.p_tessellation_state = tessellation_state;
self
}
pub fn viewport_state(
mut self,
viewport_state: &'a PipelineViewportStateCreateInfo,
) -> GraphicsPipelineCreateInfoBuilder<'a> {
self.inner.p_viewport_state = viewport_state;
self
}
pub fn rasterization_state(
mut self,
rasterization_state: &'a PipelineRasterizationStateCreateInfo,
) -> GraphicsPipelineCreateInfoBuilder<'a> {
self.inner.p_rasterization_state = rasterization_state;
self
}
pub fn multisample_state(
mut self,
multisample_state: &'a PipelineMultisampleStateCreateInfo,
) -> GraphicsPipelineCreateInfoBuilder<'a> {
self.inner.p_multisample_state = multisample_state;
self
}
pub fn depth_stencil_state(
mut self,
depth_stencil_state: &'a PipelineDepthStencilStateCreateInfo,
) -> GraphicsPipelineCreateInfoBuilder<'a> {
self.inner.p_depth_stencil_state = depth_stencil_state;
self
}
pub fn color_blend_state(
mut self,
color_blend_state: &'a PipelineColorBlendStateCreateInfo,
) -> GraphicsPipelineCreateInfoBuilder<'a> {
self.inner.p_color_blend_state = color_blend_state;
self
}
pub fn dynamic_state(
mut self,
dynamic_state: &'a PipelineDynamicStateCreateInfo,
) -> GraphicsPipelineCreateInfoBuilder<'a> {
self.inner.p_dynamic_state = dynamic_state;
self
}
pub fn layout(mut self, layout: PipelineLayout) -> GraphicsPipelineCreateInfoBuilder<'a> {
self.inner.layout = layout;
self
}
pub fn render_pass(mut self, render_pass: RenderPass) -> GraphicsPipelineCreateInfoBuilder<'a> {
self.inner.render_pass = render_pass;
self
}
pub fn subpass(mut self, subpass: u32) -> GraphicsPipelineCreateInfoBuilder<'a> {
self.inner.subpass = subpass;
self
}
pub fn base_pipeline_handle(
mut self,
base_pipeline_handle: Pipeline,
) -> GraphicsPipelineCreateInfoBuilder<'a> {
self.inner.base_pipeline_handle = base_pipeline_handle;
self
}
pub fn base_pipeline_index(
mut self,
base_pipeline_index: i32,
) -> GraphicsPipelineCreateInfoBuilder<'a> {
self.inner.base_pipeline_index = base_pipeline_index;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsGraphicsPipelineCreateInfo>(
mut self,
next: &'a mut T,
) -> GraphicsPipelineCreateInfoBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> GraphicsPipelineCreateInfo {
self.inner
}
}
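// Sketch: assembling a GraphicsPipelineCreateInfo from state structs like the ones
// built in the previous examples. `shader_stages`, `layout`, and `render_pass` are
// hypothetical values created elsewhere by the caller; they are not provided by this
// file.
//
// let create_info = GraphicsPipelineCreateInfo::builder()
//     .stages(&shader_stages)
//     .vertex_input_state(&vertex_input_state)
//     .input_assembly_state(&input_assembly_state)
//     .viewport_state(&viewport_state)
//     .rasterization_state(&rasterization_state)
//     .multisample_state(&multisample_state)
//     .depth_stencil_state(&depth_stencil_state)
//     .color_blend_state(&color_blend_state)
//     .dynamic_state(&dynamic_state)
//     .layout(layout)
//     .render_pass(render_pass)
//     .subpass(0)
//     .build();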
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPipelineCacheCreateInfo.html>"]
pub struct PipelineCacheCreateInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub flags: PipelineCacheCreateFlags,
pub initial_data_size: usize,
pub p_initial_data: *const c_void,
}
impl ::std::default::Default for PipelineCacheCreateInfo {
fn default() -> PipelineCacheCreateInfo {
PipelineCacheCreateInfo {
s_type: StructureType::PIPELINE_CACHE_CREATE_INFO,
p_next: ::std::ptr::null(),
flags: PipelineCacheCreateFlags::default(),
initial_data_size: usize::default(),
p_initial_data: ::std::ptr::null(),
}
}
}
impl PipelineCacheCreateInfo {
pub fn builder<'a>() -> PipelineCacheCreateInfoBuilder<'a> {
PipelineCacheCreateInfoBuilder {
inner: PipelineCacheCreateInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PipelineCacheCreateInfoBuilder<'a> {
inner: PipelineCacheCreateInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsPipelineCacheCreateInfo {}
impl<'a> ::std::ops::Deref for PipelineCacheCreateInfoBuilder<'a> {
type Target = PipelineCacheCreateInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PipelineCacheCreateInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PipelineCacheCreateInfoBuilder<'a> {
pub fn flags(mut self, flags: PipelineCacheCreateFlags) -> PipelineCacheCreateInfoBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn initial_data(mut self, initial_data: &'a [u8]) -> PipelineCacheCreateInfoBuilder<'a> {
self.inner.initial_data_size = initial_data.len() as _;
self.inner.p_initial_data = initial_data.as_ptr() as *const c_void;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsPipelineCacheCreateInfo>(
mut self,
next: &'a mut T,
) -> PipelineCacheCreateInfoBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PipelineCacheCreateInfo {
self.inner
}
}
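// Sketch: seeding a pipeline cache from previously saved bytes. `cache_bytes` is a
// hypothetical `Vec<u8>` the caller read from disk; the setter records both the data
// pointer and its length in bytes.
//
// let cache_info = PipelineCacheCreateInfo::builder()
//     .initial_data(&cache_bytes)
//     .build();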
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPushConstantRange.html>"]
pub struct PushConstantRange {
pub stage_flags: ShaderStageFlags,
pub offset: u32,
pub size: u32,
}
impl PushConstantRange {
pub fn builder<'a>() -> PushConstantRangeBuilder<'a> {
PushConstantRangeBuilder {
inner: PushConstantRange::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PushConstantRangeBuilder<'a> {
inner: PushConstantRange,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for PushConstantRangeBuilder<'a> {
type Target = PushConstantRange;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PushConstantRangeBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PushConstantRangeBuilder<'a> {
pub fn stage_flags(mut self, stage_flags: ShaderStageFlags) -> PushConstantRangeBuilder<'a> {
self.inner.stage_flags = stage_flags;
self
}
pub fn offset(mut self, offset: u32) -> PushConstantRangeBuilder<'a> {
self.inner.offset = offset;
self
}
pub fn size(mut self, size: u32) -> PushConstantRangeBuilder<'a> {
self.inner.size = size;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PushConstantRange {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPipelineLayoutCreateInfo.html>"]
pub struct PipelineLayoutCreateInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub flags: PipelineLayoutCreateFlags,
pub set_layout_count: u32,
pub p_set_layouts: *const DescriptorSetLayout,
pub push_constant_range_count: u32,
pub p_push_constant_ranges: *const PushConstantRange,
}
impl ::std::default::Default for PipelineLayoutCreateInfo {
fn default() -> PipelineLayoutCreateInfo {
PipelineLayoutCreateInfo {
s_type: StructureType::PIPELINE_LAYOUT_CREATE_INFO,
p_next: ::std::ptr::null(),
flags: PipelineLayoutCreateFlags::default(),
set_layout_count: u32::default(),
p_set_layouts: ::std::ptr::null(),
push_constant_range_count: u32::default(),
p_push_constant_ranges: ::std::ptr::null(),
}
}
}
impl PipelineLayoutCreateInfo {
pub fn builder<'a>() -> PipelineLayoutCreateInfoBuilder<'a> {
PipelineLayoutCreateInfoBuilder {
inner: PipelineLayoutCreateInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PipelineLayoutCreateInfoBuilder<'a> {
inner: PipelineLayoutCreateInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsPipelineLayoutCreateInfo {}
impl<'a> ::std::ops::Deref for PipelineLayoutCreateInfoBuilder<'a> {
type Target = PipelineLayoutCreateInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PipelineLayoutCreateInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PipelineLayoutCreateInfoBuilder<'a> {
pub fn flags(
mut self,
flags: PipelineLayoutCreateFlags,
) -> PipelineLayoutCreateInfoBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn set_layouts(
mut self,
set_layouts: &'a [DescriptorSetLayout],
) -> PipelineLayoutCreateInfoBuilder<'a> {
self.inner.set_layout_count = set_layouts.len() as _;
self.inner.p_set_layouts = set_layouts.as_ptr();
self
}
pub fn push_constant_ranges(
mut self,
push_constant_ranges: &'a [PushConstantRange],
) -> PipelineLayoutCreateInfoBuilder<'a> {
self.inner.push_constant_range_count = push_constant_ranges.len() as _;
self.inner.p_push_constant_ranges = push_constant_ranges.as_ptr();
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsPipelineLayoutCreateInfo>(
mut self,
next: &'a mut T,
) -> PipelineLayoutCreateInfoBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PipelineLayoutCreateInfo {
self.inner
}
}
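// Sketch: a pipeline layout with one descriptor set layout and one 64-byte vertex
// push-constant range. `set_layout` is a hypothetical DescriptorSetLayout handle
// created elsewhere; the size and stage flags are illustrative.
//
// let set_layouts = [set_layout];
// let push_ranges = [PushConstantRange::builder()
//     .stage_flags(ShaderStageFlags::VERTEX)
//     .offset(0)
//     .size(64)
//     .build()];
// let layout_info = PipelineLayoutCreateInfo::builder()
//     .set_layouts(&set_layouts)
//     .push_constant_ranges(&push_ranges)
//     .build();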
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkSamplerCreateInfo.html>"]
pub struct SamplerCreateInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub flags: SamplerCreateFlags,
pub mag_filter: Filter,
pub min_filter: Filter,
pub mipmap_mode: SamplerMipmapMode,
pub address_mode_u: SamplerAddressMode,
pub address_mode_v: SamplerAddressMode,
pub address_mode_w: SamplerAddressMode,
pub mip_lod_bias: f32,
pub anisotropy_enable: Bool32,
pub max_anisotropy: f32,
pub compare_enable: Bool32,
pub compare_op: CompareOp,
pub min_lod: f32,
pub max_lod: f32,
pub border_color: BorderColor,
pub unnormalized_coordinates: Bool32,
}
impl ::std::default::Default for SamplerCreateInfo {
fn default() -> SamplerCreateInfo {
SamplerCreateInfo {
s_type: StructureType::SAMPLER_CREATE_INFO,
p_next: ::std::ptr::null(),
flags: SamplerCreateFlags::default(),
mag_filter: Filter::default(),
min_filter: Filter::default(),
mipmap_mode: SamplerMipmapMode::default(),
address_mode_u: SamplerAddressMode::default(),
address_mode_v: SamplerAddressMode::default(),
address_mode_w: SamplerAddressMode::default(),
mip_lod_bias: f32::default(),
anisotropy_enable: Bool32::default(),
max_anisotropy: f32::default(),
compare_enable: Bool32::default(),
compare_op: CompareOp::default(),
min_lod: f32::default(),
max_lod: f32::default(),
border_color: BorderColor::default(),
unnormalized_coordinates: Bool32::default(),
}
}
}
impl SamplerCreateInfo {
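#[doc = r" A minimal usage sketch, assuming the enum variants shown (`Filter::LINEAR`,"]
#[doc = r" `SamplerAddressMode::REPEAT`) are defined elsewhere in this module; the `bool` setters"]
#[doc = r" convert to `Bool32` internally:"]
#[doc = r""]
#[doc = r" ```ignore"]
#[doc = r" let sampler_info = SamplerCreateInfo::builder()"]
#[doc = r"     .mag_filter(Filter::LINEAR)"]
#[doc = r"     .min_filter(Filter::LINEAR)"]
#[doc = r"     .address_mode_u(SamplerAddressMode::REPEAT)"]
#[doc = r"     .address_mode_v(SamplerAddressMode::REPEAT)"]
#[doc = r"     .anisotropy_enable(true)"]
#[doc = r"     .max_anisotropy(16.0);"]
#[doc = r" // The builder derefs to `SamplerCreateInfo`, so a reference to it can be passed"]
#[doc = r" // wherever a `&SamplerCreateInfo` is expected, without calling `build()`."]
#[doc = r" ```"]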
pub fn builder<'a>() -> SamplerCreateInfoBuilder<'a> {
SamplerCreateInfoBuilder {
inner: SamplerCreateInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct SamplerCreateInfoBuilder<'a> {
inner: SamplerCreateInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsSamplerCreateInfo {}
impl<'a> ::std::ops::Deref for SamplerCreateInfoBuilder<'a> {
type Target = SamplerCreateInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for SamplerCreateInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> SamplerCreateInfoBuilder<'a> {
pub fn flags(mut self, flags: SamplerCreateFlags) -> SamplerCreateInfoBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn mag_filter(mut self, mag_filter: Filter) -> SamplerCreateInfoBuilder<'a> {
self.inner.mag_filter = mag_filter;
self
}
pub fn min_filter(mut self, min_filter: Filter) -> SamplerCreateInfoBuilder<'a> {
self.inner.min_filter = min_filter;
self
}
pub fn mipmap_mode(mut self, mipmap_mode: SamplerMipmapMode) -> SamplerCreateInfoBuilder<'a> {
self.inner.mipmap_mode = mipmap_mode;
self
}
pub fn address_mode_u(
mut self,
address_mode_u: SamplerAddressMode,
) -> SamplerCreateInfoBuilder<'a> {
self.inner.address_mode_u = address_mode_u;
self
}
pub fn address_mode_v(
mut self,
address_mode_v: SamplerAddressMode,
) -> SamplerCreateInfoBuilder<'a> {
self.inner.address_mode_v = address_mode_v;
self
}
pub fn address_mode_w(
mut self,
address_mode_w: SamplerAddressMode,
) -> SamplerCreateInfoBuilder<'a> {
self.inner.address_mode_w = address_mode_w;
self
}
pub fn mip_lod_bias(mut self, mip_lod_bias: f32) -> SamplerCreateInfoBuilder<'a> {
self.inner.mip_lod_bias = mip_lod_bias;
self
}
pub fn anisotropy_enable(mut self, anisotropy_enable: bool) -> SamplerCreateInfoBuilder<'a> {
self.inner.anisotropy_enable = anisotropy_enable.into();
self
}
pub fn max_anisotropy(mut self, max_anisotropy: f32) -> SamplerCreateInfoBuilder<'a> {
self.inner.max_anisotropy = max_anisotropy;
self
}
pub fn compare_enable(mut self, compare_enable: bool) -> SamplerCreateInfoBuilder<'a> {
self.inner.compare_enable = compare_enable.into();
self
}
pub fn compare_op(mut self, compare_op: CompareOp) -> SamplerCreateInfoBuilder<'a> {
self.inner.compare_op = compare_op;
self
}
pub fn min_lod(mut self, min_lod: f32) -> SamplerCreateInfoBuilder<'a> {
self.inner.min_lod = min_lod;
self
}
pub fn max_lod(mut self, max_lod: f32) -> SamplerCreateInfoBuilder<'a> {
self.inner.max_lod = max_lod;
self
}
pub fn border_color(mut self, border_color: BorderColor) -> SamplerCreateInfoBuilder<'a> {
self.inner.border_color = border_color;
self
}
pub fn unnormalized_coordinates(
mut self,
unnormalized_coordinates: bool,
) -> SamplerCreateInfoBuilder<'a> {
self.inner.unnormalized_coordinates = unnormalized_coordinates.into();
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsSamplerCreateInfo>(
mut self,
next: &'a mut T,
) -> SamplerCreateInfoBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> SamplerCreateInfo {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkCommandPoolCreateInfo.html>"]
pub struct CommandPoolCreateInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub flags: CommandPoolCreateFlags,
pub queue_family_index: u32,
}
impl ::std::default::Default for CommandPoolCreateInfo {
fn default() -> CommandPoolCreateInfo {
CommandPoolCreateInfo {
s_type: StructureType::COMMAND_POOL_CREATE_INFO,
p_next: ::std::ptr::null(),
flags: CommandPoolCreateFlags::default(),
queue_family_index: u32::default(),
}
}
}
impl CommandPoolCreateInfo {
pub fn builder<'a>() -> CommandPoolCreateInfoBuilder<'a> {
CommandPoolCreateInfoBuilder {
inner: CommandPoolCreateInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct CommandPoolCreateInfoBuilder<'a> {
inner: CommandPoolCreateInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsCommandPoolCreateInfo {}
impl<'a> ::std::ops::Deref for CommandPoolCreateInfoBuilder<'a> {
type Target = CommandPoolCreateInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for CommandPoolCreateInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> CommandPoolCreateInfoBuilder<'a> {
pub fn flags(mut self, flags: CommandPoolCreateFlags) -> CommandPoolCreateInfoBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn queue_family_index(
mut self,
queue_family_index: u32,
) -> CommandPoolCreateInfoBuilder<'a> {
self.inner.queue_family_index = queue_family_index;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsCommandPoolCreateInfo>(
mut self,
next: &'a mut T,
) -> CommandPoolCreateInfoBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> CommandPoolCreateInfo {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkCommandBufferAllocateInfo.html>"]
pub struct CommandBufferAllocateInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub command_pool: CommandPool,
pub level: CommandBufferLevel,
pub command_buffer_count: u32,
}
impl ::std::default::Default for CommandBufferAllocateInfo {
fn default() -> CommandBufferAllocateInfo {
CommandBufferAllocateInfo {
s_type: StructureType::COMMAND_BUFFER_ALLOCATE_INFO,
p_next: ::std::ptr::null(),
command_pool: CommandPool::default(),
level: CommandBufferLevel::default(),
command_buffer_count: u32::default(),
}
}
}
impl CommandBufferAllocateInfo {
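#[doc = r" A minimal sketch, assuming `pool` is a `CommandPool` handle obtained elsewhere and"]
#[doc = r" `CommandBufferLevel::PRIMARY` is among the variants defined in this module:"]
#[doc = r""]
#[doc = r" ```ignore"]
#[doc = r" let alloc_info = CommandBufferAllocateInfo::builder()"]
#[doc = r"     .command_pool(pool)"]
#[doc = r"     .level(CommandBufferLevel::PRIMARY)"]
#[doc = r"     .command_buffer_count(1);"]
#[doc = r" ```"]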
pub fn builder<'a>() -> CommandBufferAllocateInfoBuilder<'a> {
CommandBufferAllocateInfoBuilder {
inner: CommandBufferAllocateInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct CommandBufferAllocateInfoBuilder<'a> {
inner: CommandBufferAllocateInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsCommandBufferAllocateInfo {}
impl<'a> ::std::ops::Deref for CommandBufferAllocateInfoBuilder<'a> {
type Target = CommandBufferAllocateInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for CommandBufferAllocateInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> CommandBufferAllocateInfoBuilder<'a> {
pub fn command_pool(
mut self,
command_pool: CommandPool,
) -> CommandBufferAllocateInfoBuilder<'a> {
self.inner.command_pool = command_pool;
self
}
pub fn level(mut self, level: CommandBufferLevel) -> CommandBufferAllocateInfoBuilder<'a> {
self.inner.level = level;
self
}
pub fn command_buffer_count(
mut self,
command_buffer_count: u32,
) -> CommandBufferAllocateInfoBuilder<'a> {
self.inner.command_buffer_count = command_buffer_count;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsCommandBufferAllocateInfo>(
mut self,
next: &'a mut T,
) -> CommandBufferAllocateInfoBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> CommandBufferAllocateInfo {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkCommandBufferInheritanceInfo.html>"]
pub struct CommandBufferInheritanceInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub render_pass: RenderPass,
pub subpass: u32,
pub framebuffer: Framebuffer,
pub occlusion_query_enable: Bool32,
pub query_flags: QueryControlFlags,
pub pipeline_statistics: QueryPipelineStatisticFlags,
}
impl ::std::default::Default for CommandBufferInheritanceInfo {
fn default() -> CommandBufferInheritanceInfo {
CommandBufferInheritanceInfo {
s_type: StructureType::COMMAND_BUFFER_INHERITANCE_INFO,
p_next: ::std::ptr::null(),
render_pass: RenderPass::default(),
subpass: u32::default(),
framebuffer: Framebuffer::default(),
occlusion_query_enable: Bool32::default(),
query_flags: QueryControlFlags::default(),
pipeline_statistics: QueryPipelineStatisticFlags::default(),
}
}
}
impl CommandBufferInheritanceInfo {
pub fn builder<'a>() -> CommandBufferInheritanceInfoBuilder<'a> {
CommandBufferInheritanceInfoBuilder {
inner: CommandBufferInheritanceInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct CommandBufferInheritanceInfoBuilder<'a> {
inner: CommandBufferInheritanceInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsCommandBufferInheritanceInfo {}
impl<'a> ::std::ops::Deref for CommandBufferInheritanceInfoBuilder<'a> {
type Target = CommandBufferInheritanceInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for CommandBufferInheritanceInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> CommandBufferInheritanceInfoBuilder<'a> {
pub fn render_pass(
mut self,
render_pass: RenderPass,
) -> CommandBufferInheritanceInfoBuilder<'a> {
self.inner.render_pass = render_pass;
self
}
pub fn subpass(mut self, subpass: u32) -> CommandBufferInheritanceInfoBuilder<'a> {
self.inner.subpass = subpass;
self
}
pub fn framebuffer(
mut self,
framebuffer: Framebuffer,
) -> CommandBufferInheritanceInfoBuilder<'a> {
self.inner.framebuffer = framebuffer;
self
}
pub fn occlusion_query_enable(
mut self,
occlusion_query_enable: bool,
) -> CommandBufferInheritanceInfoBuilder<'a> {
self.inner.occlusion_query_enable = occlusion_query_enable.into();
self
}
pub fn query_flags(
mut self,
query_flags: QueryControlFlags,
) -> CommandBufferInheritanceInfoBuilder<'a> {
self.inner.query_flags = query_flags;
self
}
pub fn pipeline_statistics(
mut self,
pipeline_statistics: QueryPipelineStatisticFlags,
) -> CommandBufferInheritanceInfoBuilder<'a> {
self.inner.pipeline_statistics = pipeline_statistics;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsCommandBufferInheritanceInfo>(
mut self,
next: &'a mut T,
) -> CommandBufferInheritanceInfoBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> CommandBufferInheritanceInfo {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkCommandBufferBeginInfo.html>"]
pub struct CommandBufferBeginInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub flags: CommandBufferUsageFlags,
pub p_inheritance_info: *const CommandBufferInheritanceInfo,
}
impl ::std::default::Default for CommandBufferBeginInfo {
fn default() -> CommandBufferBeginInfo {
CommandBufferBeginInfo {
s_type: StructureType::COMMAND_BUFFER_BEGIN_INFO,
p_next: ::std::ptr::null(),
flags: CommandBufferUsageFlags::default(),
p_inheritance_info: ::std::ptr::null(),
}
}
}
impl CommandBufferBeginInfo {
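#[doc = r" A sketch of how the lifetime `'a` ties the builder to borrowed data: `inheritance_info`"]
#[doc = r" stores only a raw pointer, so the referenced struct must outlive the builder. The"]
#[doc = r" `ONE_TIME_SUBMIT` flag is assumed to be among the usage flags defined in this module."]
#[doc = r""]
#[doc = r" ```ignore"]
#[doc = r" let inheritance = CommandBufferInheritanceInfo::builder().subpass(0).build();"]
#[doc = r" let begin_info = CommandBufferBeginInfo::builder()"]
#[doc = r"     .flags(CommandBufferUsageFlags::ONE_TIME_SUBMIT)"]
#[doc = r"     .inheritance_info(&inheritance);"]
#[doc = r" ```"]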
pub fn builder<'a>() -> CommandBufferBeginInfoBuilder<'a> {
CommandBufferBeginInfoBuilder {
inner: CommandBufferBeginInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct CommandBufferBeginInfoBuilder<'a> {
inner: CommandBufferBeginInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsCommandBufferBeginInfo {}
impl<'a> ::std::ops::Deref for CommandBufferBeginInfoBuilder<'a> {
type Target = CommandBufferBeginInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for CommandBufferBeginInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> CommandBufferBeginInfoBuilder<'a> {
pub fn flags(mut self, flags: CommandBufferUsageFlags) -> CommandBufferBeginInfoBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn inheritance_info(
mut self,
inheritance_info: &'a CommandBufferInheritanceInfo,
) -> CommandBufferBeginInfoBuilder<'a> {
self.inner.p_inheritance_info = inheritance_info;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsCommandBufferBeginInfo>(
mut self,
next: &'a mut T,
) -> CommandBufferBeginInfoBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> CommandBufferBeginInfo {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkRenderPassBeginInfo.html>"]
pub struct RenderPassBeginInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub render_pass: RenderPass,
pub framebuffer: Framebuffer,
pub render_area: Rect2D,
pub clear_value_count: u32,
pub p_clear_values: *const ClearValue,
}
impl fmt::Debug for RenderPassBeginInfo {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
fmt.debug_struct("RenderPassBeginInfo")
.field("s_type", &self.s_type)
.field("p_next", &self.p_next)
.field("render_pass", &self.render_pass)
.field("framebuffer", &self.framebuffer)
.field("render_area", &self.render_area)
.field("clear_value_count", &self.clear_value_count)
.field("p_clear_values", &"union")
.finish()
}
}
impl ::std::default::Default for RenderPassBeginInfo {
fn default() -> RenderPassBeginInfo {
RenderPassBeginInfo {
s_type: StructureType::RENDER_PASS_BEGIN_INFO,
p_next: ::std::ptr::null(),
render_pass: RenderPass::default(),
framebuffer: Framebuffer::default(),
render_area: Rect2D::default(),
clear_value_count: u32::default(),
p_clear_values: ::std::ptr::null(),
}
}
}
impl RenderPassBeginInfo {
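#[doc = r" A minimal sketch: the `clear_values` setter fills both `clear_value_count` and"]
#[doc = r" `p_clear_values`, and the `ClearValue` union (defined below) is written one variant at a"]
#[doc = r" time. `render_pass` and `framebuffer` are assumed to be handles obtained elsewhere."]
#[doc = r""]
#[doc = r" ```ignore"]
#[doc = r" let clear_values = [ClearValue {"]
#[doc = r"     color: ClearColorValue {"]
#[doc = r"         float32: [0.0, 0.0, 0.0, 1.0],"]
#[doc = r"     },"]
#[doc = r" }];"]
#[doc = r" let begin_info = RenderPassBeginInfo::builder()"]
#[doc = r"     .render_pass(render_pass)"]
#[doc = r"     .framebuffer(framebuffer)"]
#[doc = r"     .render_area(Rect2D::default())"]
#[doc = r"     .clear_values(&clear_values);"]
#[doc = r" ```"]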
pub fn builder<'a>() -> RenderPassBeginInfoBuilder<'a> {
RenderPassBeginInfoBuilder {
inner: RenderPassBeginInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct RenderPassBeginInfoBuilder<'a> {
inner: RenderPassBeginInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsRenderPassBeginInfo {}
impl<'a> ::std::ops::Deref for RenderPassBeginInfoBuilder<'a> {
type Target = RenderPassBeginInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for RenderPassBeginInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> RenderPassBeginInfoBuilder<'a> {
pub fn render_pass(mut self, render_pass: RenderPass) -> RenderPassBeginInfoBuilder<'a> {
self.inner.render_pass = render_pass;
self
}
pub fn framebuffer(mut self, framebuffer: Framebuffer) -> RenderPassBeginInfoBuilder<'a> {
self.inner.framebuffer = framebuffer;
self
}
pub fn render_area(mut self, render_area: Rect2D) -> RenderPassBeginInfoBuilder<'a> {
self.inner.render_area = render_area;
self
}
pub fn clear_values(
mut self,
clear_values: &'a [ClearValue],
) -> RenderPassBeginInfoBuilder<'a> {
self.inner.clear_value_count = clear_values.len() as _;
self.inner.p_clear_values = clear_values.as_ptr();
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsRenderPassBeginInfo>(
mut self,
next: &'a mut T,
) -> RenderPassBeginInfoBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> RenderPassBeginInfo {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkClearColorValue.html>"]
pub union ClearColorValue {
pub float32: [f32; 4],
pub int32: [i32; 4],
pub uint32: [u32; 4],
}
impl ::std::default::Default for ClearColorValue {
fn default() -> ClearColorValue {
unsafe { ::std::mem::zeroed() }
}
}
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkClearDepthStencilValue.html>"]
pub struct ClearDepthStencilValue {
pub depth: f32,
pub stencil: u32,
}
impl ClearDepthStencilValue {
pub fn builder<'a>() -> ClearDepthStencilValueBuilder<'a> {
ClearDepthStencilValueBuilder {
inner: ClearDepthStencilValue::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ClearDepthStencilValueBuilder<'a> {
inner: ClearDepthStencilValue,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for ClearDepthStencilValueBuilder<'a> {
type Target = ClearDepthStencilValue;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ClearDepthStencilValueBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ClearDepthStencilValueBuilder<'a> {
pub fn depth(mut self, depth: f32) -> ClearDepthStencilValueBuilder<'a> {
self.inner.depth = depth;
self
}
pub fn stencil(mut self, stencil: u32) -> ClearDepthStencilValueBuilder<'a> {
self.inner.stencil = stencil;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ClearDepthStencilValue {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkClearValue.html>"]
pub union ClearValue {
pub color: ClearColorValue,
pub depth_stencil: ClearDepthStencilValue,
}
impl ::std::default::Default for ClearValue {
fn default() -> ClearValue {
unsafe { ::std::mem::zeroed() }
}
}
#[repr(C)]
#[derive(Copy, Clone, Default)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkClearAttachment.html>"]
pub struct ClearAttachment {
pub aspect_mask: ImageAspectFlags,
pub color_attachment: u32,
pub clear_value: ClearValue,
}
impl fmt::Debug for ClearAttachment {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
fmt.debug_struct("ClearAttachment")
.field("aspect_mask", &self.aspect_mask)
.field("color_attachment", &self.color_attachment)
.field("clear_value", &"union")
.finish()
}
}
impl ClearAttachment {
pub fn builder<'a>() -> ClearAttachmentBuilder<'a> {
ClearAttachmentBuilder {
inner: ClearAttachment::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ClearAttachmentBuilder<'a> {
inner: ClearAttachment,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for ClearAttachmentBuilder<'a> {
type Target = ClearAttachment;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ClearAttachmentBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ClearAttachmentBuilder<'a> {
pub fn aspect_mask(mut self, aspect_mask: ImageAspectFlags) -> ClearAttachmentBuilder<'a> {
self.inner.aspect_mask = aspect_mask;
self
}
pub fn color_attachment(mut self, color_attachment: u32) -> ClearAttachmentBuilder<'a> {
self.inner.color_attachment = color_attachment;
self
}
pub fn clear_value(mut self, clear_value: ClearValue) -> ClearAttachmentBuilder<'a> {
self.inner.clear_value = clear_value;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ClearAttachment {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkAttachmentDescription.html>"]
pub struct AttachmentDescription {
pub flags: AttachmentDescriptionFlags,
pub format: Format,
pub samples: SampleCountFlags,
pub load_op: AttachmentLoadOp,
pub store_op: AttachmentStoreOp,
pub stencil_load_op: AttachmentLoadOp,
pub stencil_store_op: AttachmentStoreOp,
pub initial_layout: ImageLayout,
pub final_layout: ImageLayout,
}
impl AttachmentDescription {
pub fn builder<'a>() -> AttachmentDescriptionBuilder<'a> {
AttachmentDescriptionBuilder {
inner: AttachmentDescription::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct AttachmentDescriptionBuilder<'a> {
inner: AttachmentDescription,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for AttachmentDescriptionBuilder<'a> {
type Target = AttachmentDescription;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for AttachmentDescriptionBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> AttachmentDescriptionBuilder<'a> {
pub fn flags(mut self, flags: AttachmentDescriptionFlags) -> AttachmentDescriptionBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn format(mut self, format: Format) -> AttachmentDescriptionBuilder<'a> {
self.inner.format = format;
self
}
pub fn samples(mut self, samples: SampleCountFlags) -> AttachmentDescriptionBuilder<'a> {
self.inner.samples = samples;
self
}
pub fn load_op(mut self, load_op: AttachmentLoadOp) -> AttachmentDescriptionBuilder<'a> {
self.inner.load_op = load_op;
self
}
pub fn store_op(mut self, store_op: AttachmentStoreOp) -> AttachmentDescriptionBuilder<'a> {
self.inner.store_op = store_op;
self
}
pub fn stencil_load_op(
mut self,
stencil_load_op: AttachmentLoadOp,
) -> AttachmentDescriptionBuilder<'a> {
self.inner.stencil_load_op = stencil_load_op;
self
}
pub fn stencil_store_op(
mut self,
stencil_store_op: AttachmentStoreOp,
) -> AttachmentDescriptionBuilder<'a> {
self.inner.stencil_store_op = stencil_store_op;
self
}
pub fn initial_layout(
mut self,
initial_layout: ImageLayout,
) -> AttachmentDescriptionBuilder<'a> {
self.inner.initial_layout = initial_layout;
self
}
pub fn final_layout(mut self, final_layout: ImageLayout) -> AttachmentDescriptionBuilder<'a> {
self.inner.final_layout = final_layout;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> AttachmentDescription {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkAttachmentReference.html>"]
pub struct AttachmentReference {
pub attachment: u32,
pub layout: ImageLayout,
}
impl AttachmentReference {
pub fn builder<'a>() -> AttachmentReferenceBuilder<'a> {
AttachmentReferenceBuilder {
inner: AttachmentReference::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct AttachmentReferenceBuilder<'a> {
inner: AttachmentReference,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for AttachmentReferenceBuilder<'a> {
type Target = AttachmentReference;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for AttachmentReferenceBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> AttachmentReferenceBuilder<'a> {
pub fn attachment(mut self, attachment: u32) -> AttachmentReferenceBuilder<'a> {
self.inner.attachment = attachment;
self
}
pub fn layout(mut self, layout: ImageLayout) -> AttachmentReferenceBuilder<'a> {
self.inner.layout = layout;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> AttachmentReference {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkSubpassDescription.html>"]
pub struct SubpassDescription {
pub flags: SubpassDescriptionFlags,
pub pipeline_bind_point: PipelineBindPoint,
pub input_attachment_count: u32,
pub p_input_attachments: *const AttachmentReference,
pub color_attachment_count: u32,
pub p_color_attachments: *const AttachmentReference,
pub p_resolve_attachments: *const AttachmentReference,
pub p_depth_stencil_attachment: *const AttachmentReference,
pub preserve_attachment_count: u32,
pub p_preserve_attachments: *const u32,
}
impl ::std::default::Default for SubpassDescription {
fn default() -> SubpassDescription {
SubpassDescription {
flags: SubpassDescriptionFlags::default(),
pipeline_bind_point: PipelineBindPoint::default(),
input_attachment_count: u32::default(),
p_input_attachments: ::std::ptr::null(),
color_attachment_count: u32::default(),
p_color_attachments: ::std::ptr::null(),
p_resolve_attachments: ::std::ptr::null(),
p_depth_stencil_attachment: ::std::ptr::null(),
preserve_attachment_count: u32::default(),
p_preserve_attachments: ::std::ptr::null(),
}
}
}
impl SubpassDescription {
pub fn builder<'a>() -> SubpassDescriptionBuilder<'a> {
SubpassDescriptionBuilder {
inner: SubpassDescription::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct SubpassDescriptionBuilder<'a> {
inner: SubpassDescription,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for SubpassDescriptionBuilder<'a> {
type Target = SubpassDescription;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for SubpassDescriptionBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> SubpassDescriptionBuilder<'a> {
pub fn flags(mut self, flags: SubpassDescriptionFlags) -> SubpassDescriptionBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn pipeline_bind_point(
mut self,
pipeline_bind_point: PipelineBindPoint,
) -> SubpassDescriptionBuilder<'a> {
self.inner.pipeline_bind_point = pipeline_bind_point;
self
}
pub fn input_attachments(
mut self,
input_attachments: &'a [AttachmentReference],
) -> SubpassDescriptionBuilder<'a> {
self.inner.input_attachment_count = input_attachments.len() as _;
self.inner.p_input_attachments = input_attachments.as_ptr();
self
}
pub fn color_attachments(
mut self,
color_attachments: &'a [AttachmentReference],
) -> SubpassDescriptionBuilder<'a> {
self.inner.color_attachment_count = color_attachments.len() as _;
self.inner.p_color_attachments = color_attachments.as_ptr();
self
}
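#[doc = r" Note that this setter writes `color_attachment_count`: in Vulkan, a non-null"]
#[doc = r" `pResolveAttachments` array must contain exactly `colorAttachmentCount` elements, so"]
#[doc = r" the resolve slice is expected to match the color attachment slice in length."]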
pub fn resolve_attachments(
mut self,
resolve_attachments: &'a [AttachmentReference],
) -> SubpassDescriptionBuilder<'a> {
self.inner.color_attachment_count = resolve_attachments.len() as _;
self.inner.p_resolve_attachments = resolve_attachments.as_ptr();
self
}
pub fn depth_stencil_attachment(
mut self,
depth_stencil_attachment: &'a AttachmentReference,
) -> SubpassDescriptionBuilder<'a> {
self.inner.p_depth_stencil_attachment = depth_stencil_attachment;
self
}
pub fn preserve_attachments(
mut self,
preserve_attachments: &'a [u32],
) -> SubpassDescriptionBuilder<'a> {
self.inner.preserve_attachment_count = preserve_attachments.len() as _;
self.inner.p_preserve_attachments = preserve_attachments.as_ptr();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> SubpassDescription {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkSubpassDependency.html>"]
pub struct SubpassDependency {
pub src_subpass: u32,
pub dst_subpass: u32,
pub src_stage_mask: PipelineStageFlags,
pub dst_stage_mask: PipelineStageFlags,
pub src_access_mask: AccessFlags,
pub dst_access_mask: AccessFlags,
pub dependency_flags: DependencyFlags,
}
impl SubpassDependency {
pub fn builder<'a>() -> SubpassDependencyBuilder<'a> {
SubpassDependencyBuilder {
inner: SubpassDependency::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct SubpassDependencyBuilder<'a> {
inner: SubpassDependency,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for SubpassDependencyBuilder<'a> {
type Target = SubpassDependency;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for SubpassDependencyBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> SubpassDependencyBuilder<'a> {
pub fn src_subpass(mut self, src_subpass: u32) -> SubpassDependencyBuilder<'a> {
self.inner.src_subpass = src_subpass;
self
}
pub fn dst_subpass(mut self, dst_subpass: u32) -> SubpassDependencyBuilder<'a> {
self.inner.dst_subpass = dst_subpass;
self
}
pub fn src_stage_mask(
mut self,
src_stage_mask: PipelineStageFlags,
) -> SubpassDependencyBuilder<'a> {
self.inner.src_stage_mask = src_stage_mask;
self
}
pub fn dst_stage_mask(
mut self,
dst_stage_mask: PipelineStageFlags,
) -> SubpassDependencyBuilder<'a> {
self.inner.dst_stage_mask = dst_stage_mask;
self
}
pub fn src_access_mask(mut self, src_access_mask: AccessFlags) -> SubpassDependencyBuilder<'a> {
self.inner.src_access_mask = src_access_mask;
self
}
pub fn dst_access_mask(mut self, dst_access_mask: AccessFlags) -> SubpassDependencyBuilder<'a> {
self.inner.dst_access_mask = dst_access_mask;
self
}
pub fn dependency_flags(
mut self,
dependency_flags: DependencyFlags,
) -> SubpassDependencyBuilder<'a> {
self.inner.dependency_flags = dependency_flags;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> SubpassDependency {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkRenderPassCreateInfo.html>"]
pub struct RenderPassCreateInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub flags: RenderPassCreateFlags,
pub attachment_count: u32,
pub p_attachments: *const AttachmentDescription,
pub subpass_count: u32,
pub p_subpasses: *const SubpassDescription,
pub dependency_count: u32,
pub p_dependencies: *const SubpassDependency,
}
impl ::std::default::Default for RenderPassCreateInfo {
fn default() -> RenderPassCreateInfo {
RenderPassCreateInfo {
s_type: StructureType::RENDER_PASS_CREATE_INFO,
p_next: ::std::ptr::null(),
flags: RenderPassCreateFlags::default(),
attachment_count: u32::default(),
p_attachments: ::std::ptr::null(),
subpass_count: u32::default(),
p_subpasses: ::std::ptr::null(),
dependency_count: u32::default(),
p_dependencies: ::std::ptr::null(),
}
}
}
impl RenderPassCreateInfo {
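#[doc = r" A sketch of assembling a single-subpass render pass from the builders above. The"]
#[doc = r" `Format`, `SampleCountFlags`, `AttachmentLoadOp`/`AttachmentStoreOp`, `ImageLayout`, and"]
#[doc = r" `PipelineBindPoint` values used are assumed to be among the constants defined in this"]
#[doc = r" module:"]
#[doc = r""]
#[doc = r" ```ignore"]
#[doc = r" let attachments = [AttachmentDescription::builder()"]
#[doc = r"     .format(Format::B8G8R8A8_UNORM)"]
#[doc = r"     .samples(SampleCountFlags::TYPE_1)"]
#[doc = r"     .load_op(AttachmentLoadOp::CLEAR)"]
#[doc = r"     .store_op(AttachmentStoreOp::STORE)"]
#[doc = r"     .final_layout(ImageLayout::PRESENT_SRC_KHR)"]
#[doc = r"     .build()];"]
#[doc = r" let color_refs = [AttachmentReference::builder()"]
#[doc = r"     .attachment(0)"]
#[doc = r"     .layout(ImageLayout::COLOR_ATTACHMENT_OPTIMAL)"]
#[doc = r"     .build()];"]
#[doc = r" let subpasses = [SubpassDescription::builder()"]
#[doc = r"     .pipeline_bind_point(PipelineBindPoint::GRAPHICS)"]
#[doc = r"     .color_attachments(&color_refs)"]
#[doc = r"     .build()];"]
#[doc = r" let render_pass_info = RenderPassCreateInfo::builder()"]
#[doc = r"     .attachments(&attachments)"]
#[doc = r"     .subpasses(&subpasses);"]
#[doc = r" ```"]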
pub fn builder<'a>() -> RenderPassCreateInfoBuilder<'a> {
RenderPassCreateInfoBuilder {
inner: RenderPassCreateInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct RenderPassCreateInfoBuilder<'a> {
inner: RenderPassCreateInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsRenderPassCreateInfo {}
impl<'a> ::std::ops::Deref for RenderPassCreateInfoBuilder<'a> {
type Target = RenderPassCreateInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for RenderPassCreateInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> RenderPassCreateInfoBuilder<'a> {
pub fn flags(mut self, flags: RenderPassCreateFlags) -> RenderPassCreateInfoBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn attachments(
mut self,
attachments: &'a [AttachmentDescription],
) -> RenderPassCreateInfoBuilder<'a> {
self.inner.attachment_count = attachments.len() as _;
self.inner.p_attachments = attachments.as_ptr();
self
}
pub fn subpasses(
mut self,
subpasses: &'a [SubpassDescription],
) -> RenderPassCreateInfoBuilder<'a> {
self.inner.subpass_count = subpasses.len() as _;
self.inner.p_subpasses = subpasses.as_ptr();
self
}
pub fn dependencies(
mut self,
dependencies: &'a [SubpassDependency],
) -> RenderPassCreateInfoBuilder<'a> {
self.inner.dependency_count = dependencies.len() as _;
self.inner.p_dependencies = dependencies.as_ptr();
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsRenderPassCreateInfo>(
mut self,
next: &'a mut T,
) -> RenderPassCreateInfoBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> RenderPassCreateInfo {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkEventCreateInfo.html>"]
pub struct EventCreateInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub flags: EventCreateFlags,
}
impl ::std::default::Default for EventCreateInfo {
fn default() -> EventCreateInfo {
EventCreateInfo {
s_type: StructureType::EVENT_CREATE_INFO,
p_next: ::std::ptr::null(),
flags: EventCreateFlags::default(),
}
}
}
impl EventCreateInfo {
pub fn builder<'a>() -> EventCreateInfoBuilder<'a> {
EventCreateInfoBuilder {
inner: EventCreateInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct EventCreateInfoBuilder<'a> {
inner: EventCreateInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsEventCreateInfo {}
impl<'a> ::std::ops::Deref for EventCreateInfoBuilder<'a> {
type Target = EventCreateInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for EventCreateInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> EventCreateInfoBuilder<'a> {
pub fn flags(mut self, flags: EventCreateFlags) -> EventCreateInfoBuilder<'a> {
self.inner.flags = flags;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsEventCreateInfo>(
mut self,
next: &'a mut T,
) -> EventCreateInfoBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> EventCreateInfo {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkFenceCreateInfo.html>"]
pub struct FenceCreateInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub flags: FenceCreateFlags,
}
impl ::std::default::Default for FenceCreateInfo {
fn default() -> FenceCreateInfo {
FenceCreateInfo {
s_type: StructureType::FENCE_CREATE_INFO,
p_next: ::std::ptr::null(),
flags: FenceCreateFlags::default(),
}
}
}
impl FenceCreateInfo {
pub fn builder<'a>() -> FenceCreateInfoBuilder<'a> {
FenceCreateInfoBuilder {
inner: FenceCreateInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct FenceCreateInfoBuilder<'a> {
inner: FenceCreateInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsFenceCreateInfo {}
impl<'a> ::std::ops::Deref for FenceCreateInfoBuilder<'a> {
type Target = FenceCreateInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for FenceCreateInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> FenceCreateInfoBuilder<'a> {
pub fn flags(mut self, flags: FenceCreateFlags) -> FenceCreateInfoBuilder<'a> {
self.inner.flags = flags;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsFenceCreateInfo>(
mut self,
next: &'a mut T,
) -> FenceCreateInfoBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> FenceCreateInfo {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDeviceFeatures.html>"]
pub struct PhysicalDeviceFeatures {
pub robust_buffer_access: Bool32,
pub full_draw_index_uint32: Bool32,
pub image_cube_array: Bool32,
pub independent_blend: Bool32,
pub geometry_shader: Bool32,
pub tessellation_shader: Bool32,
pub sample_rate_shading: Bool32,
pub dual_src_blend: Bool32,
pub logic_op: Bool32,
pub multi_draw_indirect: Bool32,
pub draw_indirect_first_instance: Bool32,
pub depth_clamp: Bool32,
pub depth_bias_clamp: Bool32,
pub fill_mode_non_solid: Bool32,
pub depth_bounds: Bool32,
pub wide_lines: Bool32,
pub large_points: Bool32,
pub alpha_to_one: Bool32,
pub multi_viewport: Bool32,
pub sampler_anisotropy: Bool32,
pub texture_compression_etc2: Bool32,
pub texture_compression_astc_ldr: Bool32,
pub texture_compression_bc: Bool32,
pub occlusion_query_precise: Bool32,
pub pipeline_statistics_query: Bool32,
pub vertex_pipeline_stores_and_atomics: Bool32,
pub fragment_stores_and_atomics: Bool32,
pub shader_tessellation_and_geometry_point_size: Bool32,
pub shader_image_gather_extended: Bool32,
pub shader_storage_image_extended_formats: Bool32,
pub shader_storage_image_multisample: Bool32,
pub shader_storage_image_read_without_format: Bool32,
pub shader_storage_image_write_without_format: Bool32,
pub shader_uniform_buffer_array_dynamic_indexing: Bool32,
pub shader_sampled_image_array_dynamic_indexing: Bool32,
pub shader_storage_buffer_array_dynamic_indexing: Bool32,
pub shader_storage_image_array_dynamic_indexing: Bool32,
pub shader_clip_distance: Bool32,
pub shader_cull_distance: Bool32,
pub shader_float64: Bool32,
pub shader_int64: Bool32,
pub shader_int16: Bool32,
pub shader_resource_residency: Bool32,
pub shader_resource_min_lod: Bool32,
pub sparse_binding: Bool32,
pub sparse_residency_buffer: Bool32,
pub sparse_residency_image2_d: Bool32,
pub sparse_residency_image3_d: Bool32,
pub sparse_residency2_samples: Bool32,
pub sparse_residency4_samples: Bool32,
pub sparse_residency8_samples: Bool32,
pub sparse_residency16_samples: Bool32,
pub sparse_residency_aliased: Bool32,
pub variable_multisample_rate: Bool32,
pub inherited_queries: Bool32,
}
impl PhysicalDeviceFeatures {
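#[doc = r" A minimal sketch: each `bool` setter stores the corresponding `Bool32` field, so a"]
#[doc = r" feature set for device creation can be written as"]
#[doc = r""]
#[doc = r" ```ignore"]
#[doc = r" let features = PhysicalDeviceFeatures::builder()"]
#[doc = r"     .sampler_anisotropy(true)"]
#[doc = r"     .fill_mode_non_solid(true);"]
#[doc = r" ```"]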
pub fn builder<'a>() -> PhysicalDeviceFeaturesBuilder<'a> {
PhysicalDeviceFeaturesBuilder {
inner: PhysicalDeviceFeatures::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDeviceFeaturesBuilder<'a> {
inner: PhysicalDeviceFeatures,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for PhysicalDeviceFeaturesBuilder<'a> {
type Target = PhysicalDeviceFeatures;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceFeaturesBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDeviceFeaturesBuilder<'a> {
pub fn robust_buffer_access(
mut self,
robust_buffer_access: bool,
) -> PhysicalDeviceFeaturesBuilder<'a> {
self.inner.robust_buffer_access = robust_buffer_access.into();
self
}
pub fn full_draw_index_uint32(
mut self,
full_draw_index_uint32: bool,
) -> PhysicalDeviceFeaturesBuilder<'a> {
self.inner.full_draw_index_uint32 = full_draw_index_uint32.into();
self
}
pub fn image_cube_array(mut self, image_cube_array: bool) -> PhysicalDeviceFeaturesBuilder<'a> {
self.inner.image_cube_array = image_cube_array.into();
self
}
pub fn independent_blend(
mut self,
independent_blend: bool,
) -> PhysicalDeviceFeaturesBuilder<'a> {
self.inner.independent_blend = independent_blend.into();
self
}
pub fn geometry_shader(mut self, geometry_shader: bool) -> PhysicalDeviceFeaturesBuilder<'a> {
self.inner.geometry_shader = geometry_shader.into();
self
}
pub fn tessellation_shader(
mut self,
tessellation_shader: bool,
) -> PhysicalDeviceFeaturesBuilder<'a> {
self.inner.tessellation_shader = tessellation_shader.into();
self
}
pub fn sample_rate_shading(
mut self,
sample_rate_shading: bool,
) -> PhysicalDeviceFeaturesBuilder<'a> {
self.inner.sample_rate_shading = sample_rate_shading.into();
self
}
pub fn dual_src_blend(mut self, dual_src_blend: bool) -> PhysicalDeviceFeaturesBuilder<'a> {
self.inner.dual_src_blend = dual_src_blend.into();
self
}
pub fn logic_op(mut self, logic_op: bool) -> PhysicalDeviceFeaturesBuilder<'a> {
self.inner.logic_op = logic_op.into();
self
}
pub fn multi_draw_indirect(
mut self,
multi_draw_indirect: bool,
) -> PhysicalDeviceFeaturesBuilder<'a> {
self.inner.multi_draw_indirect = multi_draw_indirect.into();
self
}
pub fn draw_indirect_first_instance(
mut self,
draw_indirect_first_instance: bool,
) -> PhysicalDeviceFeaturesBuilder<'a> {
self.inner.draw_indirect_first_instance = draw_indirect_first_instance.into();
self
}
pub fn depth_clamp(mut self, depth_clamp: bool) -> PhysicalDeviceFeaturesBuilder<'a> {
self.inner.depth_clamp = depth_clamp.into();
self
}
pub fn depth_bias_clamp(mut self, depth_bias_clamp: bool) -> PhysicalDeviceFeaturesBuilder<'a> {
self.inner.depth_bias_clamp = depth_bias_clamp.into();
self
}
pub fn fill_mode_non_solid(
mut self,
fill_mode_non_solid: bool,
) -> PhysicalDeviceFeaturesBuilder<'a> {
self.inner.fill_mode_non_solid = fill_mode_non_solid.into();
self
}
pub fn depth_bounds(mut self, depth_bounds: bool) -> PhysicalDeviceFeaturesBuilder<'a> {
self.inner.depth_bounds = depth_bounds.into();
self
}
pub fn wide_lines(mut self, wide_lines: bool) -> PhysicalDeviceFeaturesBuilder<'a> {
self.inner.wide_lines = wide_lines.into();
self
}
pub fn large_points(mut self, large_points: bool) -> PhysicalDeviceFeaturesBuilder<'a> {
self.inner.large_points = large_points.into();
self
}
pub fn alpha_to_one(mut self, alpha_to_one: bool) -> PhysicalDeviceFeaturesBuilder<'a> {
self.inner.alpha_to_one = alpha_to_one.into();
self
}
pub fn multi_viewport(mut self, multi_viewport: bool) -> PhysicalDeviceFeaturesBuilder<'a> {
self.inner.multi_viewport = multi_viewport.into();
self
}
pub fn sampler_anisotropy(
mut self,
sampler_anisotropy: bool,
) -> PhysicalDeviceFeaturesBuilder<'a> {
self.inner.sampler_anisotropy = sampler_anisotropy.into();
self
}
pub fn texture_compression_etc2(
mut self,
texture_compression_etc2: bool,
) -> PhysicalDeviceFeaturesBuilder<'a> {
self.inner.texture_compression_etc2 = texture_compression_etc2.into();
self
}
pub fn texture_compression_astc_ldr(
mut self,
texture_compression_astc_ldr: bool,
) -> PhysicalDeviceFeaturesBuilder<'a> {
self.inner.texture_compression_astc_ldr = texture_compression_astc_ldr.into();
self
}
pub fn texture_compression_bc(
mut self,
texture_compression_bc: bool,
) -> PhysicalDeviceFeaturesBuilder<'a> {
self.inner.texture_compression_bc = texture_compression_bc.into();
self
}
pub fn occlusion_query_precise(
mut self,
occlusion_query_precise: bool,
) -> PhysicalDeviceFeaturesBuilder<'a> {
self.inner.occlusion_query_precise = occlusion_query_precise.into();
self
}
pub fn pipeline_statistics_query(
mut self,
pipeline_statistics_query: bool,
) -> PhysicalDeviceFeaturesBuilder<'a> {
self.inner.pipeline_statistics_query = pipeline_statistics_query.into();
self
}
pub fn vertex_pipeline_stores_and_atomics(
mut self,
vertex_pipeline_stores_and_atomics: bool,
) -> PhysicalDeviceFeaturesBuilder<'a> {
self.inner.vertex_pipeline_stores_and_atomics = vertex_pipeline_stores_and_atomics.into();
self
}
pub fn fragment_stores_and_atomics(
mut self,
fragment_stores_and_atomics: bool,
) -> PhysicalDeviceFeaturesBuilder<'a> {
self.inner.fragment_stores_and_atomics = fragment_stores_and_atomics.into();
self
}
pub fn shader_tessellation_and_geometry_point_size(
mut self,
shader_tessellation_and_geometry_point_size: bool,
) -> PhysicalDeviceFeaturesBuilder<'a> {
self.inner.shader_tessellation_and_geometry_point_size =
shader_tessellation_and_geometry_point_size.into();
self
}
pub fn shader_image_gather_extended(
mut self,
shader_image_gather_extended: bool,
) -> PhysicalDeviceFeaturesBuilder<'a> {
self.inner.shader_image_gather_extended = shader_image_gather_extended.into();
self
}
pub fn shader_storage_image_extended_formats(
mut self,
shader_storage_image_extended_formats: bool,
) -> PhysicalDeviceFeaturesBuilder<'a> {
self.inner.shader_storage_image_extended_formats =
shader_storage_image_extended_formats.into();
self
}
pub fn shader_storage_image_multisample(
mut self,
shader_storage_image_multisample: bool,
) -> PhysicalDeviceFeaturesBuilder<'a> {
self.inner.shader_storage_image_multisample = shader_storage_image_multisample.into();
self
}
pub fn shader_storage_image_read_without_format(
mut self,
shader_storage_image_read_without_format: bool,
) -> PhysicalDeviceFeaturesBuilder<'a> {
self.inner.shader_storage_image_read_without_format =
shader_storage_image_read_without_format.into();
self
}
pub fn shader_storage_image_write_without_format(
mut self,
shader_storage_image_write_without_format: bool,
) -> PhysicalDeviceFeaturesBuilder<'a> {
self.inner.shader_storage_image_write_without_format =
shader_storage_image_write_without_format.into();
self
}
pub fn shader_uniform_buffer_array_dynamic_indexing(
mut self,
shader_uniform_buffer_array_dynamic_indexing: bool,
) -> PhysicalDeviceFeaturesBuilder<'a> {
self.inner.shader_uniform_buffer_array_dynamic_indexing =
shader_uniform_buffer_array_dynamic_indexing.into();
self
}
pub fn shader_sampled_image_array_dynamic_indexing(
mut self,
shader_sampled_image_array_dynamic_indexing: bool,
) -> PhysicalDeviceFeaturesBuilder<'a> {
self.inner.shader_sampled_image_array_dynamic_indexing =
shader_sampled_image_array_dynamic_indexing.into();
self
}
pub fn shader_storage_buffer_array_dynamic_indexing(
mut self,
shader_storage_buffer_array_dynamic_indexing: bool,
) -> PhysicalDeviceFeaturesBuilder<'a> {
self.inner.shader_storage_buffer_array_dynamic_indexing =
shader_storage_buffer_array_dynamic_indexing.into();
self
}
pub fn shader_storage_image_array_dynamic_indexing(
mut self,
shader_storage_image_array_dynamic_indexing: bool,
) -> PhysicalDeviceFeaturesBuilder<'a> {
self.inner.shader_storage_image_array_dynamic_indexing =
shader_storage_image_array_dynamic_indexing.into();
self
}
pub fn shader_clip_distance(
mut self,
shader_clip_distance: bool,
) -> PhysicalDeviceFeaturesBuilder<'a> {
self.inner.shader_clip_distance = shader_clip_distance.into();
self
}
pub fn shader_cull_distance(
mut self,
shader_cull_distance: bool,
) -> PhysicalDeviceFeaturesBuilder<'a> {
self.inner.shader_cull_distance = shader_cull_distance.into();
self
}
pub fn shader_float64(mut self, shader_float64: bool) -> PhysicalDeviceFeaturesBuilder<'a> {
self.inner.shader_float64 = shader_float64.into();
self
}
pub fn shader_int64(mut self, shader_int64: bool) -> PhysicalDeviceFeaturesBuilder<'a> {
self.inner.shader_int64 = shader_int64.into();
self
}
pub fn shader_int16(mut self, shader_int16: bool) -> PhysicalDeviceFeaturesBuilder<'a> {
self.inner.shader_int16 = shader_int16.into();
self
}
pub fn shader_resource_residency(
mut self,
shader_resource_residency: bool,
) -> PhysicalDeviceFeaturesBuilder<'a> {
self.inner.shader_resource_residency = shader_resource_residency.into();
self
}
pub fn shader_resource_min_lod(
mut self,
shader_resource_min_lod: bool,
) -> PhysicalDeviceFeaturesBuilder<'a> {
self.inner.shader_resource_min_lod = shader_resource_min_lod.into();
self
}
pub fn sparse_binding(mut self, sparse_binding: bool) -> PhysicalDeviceFeaturesBuilder<'a> {
self.inner.sparse_binding = sparse_binding.into();
self
}
pub fn sparse_residency_buffer(
mut self,
sparse_residency_buffer: bool,
) -> PhysicalDeviceFeaturesBuilder<'a> {
self.inner.sparse_residency_buffer = sparse_residency_buffer.into();
self
}
pub fn sparse_residency_image2_d(
mut self,
sparse_residency_image2_d: bool,
) -> PhysicalDeviceFeaturesBuilder<'a> {
self.inner.sparse_residency_image2_d = sparse_residency_image2_d.into();
self
}
pub fn sparse_residency_image3_d(
mut self,
sparse_residency_image3_d: bool,
) -> PhysicalDeviceFeaturesBuilder<'a> {
self.inner.sparse_residency_image3_d = sparse_residency_image3_d.into();
self
}
pub fn sparse_residency2_samples(
mut self,
sparse_residency2_samples: bool,
) -> PhysicalDeviceFeaturesBuilder<'a> {
self.inner.sparse_residency2_samples = sparse_residency2_samples.into();
self
}
pub fn sparse_residency4_samples(
mut self,
sparse_residency4_samples: bool,
) -> PhysicalDeviceFeaturesBuilder<'a> {
self.inner.sparse_residency4_samples = sparse_residency4_samples.into();
self
}
pub fn sparse_residency8_samples(
mut self,
sparse_residency8_samples: bool,
) -> PhysicalDeviceFeaturesBuilder<'a> {
self.inner.sparse_residency8_samples = sparse_residency8_samples.into();
self
}
pub fn sparse_residency16_samples(
mut self,
sparse_residency16_samples: bool,
) -> PhysicalDeviceFeaturesBuilder<'a> {
self.inner.sparse_residency16_samples = sparse_residency16_samples.into();
self
}
pub fn sparse_residency_aliased(
mut self,
sparse_residency_aliased: bool,
) -> PhysicalDeviceFeaturesBuilder<'a> {
self.inner.sparse_residency_aliased = sparse_residency_aliased.into();
self
}
pub fn variable_multisample_rate(
mut self,
variable_multisample_rate: bool,
) -> PhysicalDeviceFeaturesBuilder<'a> {
self.inner.variable_multisample_rate = variable_multisample_rate.into();
self
}
pub fn inherited_queries(
mut self,
inherited_queries: bool,
) -> PhysicalDeviceFeaturesBuilder<'a> {
self.inner.inherited_queries = inherited_queries.into();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDeviceFeatures {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDeviceSparseProperties.html>"]
pub struct PhysicalDeviceSparseProperties {
pub residency_standard2_d_block_shape: Bool32,
pub residency_standard2_d_multisample_block_shape: Bool32,
pub residency_standard3_d_block_shape: Bool32,
pub residency_aligned_mip_size: Bool32,
pub residency_non_resident_strict: Bool32,
}
impl PhysicalDeviceSparseProperties {
pub fn builder<'a>() -> PhysicalDeviceSparsePropertiesBuilder<'a> {
PhysicalDeviceSparsePropertiesBuilder {
inner: PhysicalDeviceSparseProperties::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDeviceSparsePropertiesBuilder<'a> {
inner: PhysicalDeviceSparseProperties,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for PhysicalDeviceSparsePropertiesBuilder<'a> {
type Target = PhysicalDeviceSparseProperties;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceSparsePropertiesBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDeviceSparsePropertiesBuilder<'a> {
pub fn residency_standard2_d_block_shape(
mut self,
residency_standard2_d_block_shape: bool,
) -> PhysicalDeviceSparsePropertiesBuilder<'a> {
self.inner.residency_standard2_d_block_shape = residency_standard2_d_block_shape.into();
self
}
pub fn residency_standard2_d_multisample_block_shape(
mut self,
residency_standard2_d_multisample_block_shape: bool,
) -> PhysicalDeviceSparsePropertiesBuilder<'a> {
self.inner.residency_standard2_d_multisample_block_shape =
residency_standard2_d_multisample_block_shape.into();
self
}
pub fn residency_standard3_d_block_shape(
mut self,
residency_standard3_d_block_shape: bool,
) -> PhysicalDeviceSparsePropertiesBuilder<'a> {
self.inner.residency_standard3_d_block_shape = residency_standard3_d_block_shape.into();
self
}
pub fn residency_aligned_mip_size(
mut self,
residency_aligned_mip_size: bool,
) -> PhysicalDeviceSparsePropertiesBuilder<'a> {
self.inner.residency_aligned_mip_size = residency_aligned_mip_size.into();
self
}
pub fn residency_non_resident_strict(
mut self,
residency_non_resident_strict: bool,
) -> PhysicalDeviceSparsePropertiesBuilder<'a> {
self.inner.residency_non_resident_strict = residency_non_resident_strict.into();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDeviceSparseProperties {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDeviceLimits.html>"]
pub struct PhysicalDeviceLimits {
pub max_image_dimension1_d: u32,
pub max_image_dimension2_d: u32,
pub max_image_dimension3_d: u32,
pub max_image_dimension_cube: u32,
pub max_image_array_layers: u32,
pub max_texel_buffer_elements: u32,
pub max_uniform_buffer_range: u32,
pub max_storage_buffer_range: u32,
pub max_push_constants_size: u32,
pub max_memory_allocation_count: u32,
pub max_sampler_allocation_count: u32,
pub buffer_image_granularity: DeviceSize,
pub sparse_address_space_size: DeviceSize,
pub max_bound_descriptor_sets: u32,
pub max_per_stage_descriptor_samplers: u32,
pub max_per_stage_descriptor_uniform_buffers: u32,
pub max_per_stage_descriptor_storage_buffers: u32,
pub max_per_stage_descriptor_sampled_images: u32,
pub max_per_stage_descriptor_storage_images: u32,
pub max_per_stage_descriptor_input_attachments: u32,
pub max_per_stage_resources: u32,
pub max_descriptor_set_samplers: u32,
pub max_descriptor_set_uniform_buffers: u32,
pub max_descriptor_set_uniform_buffers_dynamic: u32,
pub max_descriptor_set_storage_buffers: u32,
pub max_descriptor_set_storage_buffers_dynamic: u32,
pub max_descriptor_set_sampled_images: u32,
pub max_descriptor_set_storage_images: u32,
pub max_descriptor_set_input_attachments: u32,
pub max_vertex_input_attributes: u32,
pub max_vertex_input_bindings: u32,
pub max_vertex_input_attribute_offset: u32,
pub max_vertex_input_binding_stride: u32,
pub max_vertex_output_components: u32,
pub max_tessellation_generation_level: u32,
pub max_tessellation_patch_size: u32,
pub max_tessellation_control_per_vertex_input_components: u32,
pub max_tessellation_control_per_vertex_output_components: u32,
pub max_tessellation_control_per_patch_output_components: u32,
pub max_tessellation_control_total_output_components: u32,
pub max_tessellation_evaluation_input_components: u32,
pub max_tessellation_evaluation_output_components: u32,
pub max_geometry_shader_invocations: u32,
pub max_geometry_input_components: u32,
pub max_geometry_output_components: u32,
pub max_geometry_output_vertices: u32,
pub max_geometry_total_output_components: u32,
pub max_fragment_input_components: u32,
pub max_fragment_output_attachments: u32,
pub max_fragment_dual_src_attachments: u32,
pub max_fragment_combined_output_resources: u32,
pub max_compute_shared_memory_size: u32,
pub max_compute_work_group_count: [u32; 3],
pub max_compute_work_group_invocations: u32,
pub max_compute_work_group_size: [u32; 3],
pub sub_pixel_precision_bits: u32,
pub sub_texel_precision_bits: u32,
pub mipmap_precision_bits: u32,
pub max_draw_indexed_index_value: u32,
pub max_draw_indirect_count: u32,
pub max_sampler_lod_bias: f32,
pub max_sampler_anisotropy: f32,
pub max_viewports: u32,
pub max_viewport_dimensions: [u32; 2],
pub viewport_bounds_range: [f32; 2],
pub viewport_sub_pixel_bits: u32,
pub min_memory_map_alignment: usize,
pub min_texel_buffer_offset_alignment: DeviceSize,
pub min_uniform_buffer_offset_alignment: DeviceSize,
pub min_storage_buffer_offset_alignment: DeviceSize,
pub min_texel_offset: i32,
pub max_texel_offset: u32,
pub min_texel_gather_offset: i32,
pub max_texel_gather_offset: u32,
pub min_interpolation_offset: f32,
pub max_interpolation_offset: f32,
pub sub_pixel_interpolation_offset_bits: u32,
pub max_framebuffer_width: u32,
pub max_framebuffer_height: u32,
pub max_framebuffer_layers: u32,
pub framebuffer_color_sample_counts: SampleCountFlags,
pub framebuffer_depth_sample_counts: SampleCountFlags,
pub framebuffer_stencil_sample_counts: SampleCountFlags,
pub framebuffer_no_attachments_sample_counts: SampleCountFlags,
pub max_color_attachments: u32,
pub sampled_image_color_sample_counts: SampleCountFlags,
pub sampled_image_integer_sample_counts: SampleCountFlags,
pub sampled_image_depth_sample_counts: SampleCountFlags,
pub sampled_image_stencil_sample_counts: SampleCountFlags,
pub storage_image_sample_counts: SampleCountFlags,
pub max_sample_mask_words: u32,
pub timestamp_compute_and_graphics: Bool32,
pub timestamp_period: f32,
pub max_clip_distances: u32,
pub max_cull_distances: u32,
pub max_combined_clip_and_cull_distances: u32,
pub discrete_queue_priorities: u32,
pub point_size_range: [f32; 2],
pub line_width_range: [f32; 2],
pub point_size_granularity: f32,
pub line_width_granularity: f32,
pub strict_lines: Bool32,
pub standard_sample_locations: Bool32,
pub optimal_buffer_copy_offset_alignment: DeviceSize,
pub optimal_buffer_copy_row_pitch_alignment: DeviceSize,
pub non_coherent_atom_size: DeviceSize,
}
impl ::std::default::Default for PhysicalDeviceLimits {
fn default() -> PhysicalDeviceLimits {
PhysicalDeviceLimits {
max_image_dimension1_d: u32::default(),
max_image_dimension2_d: u32::default(),
max_image_dimension3_d: u32::default(),
max_image_dimension_cube: u32::default(),
max_image_array_layers: u32::default(),
max_texel_buffer_elements: u32::default(),
max_uniform_buffer_range: u32::default(),
max_storage_buffer_range: u32::default(),
max_push_constants_size: u32::default(),
max_memory_allocation_count: u32::default(),
max_sampler_allocation_count: u32::default(),
buffer_image_granularity: DeviceSize::default(),
sparse_address_space_size: DeviceSize::default(),
max_bound_descriptor_sets: u32::default(),
max_per_stage_descriptor_samplers: u32::default(),
max_per_stage_descriptor_uniform_buffers: u32::default(),
max_per_stage_descriptor_storage_buffers: u32::default(),
max_per_stage_descriptor_sampled_images: u32::default(),
max_per_stage_descriptor_storage_images: u32::default(),
max_per_stage_descriptor_input_attachments: u32::default(),
max_per_stage_resources: u32::default(),
max_descriptor_set_samplers: u32::default(),
max_descriptor_set_uniform_buffers: u32::default(),
max_descriptor_set_uniform_buffers_dynamic: u32::default(),
max_descriptor_set_storage_buffers: u32::default(),
max_descriptor_set_storage_buffers_dynamic: u32::default(),
max_descriptor_set_sampled_images: u32::default(),
max_descriptor_set_storage_images: u32::default(),
max_descriptor_set_input_attachments: u32::default(),
max_vertex_input_attributes: u32::default(),
max_vertex_input_bindings: u32::default(),
max_vertex_input_attribute_offset: u32::default(),
max_vertex_input_binding_stride: u32::default(),
max_vertex_output_components: u32::default(),
max_tessellation_generation_level: u32::default(),
max_tessellation_patch_size: u32::default(),
max_tessellation_control_per_vertex_input_components: u32::default(),
max_tessellation_control_per_vertex_output_components: u32::default(),
max_tessellation_control_per_patch_output_components: u32::default(),
max_tessellation_control_total_output_components: u32::default(),
max_tessellation_evaluation_input_components: u32::default(),
max_tessellation_evaluation_output_components: u32::default(),
max_geometry_shader_invocations: u32::default(),
max_geometry_input_components: u32::default(),
max_geometry_output_components: u32::default(),
max_geometry_output_vertices: u32::default(),
max_geometry_total_output_components: u32::default(),
max_fragment_input_components: u32::default(),
max_fragment_output_attachments: u32::default(),
max_fragment_dual_src_attachments: u32::default(),
max_fragment_combined_output_resources: u32::default(),
max_compute_shared_memory_size: u32::default(),
max_compute_work_group_count: unsafe { ::std::mem::zeroed() },
max_compute_work_group_invocations: u32::default(),
max_compute_work_group_size: unsafe { ::std::mem::zeroed() },
sub_pixel_precision_bits: u32::default(),
sub_texel_precision_bits: u32::default(),
mipmap_precision_bits: u32::default(),
max_draw_indexed_index_value: u32::default(),
max_draw_indirect_count: u32::default(),
max_sampler_lod_bias: f32::default(),
max_sampler_anisotropy: f32::default(),
max_viewports: u32::default(),
max_viewport_dimensions: unsafe { ::std::mem::zeroed() },
viewport_bounds_range: unsafe { ::std::mem::zeroed() },
viewport_sub_pixel_bits: u32::default(),
min_memory_map_alignment: usize::default(),
min_texel_buffer_offset_alignment: DeviceSize::default(),
min_uniform_buffer_offset_alignment: DeviceSize::default(),
min_storage_buffer_offset_alignment: DeviceSize::default(),
min_texel_offset: i32::default(),
max_texel_offset: u32::default(),
min_texel_gather_offset: i32::default(),
max_texel_gather_offset: u32::default(),
min_interpolation_offset: f32::default(),
max_interpolation_offset: f32::default(),
sub_pixel_interpolation_offset_bits: u32::default(),
max_framebuffer_width: u32::default(),
max_framebuffer_height: u32::default(),
max_framebuffer_layers: u32::default(),
framebuffer_color_sample_counts: SampleCountFlags::default(),
framebuffer_depth_sample_counts: SampleCountFlags::default(),
framebuffer_stencil_sample_counts: SampleCountFlags::default(),
framebuffer_no_attachments_sample_counts: SampleCountFlags::default(),
max_color_attachments: u32::default(),
sampled_image_color_sample_counts: SampleCountFlags::default(),
sampled_image_integer_sample_counts: SampleCountFlags::default(),
sampled_image_depth_sample_counts: SampleCountFlags::default(),
sampled_image_stencil_sample_counts: SampleCountFlags::default(),
storage_image_sample_counts: SampleCountFlags::default(),
max_sample_mask_words: u32::default(),
timestamp_compute_and_graphics: Bool32::default(),
timestamp_period: f32::default(),
max_clip_distances: u32::default(),
max_cull_distances: u32::default(),
max_combined_clip_and_cull_distances: u32::default(),
discrete_queue_priorities: u32::default(),
point_size_range: unsafe { ::std::mem::zeroed() },
line_width_range: unsafe { ::std::mem::zeroed() },
point_size_granularity: f32::default(),
line_width_granularity: f32::default(),
strict_lines: Bool32::default(),
standard_sample_locations: Bool32::default(),
optimal_buffer_copy_offset_alignment: DeviceSize::default(),
optimal_buffer_copy_row_pitch_alignment: DeviceSize::default(),
non_coherent_atom_size: DeviceSize::default(),
}
}
}
impl PhysicalDeviceLimits {
pub fn builder<'a>() -> PhysicalDeviceLimitsBuilder<'a> {
PhysicalDeviceLimitsBuilder {
inner: PhysicalDeviceLimits::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDeviceLimitsBuilder<'a> {
inner: PhysicalDeviceLimits,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for PhysicalDeviceLimitsBuilder<'a> {
type Target = PhysicalDeviceLimits;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceLimitsBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDeviceLimitsBuilder<'a> {
pub fn max_image_dimension1_d(
mut self,
max_image_dimension1_d: u32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.max_image_dimension1_d = max_image_dimension1_d;
self
}
pub fn max_image_dimension2_d(
mut self,
max_image_dimension2_d: u32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.max_image_dimension2_d = max_image_dimension2_d;
self
}
pub fn max_image_dimension3_d(
mut self,
max_image_dimension3_d: u32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.max_image_dimension3_d = max_image_dimension3_d;
self
}
pub fn max_image_dimension_cube(
mut self,
max_image_dimension_cube: u32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.max_image_dimension_cube = max_image_dimension_cube;
self
}
pub fn max_image_array_layers(
mut self,
max_image_array_layers: u32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.max_image_array_layers = max_image_array_layers;
self
}
pub fn max_texel_buffer_elements(
mut self,
max_texel_buffer_elements: u32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.max_texel_buffer_elements = max_texel_buffer_elements;
self
}
pub fn max_uniform_buffer_range(
mut self,
max_uniform_buffer_range: u32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.max_uniform_buffer_range = max_uniform_buffer_range;
self
}
pub fn max_storage_buffer_range(
mut self,
max_storage_buffer_range: u32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.max_storage_buffer_range = max_storage_buffer_range;
self
}
pub fn max_push_constants_size(
mut self,
max_push_constants_size: u32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.max_push_constants_size = max_push_constants_size;
self
}
pub fn max_memory_allocation_count(
mut self,
max_memory_allocation_count: u32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.max_memory_allocation_count = max_memory_allocation_count;
self
}
pub fn max_sampler_allocation_count(
mut self,
max_sampler_allocation_count: u32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.max_sampler_allocation_count = max_sampler_allocation_count;
self
}
pub fn buffer_image_granularity(
mut self,
buffer_image_granularity: DeviceSize,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.buffer_image_granularity = buffer_image_granularity;
self
}
pub fn sparse_address_space_size(
mut self,
sparse_address_space_size: DeviceSize,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.sparse_address_space_size = sparse_address_space_size;
self
}
pub fn max_bound_descriptor_sets(
mut self,
max_bound_descriptor_sets: u32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.max_bound_descriptor_sets = max_bound_descriptor_sets;
self
}
pub fn max_per_stage_descriptor_samplers(
mut self,
max_per_stage_descriptor_samplers: u32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.max_per_stage_descriptor_samplers = max_per_stage_descriptor_samplers;
self
}
pub fn max_per_stage_descriptor_uniform_buffers(
mut self,
max_per_stage_descriptor_uniform_buffers: u32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.max_per_stage_descriptor_uniform_buffers =
max_per_stage_descriptor_uniform_buffers;
self
}
pub fn max_per_stage_descriptor_storage_buffers(
mut self,
max_per_stage_descriptor_storage_buffers: u32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.max_per_stage_descriptor_storage_buffers =
max_per_stage_descriptor_storage_buffers;
self
}
pub fn max_per_stage_descriptor_sampled_images(
mut self,
max_per_stage_descriptor_sampled_images: u32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.max_per_stage_descriptor_sampled_images =
max_per_stage_descriptor_sampled_images;
self
}
pub fn max_per_stage_descriptor_storage_images(
mut self,
max_per_stage_descriptor_storage_images: u32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.max_per_stage_descriptor_storage_images =
max_per_stage_descriptor_storage_images;
self
}
pub fn max_per_stage_descriptor_input_attachments(
mut self,
max_per_stage_descriptor_input_attachments: u32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.max_per_stage_descriptor_input_attachments =
max_per_stage_descriptor_input_attachments;
self
}
pub fn max_per_stage_resources(
mut self,
max_per_stage_resources: u32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.max_per_stage_resources = max_per_stage_resources;
self
}
pub fn max_descriptor_set_samplers(
mut self,
max_descriptor_set_samplers: u32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.max_descriptor_set_samplers = max_descriptor_set_samplers;
self
}
pub fn max_descriptor_set_uniform_buffers(
mut self,
max_descriptor_set_uniform_buffers: u32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.max_descriptor_set_uniform_buffers = max_descriptor_set_uniform_buffers;
self
}
pub fn max_descriptor_set_uniform_buffers_dynamic(
mut self,
max_descriptor_set_uniform_buffers_dynamic: u32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.max_descriptor_set_uniform_buffers_dynamic =
max_descriptor_set_uniform_buffers_dynamic;
self
}
pub fn max_descriptor_set_storage_buffers(
mut self,
max_descriptor_set_storage_buffers: u32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.max_descriptor_set_storage_buffers = max_descriptor_set_storage_buffers;
self
}
pub fn max_descriptor_set_storage_buffers_dynamic(
mut self,
max_descriptor_set_storage_buffers_dynamic: u32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.max_descriptor_set_storage_buffers_dynamic =
max_descriptor_set_storage_buffers_dynamic;
self
}
pub fn max_descriptor_set_sampled_images(
mut self,
max_descriptor_set_sampled_images: u32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.max_descriptor_set_sampled_images = max_descriptor_set_sampled_images;
self
}
pub fn max_descriptor_set_storage_images(
mut self,
max_descriptor_set_storage_images: u32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.max_descriptor_set_storage_images = max_descriptor_set_storage_images;
self
}
pub fn max_descriptor_set_input_attachments(
mut self,
max_descriptor_set_input_attachments: u32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.max_descriptor_set_input_attachments = max_descriptor_set_input_attachments;
self
}
pub fn max_vertex_input_attributes(
mut self,
max_vertex_input_attributes: u32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.max_vertex_input_attributes = max_vertex_input_attributes;
self
}
pub fn max_vertex_input_bindings(
mut self,
max_vertex_input_bindings: u32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.max_vertex_input_bindings = max_vertex_input_bindings;
self
}
pub fn max_vertex_input_attribute_offset(
mut self,
max_vertex_input_attribute_offset: u32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.max_vertex_input_attribute_offset = max_vertex_input_attribute_offset;
self
}
pub fn max_vertex_input_binding_stride(
mut self,
max_vertex_input_binding_stride: u32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.max_vertex_input_binding_stride = max_vertex_input_binding_stride;
self
}
pub fn max_vertex_output_components(
mut self,
max_vertex_output_components: u32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.max_vertex_output_components = max_vertex_output_components;
self
}
pub fn max_tessellation_generation_level(
mut self,
max_tessellation_generation_level: u32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.max_tessellation_generation_level = max_tessellation_generation_level;
self
}
pub fn max_tessellation_patch_size(
mut self,
max_tessellation_patch_size: u32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.max_tessellation_patch_size = max_tessellation_patch_size;
self
}
pub fn max_tessellation_control_per_vertex_input_components(
mut self,
max_tessellation_control_per_vertex_input_components: u32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner
.max_tessellation_control_per_vertex_input_components =
max_tessellation_control_per_vertex_input_components;
self
}
pub fn max_tessellation_control_per_vertex_output_components(
mut self,
max_tessellation_control_per_vertex_output_components: u32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner
.max_tessellation_control_per_vertex_output_components =
max_tessellation_control_per_vertex_output_components;
self
}
pub fn max_tessellation_control_per_patch_output_components(
mut self,
max_tessellation_control_per_patch_output_components: u32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner
.max_tessellation_control_per_patch_output_components =
max_tessellation_control_per_patch_output_components;
self
}
pub fn max_tessellation_control_total_output_components(
mut self,
max_tessellation_control_total_output_components: u32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.max_tessellation_control_total_output_components =
max_tessellation_control_total_output_components;
self
}
pub fn max_tessellation_evaluation_input_components(
mut self,
max_tessellation_evaluation_input_components: u32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.max_tessellation_evaluation_input_components =
max_tessellation_evaluation_input_components;
self
}
pub fn max_tessellation_evaluation_output_components(
mut self,
max_tessellation_evaluation_output_components: u32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.max_tessellation_evaluation_output_components =
max_tessellation_evaluation_output_components;
self
}
pub fn max_geometry_shader_invocations(
mut self,
max_geometry_shader_invocations: u32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.max_geometry_shader_invocations = max_geometry_shader_invocations;
self
}
pub fn max_geometry_input_components(
mut self,
max_geometry_input_components: u32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.max_geometry_input_components = max_geometry_input_components;
self
}
pub fn max_geometry_output_components(
mut self,
max_geometry_output_components: u32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.max_geometry_output_components = max_geometry_output_components;
self
}
pub fn max_geometry_output_vertices(
mut self,
max_geometry_output_vertices: u32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.max_geometry_output_vertices = max_geometry_output_vertices;
self
}
pub fn max_geometry_total_output_components(
mut self,
max_geometry_total_output_components: u32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.max_geometry_total_output_components = max_geometry_total_output_components;
self
}
pub fn max_fragment_input_components(
mut self,
max_fragment_input_components: u32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.max_fragment_input_components = max_fragment_input_components;
self
}
pub fn max_fragment_output_attachments(
mut self,
max_fragment_output_attachments: u32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.max_fragment_output_attachments = max_fragment_output_attachments;
self
}
pub fn max_fragment_dual_src_attachments(
mut self,
max_fragment_dual_src_attachments: u32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.max_fragment_dual_src_attachments = max_fragment_dual_src_attachments;
self
}
pub fn max_fragment_combined_output_resources(
mut self,
max_fragment_combined_output_resources: u32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.max_fragment_combined_output_resources = max_fragment_combined_output_resources;
self
}
pub fn max_compute_shared_memory_size(
mut self,
max_compute_shared_memory_size: u32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.max_compute_shared_memory_size = max_compute_shared_memory_size;
self
}
pub fn max_compute_work_group_count(
mut self,
max_compute_work_group_count: [u32; 3],
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.max_compute_work_group_count = max_compute_work_group_count;
self
}
pub fn max_compute_work_group_invocations(
mut self,
max_compute_work_group_invocations: u32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.max_compute_work_group_invocations = max_compute_work_group_invocations;
self
}
pub fn max_compute_work_group_size(
mut self,
max_compute_work_group_size: [u32; 3],
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.max_compute_work_group_size = max_compute_work_group_size;
self
}
pub fn sub_pixel_precision_bits(
mut self,
sub_pixel_precision_bits: u32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.sub_pixel_precision_bits = sub_pixel_precision_bits;
self
}
pub fn sub_texel_precision_bits(
mut self,
sub_texel_precision_bits: u32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.sub_texel_precision_bits = sub_texel_precision_bits;
self
}
pub fn mipmap_precision_bits(
mut self,
mipmap_precision_bits: u32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.mipmap_precision_bits = mipmap_precision_bits;
self
}
pub fn max_draw_indexed_index_value(
mut self,
max_draw_indexed_index_value: u32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.max_draw_indexed_index_value = max_draw_indexed_index_value;
self
}
pub fn max_draw_indirect_count(
mut self,
max_draw_indirect_count: u32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.max_draw_indirect_count = max_draw_indirect_count;
self
}
pub fn max_sampler_lod_bias(
mut self,
max_sampler_lod_bias: f32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.max_sampler_lod_bias = max_sampler_lod_bias;
self
}
pub fn max_sampler_anisotropy(
mut self,
max_sampler_anisotropy: f32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.max_sampler_anisotropy = max_sampler_anisotropy;
self
}
pub fn max_viewports(mut self, max_viewports: u32) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.max_viewports = max_viewports;
self
}
pub fn max_viewport_dimensions(
mut self,
max_viewport_dimensions: [u32; 2],
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.max_viewport_dimensions = max_viewport_dimensions;
self
}
pub fn viewport_bounds_range(
mut self,
viewport_bounds_range: [f32; 2],
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.viewport_bounds_range = viewport_bounds_range;
self
}
pub fn viewport_sub_pixel_bits(
mut self,
viewport_sub_pixel_bits: u32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.viewport_sub_pixel_bits = viewport_sub_pixel_bits;
self
}
pub fn min_memory_map_alignment(
mut self,
min_memory_map_alignment: usize,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.min_memory_map_alignment = min_memory_map_alignment;
self
}
pub fn min_texel_buffer_offset_alignment(
mut self,
min_texel_buffer_offset_alignment: DeviceSize,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.min_texel_buffer_offset_alignment = min_texel_buffer_offset_alignment;
self
}
pub fn min_uniform_buffer_offset_alignment(
mut self,
min_uniform_buffer_offset_alignment: DeviceSize,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.min_uniform_buffer_offset_alignment = min_uniform_buffer_offset_alignment;
self
}
pub fn min_storage_buffer_offset_alignment(
mut self,
min_storage_buffer_offset_alignment: DeviceSize,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.min_storage_buffer_offset_alignment = min_storage_buffer_offset_alignment;
self
}
pub fn min_texel_offset(mut self, min_texel_offset: i32) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.min_texel_offset = min_texel_offset;
self
}
pub fn max_texel_offset(mut self, max_texel_offset: u32) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.max_texel_offset = max_texel_offset;
self
}
pub fn min_texel_gather_offset(
mut self,
min_texel_gather_offset: i32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.min_texel_gather_offset = min_texel_gather_offset;
self
}
pub fn max_texel_gather_offset(
mut self,
max_texel_gather_offset: u32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.max_texel_gather_offset = max_texel_gather_offset;
self
}
pub fn min_interpolation_offset(
mut self,
min_interpolation_offset: f32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.min_interpolation_offset = min_interpolation_offset;
self
}
pub fn max_interpolation_offset(
mut self,
max_interpolation_offset: f32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.max_interpolation_offset = max_interpolation_offset;
self
}
pub fn sub_pixel_interpolation_offset_bits(
mut self,
sub_pixel_interpolation_offset_bits: u32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.sub_pixel_interpolation_offset_bits = sub_pixel_interpolation_offset_bits;
self
}
pub fn max_framebuffer_width(
mut self,
max_framebuffer_width: u32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.max_framebuffer_width = max_framebuffer_width;
self
}
pub fn max_framebuffer_height(
mut self,
max_framebuffer_height: u32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.max_framebuffer_height = max_framebuffer_height;
self
}
pub fn max_framebuffer_layers(
mut self,
max_framebuffer_layers: u32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.max_framebuffer_layers = max_framebuffer_layers;
self
}
pub fn framebuffer_color_sample_counts(
mut self,
framebuffer_color_sample_counts: SampleCountFlags,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.framebuffer_color_sample_counts = framebuffer_color_sample_counts;
self
}
pub fn framebuffer_depth_sample_counts(
mut self,
framebuffer_depth_sample_counts: SampleCountFlags,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.framebuffer_depth_sample_counts = framebuffer_depth_sample_counts;
self
}
pub fn framebuffer_stencil_sample_counts(
mut self,
framebuffer_stencil_sample_counts: SampleCountFlags,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.framebuffer_stencil_sample_counts = framebuffer_stencil_sample_counts;
self
}
pub fn framebuffer_no_attachments_sample_counts(
mut self,
framebuffer_no_attachments_sample_counts: SampleCountFlags,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.framebuffer_no_attachments_sample_counts =
framebuffer_no_attachments_sample_counts;
self
}
pub fn max_color_attachments(
mut self,
max_color_attachments: u32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.max_color_attachments = max_color_attachments;
self
}
pub fn sampled_image_color_sample_counts(
mut self,
sampled_image_color_sample_counts: SampleCountFlags,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.sampled_image_color_sample_counts = sampled_image_color_sample_counts;
self
}
pub fn sampled_image_integer_sample_counts(
mut self,
sampled_image_integer_sample_counts: SampleCountFlags,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.sampled_image_integer_sample_counts = sampled_image_integer_sample_counts;
self
}
pub fn sampled_image_depth_sample_counts(
mut self,
sampled_image_depth_sample_counts: SampleCountFlags,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.sampled_image_depth_sample_counts = sampled_image_depth_sample_counts;
self
}
pub fn sampled_image_stencil_sample_counts(
mut self,
sampled_image_stencil_sample_counts: SampleCountFlags,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.sampled_image_stencil_sample_counts = sampled_image_stencil_sample_counts;
self
}
pub fn storage_image_sample_counts(
mut self,
storage_image_sample_counts: SampleCountFlags,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.storage_image_sample_counts = storage_image_sample_counts;
self
}
pub fn max_sample_mask_words(
mut self,
max_sample_mask_words: u32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.max_sample_mask_words = max_sample_mask_words;
self
}
pub fn timestamp_compute_and_graphics(
mut self,
timestamp_compute_and_graphics: bool,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.timestamp_compute_and_graphics = timestamp_compute_and_graphics.into();
self
}
pub fn timestamp_period(mut self, timestamp_period: f32) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.timestamp_period = timestamp_period;
self
}
pub fn max_clip_distances(
mut self,
max_clip_distances: u32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.max_clip_distances = max_clip_distances;
self
}
pub fn max_cull_distances(
mut self,
max_cull_distances: u32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.max_cull_distances = max_cull_distances;
self
}
pub fn max_combined_clip_and_cull_distances(
mut self,
max_combined_clip_and_cull_distances: u32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.max_combined_clip_and_cull_distances = max_combined_clip_and_cull_distances;
self
}
pub fn discrete_queue_priorities(
mut self,
discrete_queue_priorities: u32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.discrete_queue_priorities = discrete_queue_priorities;
self
}
pub fn point_size_range(
mut self,
point_size_range: [f32; 2],
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.point_size_range = point_size_range;
self
}
pub fn line_width_range(
mut self,
line_width_range: [f32; 2],
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.line_width_range = line_width_range;
self
}
pub fn point_size_granularity(
mut self,
point_size_granularity: f32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.point_size_granularity = point_size_granularity;
self
}
pub fn line_width_granularity(
mut self,
line_width_granularity: f32,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.line_width_granularity = line_width_granularity;
self
}
pub fn strict_lines(mut self, strict_lines: bool) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.strict_lines = strict_lines.into();
self
}
pub fn standard_sample_locations(
mut self,
standard_sample_locations: bool,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.standard_sample_locations = standard_sample_locations.into();
self
}
pub fn optimal_buffer_copy_offset_alignment(
mut self,
optimal_buffer_copy_offset_alignment: DeviceSize,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.optimal_buffer_copy_offset_alignment = optimal_buffer_copy_offset_alignment;
self
}
pub fn optimal_buffer_copy_row_pitch_alignment(
mut self,
optimal_buffer_copy_row_pitch_alignment: DeviceSize,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.optimal_buffer_copy_row_pitch_alignment =
optimal_buffer_copy_row_pitch_alignment;
self
}
pub fn non_coherent_atom_size(
mut self,
non_coherent_atom_size: DeviceSize,
) -> PhysicalDeviceLimitsBuilder<'a> {
self.inner.non_coherent_atom_size = non_coherent_atom_size;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDeviceLimits {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkSemaphoreCreateInfo.html>"]
pub struct SemaphoreCreateInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub flags: SemaphoreCreateFlags,
}
impl ::std::default::Default for SemaphoreCreateInfo {
fn default() -> SemaphoreCreateInfo {
SemaphoreCreateInfo {
s_type: StructureType::SEMAPHORE_CREATE_INFO,
p_next: ::std::ptr::null(),
flags: SemaphoreCreateFlags::default(),
}
}
}
impl SemaphoreCreateInfo {
pub fn builder<'a>() -> SemaphoreCreateInfoBuilder<'a> {
SemaphoreCreateInfoBuilder {
inner: SemaphoreCreateInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct SemaphoreCreateInfoBuilder<'a> {
inner: SemaphoreCreateInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsSemaphoreCreateInfo {}
impl<'a> ::std::ops::Deref for SemaphoreCreateInfoBuilder<'a> {
type Target = SemaphoreCreateInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for SemaphoreCreateInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> SemaphoreCreateInfoBuilder<'a> {
pub fn flags(mut self, flags: SemaphoreCreateFlags) -> SemaphoreCreateInfoBuilder<'a> {
self.inner.flags = flags;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsSemaphoreCreateInfo>(
mut self,
next: &'a mut T,
) -> SemaphoreCreateInfoBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> SemaphoreCreateInfo {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkQueryPoolCreateInfo.html>"]
pub struct QueryPoolCreateInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub flags: QueryPoolCreateFlags,
pub query_type: QueryType,
pub query_count: u32,
pub pipeline_statistics: QueryPipelineStatisticFlags,
}
impl ::std::default::Default for QueryPoolCreateInfo {
fn default() -> QueryPoolCreateInfo {
QueryPoolCreateInfo {
s_type: StructureType::QUERY_POOL_CREATE_INFO,
p_next: ::std::ptr::null(),
flags: QueryPoolCreateFlags::default(),
query_type: QueryType::default(),
query_count: u32::default(),
pipeline_statistics: QueryPipelineStatisticFlags::default(),
}
}
}
impl QueryPoolCreateInfo {
pub fn builder<'a>() -> QueryPoolCreateInfoBuilder<'a> {
QueryPoolCreateInfoBuilder {
inner: QueryPoolCreateInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct QueryPoolCreateInfoBuilder<'a> {
inner: QueryPoolCreateInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsQueryPoolCreateInfo {}
impl<'a> ::std::ops::Deref for QueryPoolCreateInfoBuilder<'a> {
type Target = QueryPoolCreateInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for QueryPoolCreateInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> QueryPoolCreateInfoBuilder<'a> {
pub fn flags(mut self, flags: QueryPoolCreateFlags) -> QueryPoolCreateInfoBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn query_type(mut self, query_type: QueryType) -> QueryPoolCreateInfoBuilder<'a> {
self.inner.query_type = query_type;
self
}
pub fn query_count(mut self, query_count: u32) -> QueryPoolCreateInfoBuilder<'a> {
self.inner.query_count = query_count;
self
}
pub fn pipeline_statistics(
mut self,
pipeline_statistics: QueryPipelineStatisticFlags,
) -> QueryPoolCreateInfoBuilder<'a> {
self.inner.pipeline_statistics = pipeline_statistics;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsQueryPoolCreateInfo>(
mut self,
next: &'a mut T,
) -> QueryPoolCreateInfoBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> QueryPoolCreateInfo {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkFramebufferCreateInfo.html>"]
pub struct FramebufferCreateInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub flags: FramebufferCreateFlags,
pub render_pass: RenderPass,
pub attachment_count: u32,
pub p_attachments: *const ImageView,
pub width: u32,
pub height: u32,
pub layers: u32,
}
impl ::std::default::Default for FramebufferCreateInfo {
fn default() -> FramebufferCreateInfo {
FramebufferCreateInfo {
s_type: StructureType::FRAMEBUFFER_CREATE_INFO,
p_next: ::std::ptr::null(),
flags: FramebufferCreateFlags::default(),
render_pass: RenderPass::default(),
attachment_count: u32::default(),
p_attachments: ::std::ptr::null(),
width: u32::default(),
height: u32::default(),
layers: u32::default(),
}
}
}
impl FramebufferCreateInfo {
pub fn builder<'a>() -> FramebufferCreateInfoBuilder<'a> {
FramebufferCreateInfoBuilder {
inner: FramebufferCreateInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct FramebufferCreateInfoBuilder<'a> {
inner: FramebufferCreateInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsFramebufferCreateInfo {}
impl<'a> ::std::ops::Deref for FramebufferCreateInfoBuilder<'a> {
type Target = FramebufferCreateInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for FramebufferCreateInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> FramebufferCreateInfoBuilder<'a> {
pub fn flags(mut self, flags: FramebufferCreateFlags) -> FramebufferCreateInfoBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn render_pass(mut self, render_pass: RenderPass) -> FramebufferCreateInfoBuilder<'a> {
self.inner.render_pass = render_pass;
self
}
pub fn attachments(mut self, attachments: &'a [ImageView]) -> FramebufferCreateInfoBuilder<'a> {
self.inner.attachment_count = attachments.len() as _;
self.inner.p_attachments = attachments.as_ptr();
self
}
pub fn width(mut self, width: u32) -> FramebufferCreateInfoBuilder<'a> {
self.inner.width = width;
self
}
pub fn height(mut self, height: u32) -> FramebufferCreateInfoBuilder<'a> {
self.inner.height = height;
self
}
pub fn layers(mut self, layers: u32) -> FramebufferCreateInfoBuilder<'a> {
self.inner.layers = layers;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsFramebufferCreateInfo>(
mut self,
next: &'a mut T,
) -> FramebufferCreateInfoBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> FramebufferCreateInfo {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDrawIndirectCommand.html>"]
pub struct DrawIndirectCommand {
pub vertex_count: u32,
pub instance_count: u32,
pub first_vertex: u32,
pub first_instance: u32,
}
impl DrawIndirectCommand {
pub fn builder<'a>() -> DrawIndirectCommandBuilder<'a> {
DrawIndirectCommandBuilder {
inner: DrawIndirectCommand::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct DrawIndirectCommandBuilder<'a> {
inner: DrawIndirectCommand,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for DrawIndirectCommandBuilder<'a> {
type Target = DrawIndirectCommand;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for DrawIndirectCommandBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> DrawIndirectCommandBuilder<'a> {
pub fn vertex_count(mut self, vertex_count: u32) -> DrawIndirectCommandBuilder<'a> {
self.inner.vertex_count = vertex_count;
self
}
pub fn instance_count(mut self, instance_count: u32) -> DrawIndirectCommandBuilder<'a> {
self.inner.instance_count = instance_count;
self
}
pub fn first_vertex(mut self, first_vertex: u32) -> DrawIndirectCommandBuilder<'a> {
self.inner.first_vertex = first_vertex;
self
}
pub fn first_instance(mut self, first_instance: u32) -> DrawIndirectCommandBuilder<'a> {
self.inner.first_instance = first_instance;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> DrawIndirectCommand {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDrawIndexedIndirectCommand.html>"]
pub struct DrawIndexedIndirectCommand {
pub index_count: u32,
pub instance_count: u32,
pub first_index: u32,
pub vertex_offset: i32,
pub first_instance: u32,
}
impl DrawIndexedIndirectCommand {
pub fn builder<'a>() -> DrawIndexedIndirectCommandBuilder<'a> {
DrawIndexedIndirectCommandBuilder {
inner: DrawIndexedIndirectCommand::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct DrawIndexedIndirectCommandBuilder<'a> {
inner: DrawIndexedIndirectCommand,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for DrawIndexedIndirectCommandBuilder<'a> {
type Target = DrawIndexedIndirectCommand;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for DrawIndexedIndirectCommandBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> DrawIndexedIndirectCommandBuilder<'a> {
pub fn index_count(mut self, index_count: u32) -> DrawIndexedIndirectCommandBuilder<'a> {
self.inner.index_count = index_count;
self
}
pub fn instance_count(mut self, instance_count: u32) -> DrawIndexedIndirectCommandBuilder<'a> {
self.inner.instance_count = instance_count;
self
}
pub fn first_index(mut self, first_index: u32) -> DrawIndexedIndirectCommandBuilder<'a> {
self.inner.first_index = first_index;
self
}
pub fn vertex_offset(mut self, vertex_offset: i32) -> DrawIndexedIndirectCommandBuilder<'a> {
self.inner.vertex_offset = vertex_offset;
self
}
pub fn first_instance(mut self, first_instance: u32) -> DrawIndexedIndirectCommandBuilder<'a> {
self.inner.first_instance = first_instance;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> DrawIndexedIndirectCommand {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDispatchIndirectCommand.html>"]
pub struct DispatchIndirectCommand {
pub x: u32,
pub y: u32,
pub z: u32,
}
impl DispatchIndirectCommand {
pub fn builder<'a>() -> DispatchIndirectCommandBuilder<'a> {
DispatchIndirectCommandBuilder {
inner: DispatchIndirectCommand::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct DispatchIndirectCommandBuilder<'a> {
inner: DispatchIndirectCommand,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for DispatchIndirectCommandBuilder<'a> {
type Target = DispatchIndirectCommand;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for DispatchIndirectCommandBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> DispatchIndirectCommandBuilder<'a> {
pub fn x(mut self, x: u32) -> DispatchIndirectCommandBuilder<'a> {
self.inner.x = x;
self
}
pub fn y(mut self, y: u32) -> DispatchIndirectCommandBuilder<'a> {
self.inner.y = y;
self
}
pub fn z(mut self, z: u32) -> DispatchIndirectCommandBuilder<'a> {
self.inner.z = z;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> DispatchIndirectCommand {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkSubmitInfo.html>"]
pub struct SubmitInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub wait_semaphore_count: u32,
pub p_wait_semaphores: *const Semaphore,
pub p_wait_dst_stage_mask: *const PipelineStageFlags,
pub command_buffer_count: u32,
pub p_command_buffers: *const CommandBuffer,
pub signal_semaphore_count: u32,
pub p_signal_semaphores: *const Semaphore,
}
impl ::std::default::Default for SubmitInfo {
fn default() -> SubmitInfo {
SubmitInfo {
s_type: StructureType::SUBMIT_INFO,
p_next: ::std::ptr::null(),
wait_semaphore_count: u32::default(),
p_wait_semaphores: ::std::ptr::null(),
p_wait_dst_stage_mask: ::std::ptr::null(),
command_buffer_count: u32::default(),
p_command_buffers: ::std::ptr::null(),
signal_semaphore_count: u32::default(),
p_signal_semaphores: ::std::ptr::null(),
}
}
}
impl SubmitInfo {
pub fn builder<'a>() -> SubmitInfoBuilder<'a> {
SubmitInfoBuilder {
inner: SubmitInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct SubmitInfoBuilder<'a> {
inner: SubmitInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsSubmitInfo {}
impl<'a> ::std::ops::Deref for SubmitInfoBuilder<'a> {
type Target = SubmitInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for SubmitInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> SubmitInfoBuilder<'a> {
pub fn wait_semaphores(mut self, wait_semaphores: &'a [Semaphore]) -> SubmitInfoBuilder<'a> {
self.inner.wait_semaphore_count = wait_semaphores.len() as _;
self.inner.p_wait_semaphores = wait_semaphores.as_ptr();
self
}
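#[doc = r" Must contain one entry per element of `wait_semaphores`; both setters write"]
#[doc = r" `wait_semaphore_count`, so the two slices are expected to have the same length."]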
pub fn wait_dst_stage_mask(
mut self,
wait_dst_stage_mask: &'a [PipelineStageFlags],
) -> SubmitInfoBuilder<'a> {
self.inner.wait_semaphore_count = wait_dst_stage_mask.len() as _;
self.inner.p_wait_dst_stage_mask = wait_dst_stage_mask.as_ptr();
self
}
pub fn command_buffers(
mut self,
command_buffers: &'a [CommandBuffer],
) -> SubmitInfoBuilder<'a> {
self.inner.command_buffer_count = command_buffers.len() as _;
self.inner.p_command_buffers = command_buffers.as_ptr();
self
}
pub fn signal_semaphores(
mut self,
signal_semaphores: &'a [Semaphore],
) -> SubmitInfoBuilder<'a> {
self.inner.signal_semaphore_count = signal_semaphores.len() as _;
self.inner.p_signal_semaphores = signal_semaphores.as_ptr();
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsSubmitInfo>(mut self, next: &'a mut T) -> SubmitInfoBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> SubmitInfo {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDisplayPropertiesKHR.html>"]
pub struct DisplayPropertiesKHR {
pub display: DisplayKHR,
pub display_name: *const c_char,
pub physical_dimensions: Extent2D,
pub physical_resolution: Extent2D,
pub supported_transforms: SurfaceTransformFlagsKHR,
pub plane_reorder_possible: Bool32,
pub persistent_content: Bool32,
}
impl ::std::default::Default for DisplayPropertiesKHR {
fn default() -> DisplayPropertiesKHR {
DisplayPropertiesKHR {
display: DisplayKHR::default(),
display_name: ::std::ptr::null(),
physical_dimensions: Extent2D::default(),
physical_resolution: Extent2D::default(),
supported_transforms: SurfaceTransformFlagsKHR::default(),
plane_reorder_possible: Bool32::default(),
persistent_content: Bool32::default(),
}
}
}
impl DisplayPropertiesKHR {
pub fn builder<'a>() -> DisplayPropertiesKHRBuilder<'a> {
DisplayPropertiesKHRBuilder {
inner: DisplayPropertiesKHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct DisplayPropertiesKHRBuilder<'a> {
inner: DisplayPropertiesKHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for DisplayPropertiesKHRBuilder<'a> {
type Target = DisplayPropertiesKHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for DisplayPropertiesKHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> DisplayPropertiesKHRBuilder<'a> {
pub fn display(mut self, display: DisplayKHR) -> DisplayPropertiesKHRBuilder<'a> {
self.inner.display = display;
self
}
pub fn display_name(mut self, display_name: *const c_char) -> DisplayPropertiesKHRBuilder<'a> {
self.inner.display_name = display_name;
self
}
pub fn physical_dimensions(
mut self,
physical_dimensions: Extent2D,
) -> DisplayPropertiesKHRBuilder<'a> {
self.inner.physical_dimensions = physical_dimensions;
self
}
pub fn physical_resolution(
mut self,
physical_resolution: Extent2D,
) -> DisplayPropertiesKHRBuilder<'a> {
self.inner.physical_resolution = physical_resolution;
self
}
pub fn supported_transforms(
mut self,
supported_transforms: SurfaceTransformFlagsKHR,
) -> DisplayPropertiesKHRBuilder<'a> {
self.inner.supported_transforms = supported_transforms;
self
}
pub fn plane_reorder_possible(
mut self,
plane_reorder_possible: bool,
) -> DisplayPropertiesKHRBuilder<'a> {
self.inner.plane_reorder_possible = plane_reorder_possible.into();
self
}
pub fn persistent_content(
mut self,
persistent_content: bool,
) -> DisplayPropertiesKHRBuilder<'a> {
self.inner.persistent_content = persistent_content.into();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> DisplayPropertiesKHR {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDisplayPlanePropertiesKHR.html>"]
pub struct DisplayPlanePropertiesKHR {
pub current_display: DisplayKHR,
pub current_stack_index: u32,
}
impl DisplayPlanePropertiesKHR {
pub fn builder<'a>() -> DisplayPlanePropertiesKHRBuilder<'a> {
DisplayPlanePropertiesKHRBuilder {
inner: DisplayPlanePropertiesKHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct DisplayPlanePropertiesKHRBuilder<'a> {
inner: DisplayPlanePropertiesKHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for DisplayPlanePropertiesKHRBuilder<'a> {
type Target = DisplayPlanePropertiesKHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for DisplayPlanePropertiesKHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> DisplayPlanePropertiesKHRBuilder<'a> {
pub fn current_display(
mut self,
current_display: DisplayKHR,
) -> DisplayPlanePropertiesKHRBuilder<'a> {
self.inner.current_display = current_display;
self
}
pub fn current_stack_index(
mut self,
current_stack_index: u32,
) -> DisplayPlanePropertiesKHRBuilder<'a> {
self.inner.current_stack_index = current_stack_index;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> DisplayPlanePropertiesKHR {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDisplayModeParametersKHR.html>"]
pub struct DisplayModeParametersKHR {
pub visible_region: Extent2D,
pub refresh_rate: u32,
}
impl DisplayModeParametersKHR {
pub fn builder<'a>() -> DisplayModeParametersKHRBuilder<'a> {
DisplayModeParametersKHRBuilder {
inner: DisplayModeParametersKHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct DisplayModeParametersKHRBuilder<'a> {
inner: DisplayModeParametersKHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for DisplayModeParametersKHRBuilder<'a> {
type Target = DisplayModeParametersKHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for DisplayModeParametersKHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> DisplayModeParametersKHRBuilder<'a> {
pub fn visible_region(
mut self,
visible_region: Extent2D,
) -> DisplayModeParametersKHRBuilder<'a> {
self.inner.visible_region = visible_region;
self
}
pub fn refresh_rate(mut self, refresh_rate: u32) -> DisplayModeParametersKHRBuilder<'a> {
self.inner.refresh_rate = refresh_rate;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> DisplayModeParametersKHR {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDisplayModePropertiesKHR.html>"]
pub struct DisplayModePropertiesKHR {
pub display_mode: DisplayModeKHR,
pub parameters: DisplayModeParametersKHR,
}
impl DisplayModePropertiesKHR {
pub fn builder<'a>() -> DisplayModePropertiesKHRBuilder<'a> {
DisplayModePropertiesKHRBuilder {
inner: DisplayModePropertiesKHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct DisplayModePropertiesKHRBuilder<'a> {
inner: DisplayModePropertiesKHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for DisplayModePropertiesKHRBuilder<'a> {
type Target = DisplayModePropertiesKHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for DisplayModePropertiesKHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> DisplayModePropertiesKHRBuilder<'a> {
pub fn display_mode(
mut self,
display_mode: DisplayModeKHR,
) -> DisplayModePropertiesKHRBuilder<'a> {
self.inner.display_mode = display_mode;
self
}
pub fn parameters(
mut self,
parameters: DisplayModeParametersKHR,
) -> DisplayModePropertiesKHRBuilder<'a> {
self.inner.parameters = parameters;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> DisplayModePropertiesKHR {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDisplayModeCreateInfoKHR.html>"]
pub struct DisplayModeCreateInfoKHR {
pub s_type: StructureType,
pub p_next: *const c_void,
pub flags: DisplayModeCreateFlagsKHR,
pub parameters: DisplayModeParametersKHR,
}
impl ::std::default::Default for DisplayModeCreateInfoKHR {
fn default() -> DisplayModeCreateInfoKHR {
DisplayModeCreateInfoKHR {
s_type: StructureType::DISPLAY_MODE_CREATE_INFO_KHR,
p_next: ::std::ptr::null(),
flags: DisplayModeCreateFlagsKHR::default(),
parameters: DisplayModeParametersKHR::default(),
}
}
}
impl DisplayModeCreateInfoKHR {
pub fn builder<'a>() -> DisplayModeCreateInfoKHRBuilder<'a> {
DisplayModeCreateInfoKHRBuilder {
inner: DisplayModeCreateInfoKHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct DisplayModeCreateInfoKHRBuilder<'a> {
inner: DisplayModeCreateInfoKHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsDisplayModeCreateInfoKHR {}
impl<'a> ::std::ops::Deref for DisplayModeCreateInfoKHRBuilder<'a> {
type Target = DisplayModeCreateInfoKHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for DisplayModeCreateInfoKHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> DisplayModeCreateInfoKHRBuilder<'a> {
pub fn flags(
mut self,
flags: DisplayModeCreateFlagsKHR,
) -> DisplayModeCreateInfoKHRBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn parameters(
mut self,
parameters: DisplayModeParametersKHR,
) -> DisplayModeCreateInfoKHRBuilder<'a> {
self.inner.parameters = parameters;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsDisplayModeCreateInfoKHR>(
mut self,
next: &'a mut T,
) -> DisplayModeCreateInfoKHRBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> DisplayModeCreateInfoKHR {
self.inner
}
}
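// Illustrative usage sketch (not part of the generated bindings): building a
// `DisplayModeCreateInfoKHR` with the builders above. The 1920x1080 visible
// region and the 60 Hz refresh rate (expressed in millihertz, as the display
// KHR extension expects) are placeholder values.
#[allow(dead_code)]
fn example_display_mode_create_info() -> DisplayModeCreateInfoKHR {
    let parameters = DisplayModeParametersKHR::builder()
        .visible_region(Extent2D {
            width: 1920,
            height: 1080,
        })
        .refresh_rate(60_000)
        .build();
    DisplayModeCreateInfoKHR::builder()
        .parameters(parameters)
        .build()
}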
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDisplayPlaneCapabilitiesKHR.html>"]
pub struct DisplayPlaneCapabilitiesKHR {
pub supported_alpha: DisplayPlaneAlphaFlagsKHR,
pub min_src_position: Offset2D,
pub max_src_position: Offset2D,
pub min_src_extent: Extent2D,
pub max_src_extent: Extent2D,
pub min_dst_position: Offset2D,
pub max_dst_position: Offset2D,
pub min_dst_extent: Extent2D,
pub max_dst_extent: Extent2D,
}
impl DisplayPlaneCapabilitiesKHR {
pub fn builder<'a>() -> DisplayPlaneCapabilitiesKHRBuilder<'a> {
DisplayPlaneCapabilitiesKHRBuilder {
inner: DisplayPlaneCapabilitiesKHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct DisplayPlaneCapabilitiesKHRBuilder<'a> {
inner: DisplayPlaneCapabilitiesKHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for DisplayPlaneCapabilitiesKHRBuilder<'a> {
type Target = DisplayPlaneCapabilitiesKHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for DisplayPlaneCapabilitiesKHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> DisplayPlaneCapabilitiesKHRBuilder<'a> {
pub fn supported_alpha(
mut self,
supported_alpha: DisplayPlaneAlphaFlagsKHR,
) -> DisplayPlaneCapabilitiesKHRBuilder<'a> {
self.inner.supported_alpha = supported_alpha;
self
}
pub fn min_src_position(
mut self,
min_src_position: Offset2D,
) -> DisplayPlaneCapabilitiesKHRBuilder<'a> {
self.inner.min_src_position = min_src_position;
self
}
pub fn max_src_position(
mut self,
max_src_position: Offset2D,
) -> DisplayPlaneCapabilitiesKHRBuilder<'a> {
self.inner.max_src_position = max_src_position;
self
}
pub fn min_src_extent(
mut self,
min_src_extent: Extent2D,
) -> DisplayPlaneCapabilitiesKHRBuilder<'a> {
self.inner.min_src_extent = min_src_extent;
self
}
pub fn max_src_extent(
mut self,
max_src_extent: Extent2D,
) -> DisplayPlaneCapabilitiesKHRBuilder<'a> {
self.inner.max_src_extent = max_src_extent;
self
}
pub fn min_dst_position(
mut self,
min_dst_position: Offset2D,
) -> DisplayPlaneCapabilitiesKHRBuilder<'a> {
self.inner.min_dst_position = min_dst_position;
self
}
pub fn max_dst_position(
mut self,
max_dst_position: Offset2D,
) -> DisplayPlaneCapabilitiesKHRBuilder<'a> {
self.inner.max_dst_position = max_dst_position;
self
}
pub fn min_dst_extent(
mut self,
min_dst_extent: Extent2D,
) -> DisplayPlaneCapabilitiesKHRBuilder<'a> {
self.inner.min_dst_extent = min_dst_extent;
self
}
pub fn max_dst_extent(
mut self,
max_dst_extent: Extent2D,
) -> DisplayPlaneCapabilitiesKHRBuilder<'a> {
self.inner.max_dst_extent = max_dst_extent;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> DisplayPlaneCapabilitiesKHR {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDisplaySurfaceCreateInfoKHR.html>"]
pub struct DisplaySurfaceCreateInfoKHR {
pub s_type: StructureType,
pub p_next: *const c_void,
pub flags: DisplaySurfaceCreateFlagsKHR,
pub display_mode: DisplayModeKHR,
pub plane_index: u32,
pub plane_stack_index: u32,
pub transform: SurfaceTransformFlagsKHR,
pub global_alpha: f32,
pub alpha_mode: DisplayPlaneAlphaFlagsKHR,
pub image_extent: Extent2D,
}
impl ::std::default::Default for DisplaySurfaceCreateInfoKHR {
fn default() -> DisplaySurfaceCreateInfoKHR {
DisplaySurfaceCreateInfoKHR {
s_type: StructureType::DISPLAY_SURFACE_CREATE_INFO_KHR,
p_next: ::std::ptr::null(),
flags: DisplaySurfaceCreateFlagsKHR::default(),
display_mode: DisplayModeKHR::default(),
plane_index: u32::default(),
plane_stack_index: u32::default(),
transform: SurfaceTransformFlagsKHR::default(),
global_alpha: f32::default(),
alpha_mode: DisplayPlaneAlphaFlagsKHR::default(),
image_extent: Extent2D::default(),
}
}
}
impl DisplaySurfaceCreateInfoKHR {
pub fn builder<'a>() -> DisplaySurfaceCreateInfoKHRBuilder<'a> {
DisplaySurfaceCreateInfoKHRBuilder {
inner: DisplaySurfaceCreateInfoKHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct DisplaySurfaceCreateInfoKHRBuilder<'a> {
inner: DisplaySurfaceCreateInfoKHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsDisplaySurfaceCreateInfoKHR {}
impl<'a> ::std::ops::Deref for DisplaySurfaceCreateInfoKHRBuilder<'a> {
type Target = DisplaySurfaceCreateInfoKHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for DisplaySurfaceCreateInfoKHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> DisplaySurfaceCreateInfoKHRBuilder<'a> {
pub fn flags(
mut self,
flags: DisplaySurfaceCreateFlagsKHR,
) -> DisplaySurfaceCreateInfoKHRBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn display_mode(
mut self,
display_mode: DisplayModeKHR,
) -> DisplaySurfaceCreateInfoKHRBuilder<'a> {
self.inner.display_mode = display_mode;
self
}
pub fn plane_index(mut self, plane_index: u32) -> DisplaySurfaceCreateInfoKHRBuilder<'a> {
self.inner.plane_index = plane_index;
self
}
pub fn plane_stack_index(
mut self,
plane_stack_index: u32,
) -> DisplaySurfaceCreateInfoKHRBuilder<'a> {
self.inner.plane_stack_index = plane_stack_index;
self
}
pub fn transform(
mut self,
transform: SurfaceTransformFlagsKHR,
) -> DisplaySurfaceCreateInfoKHRBuilder<'a> {
self.inner.transform = transform;
self
}
pub fn global_alpha(mut self, global_alpha: f32) -> DisplaySurfaceCreateInfoKHRBuilder<'a> {
self.inner.global_alpha = global_alpha;
self
}
pub fn alpha_mode(
mut self,
alpha_mode: DisplayPlaneAlphaFlagsKHR,
) -> DisplaySurfaceCreateInfoKHRBuilder<'a> {
self.inner.alpha_mode = alpha_mode;
self
}
pub fn image_extent(
mut self,
image_extent: Extent2D,
) -> DisplaySurfaceCreateInfoKHRBuilder<'a> {
self.inner.image_extent = image_extent;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsDisplaySurfaceCreateInfoKHR>(
mut self,
next: &'a mut T,
) -> DisplaySurfaceCreateInfoKHRBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> DisplaySurfaceCreateInfoKHR {
self.inner
}
}
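// Illustrative usage sketch (not part of the generated bindings): filling in a
// `DisplaySurfaceCreateInfoKHR`. The display mode handle comes from display
// enumeration; the plane indices, transform, alpha settings and extent used
// here are placeholders.
#[allow(dead_code)]
fn example_display_surface_create_info(
    display_mode: DisplayModeKHR,
) -> DisplaySurfaceCreateInfoKHR {
    DisplaySurfaceCreateInfoKHR::builder()
        .display_mode(display_mode)
        .plane_index(0)
        .plane_stack_index(0)
        .transform(SurfaceTransformFlagsKHR::IDENTITY)
        .global_alpha(1.0)
        .alpha_mode(DisplayPlaneAlphaFlagsKHR::OPAQUE)
        .image_extent(Extent2D {
            width: 1920,
            height: 1080,
        })
        .build()
}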
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDisplayPresentInfoKHR.html>"]
pub struct DisplayPresentInfoKHR {
pub s_type: StructureType,
pub p_next: *const c_void,
pub src_rect: Rect2D,
pub dst_rect: Rect2D,
pub persistent: Bool32,
}
impl ::std::default::Default for DisplayPresentInfoKHR {
fn default() -> DisplayPresentInfoKHR {
DisplayPresentInfoKHR {
s_type: StructureType::DISPLAY_PRESENT_INFO_KHR,
p_next: ::std::ptr::null(),
src_rect: Rect2D::default(),
dst_rect: Rect2D::default(),
persistent: Bool32::default(),
}
}
}
impl DisplayPresentInfoKHR {
pub fn builder<'a>() -> DisplayPresentInfoKHRBuilder<'a> {
DisplayPresentInfoKHRBuilder {
inner: DisplayPresentInfoKHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct DisplayPresentInfoKHRBuilder<'a> {
inner: DisplayPresentInfoKHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsPresentInfoKHR for DisplayPresentInfoKHRBuilder<'_> {}
unsafe impl ExtendsPresentInfoKHR for DisplayPresentInfoKHR {}
impl<'a> ::std::ops::Deref for DisplayPresentInfoKHRBuilder<'a> {
type Target = DisplayPresentInfoKHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for DisplayPresentInfoKHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> DisplayPresentInfoKHRBuilder<'a> {
pub fn src_rect(mut self, src_rect: Rect2D) -> DisplayPresentInfoKHRBuilder<'a> {
self.inner.src_rect = src_rect;
self
}
pub fn dst_rect(mut self, dst_rect: Rect2D) -> DisplayPresentInfoKHRBuilder<'a> {
self.inner.dst_rect = dst_rect;
self
}
pub fn persistent(mut self, persistent: bool) -> DisplayPresentInfoKHRBuilder<'a> {
self.inner.persistent = persistent.into();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> DisplayPresentInfoKHR {
self.inner
}
}
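// Illustrative usage sketch (not part of the generated bindings): a
// `DisplayPresentInfoKHR` that presents the full source rectangle onto an
// equally sized destination rectangle; the 640x480 extent is a placeholder.
#[allow(dead_code)]
fn example_display_present_info() -> DisplayPresentInfoKHR {
    let whole = Rect2D {
        offset: Offset2D { x: 0, y: 0 },
        extent: Extent2D {
            width: 640,
            height: 480,
        },
    };
    DisplayPresentInfoKHR::builder()
        .src_rect(whole)
        .dst_rect(whole)
        .persistent(false)
        .build()
}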
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkSurfaceCapabilitiesKHR.html>"]
pub struct SurfaceCapabilitiesKHR {
pub min_image_count: u32,
pub max_image_count: u32,
pub current_extent: Extent2D,
pub min_image_extent: Extent2D,
pub max_image_extent: Extent2D,
pub max_image_array_layers: u32,
pub supported_transforms: SurfaceTransformFlagsKHR,
pub current_transform: SurfaceTransformFlagsKHR,
pub supported_composite_alpha: CompositeAlphaFlagsKHR,
pub supported_usage_flags: ImageUsageFlags,
}
impl SurfaceCapabilitiesKHR {
pub fn builder<'a>() -> SurfaceCapabilitiesKHRBuilder<'a> {
SurfaceCapabilitiesKHRBuilder {
inner: SurfaceCapabilitiesKHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct SurfaceCapabilitiesKHRBuilder<'a> {
inner: SurfaceCapabilitiesKHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for SurfaceCapabilitiesKHRBuilder<'a> {
type Target = SurfaceCapabilitiesKHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for SurfaceCapabilitiesKHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> SurfaceCapabilitiesKHRBuilder<'a> {
pub fn min_image_count(mut self, min_image_count: u32) -> SurfaceCapabilitiesKHRBuilder<'a> {
self.inner.min_image_count = min_image_count;
self
}
pub fn max_image_count(mut self, max_image_count: u32) -> SurfaceCapabilitiesKHRBuilder<'a> {
self.inner.max_image_count = max_image_count;
self
}
pub fn current_extent(mut self, current_extent: Extent2D) -> SurfaceCapabilitiesKHRBuilder<'a> {
self.inner.current_extent = current_extent;
self
}
pub fn min_image_extent(
mut self,
min_image_extent: Extent2D,
) -> SurfaceCapabilitiesKHRBuilder<'a> {
self.inner.min_image_extent = min_image_extent;
self
}
pub fn max_image_extent(
mut self,
max_image_extent: Extent2D,
) -> SurfaceCapabilitiesKHRBuilder<'a> {
self.inner.max_image_extent = max_image_extent;
self
}
pub fn max_image_array_layers(
mut self,
max_image_array_layers: u32,
) -> SurfaceCapabilitiesKHRBuilder<'a> {
self.inner.max_image_array_layers = max_image_array_layers;
self
}
pub fn supported_transforms(
mut self,
supported_transforms: SurfaceTransformFlagsKHR,
) -> SurfaceCapabilitiesKHRBuilder<'a> {
self.inner.supported_transforms = supported_transforms;
self
}
pub fn current_transform(
mut self,
current_transform: SurfaceTransformFlagsKHR,
) -> SurfaceCapabilitiesKHRBuilder<'a> {
self.inner.current_transform = current_transform;
self
}
pub fn supported_composite_alpha(
mut self,
supported_composite_alpha: CompositeAlphaFlagsKHR,
) -> SurfaceCapabilitiesKHRBuilder<'a> {
self.inner.supported_composite_alpha = supported_composite_alpha;
self
}
pub fn supported_usage_flags(
mut self,
supported_usage_flags: ImageUsageFlags,
) -> SurfaceCapabilitiesKHRBuilder<'a> {
self.inner.supported_usage_flags = supported_usage_flags;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> SurfaceCapabilitiesKHR {
self.inner
}
}
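// Illustrative usage sketch (not part of the generated bindings): clamping a
// desired swapchain extent to the limits reported in `SurfaceCapabilitiesKHR`,
// the usual fallback when the platform leaves `current_extent` undefined
// (reported as 0xFFFFFFFF per dimension).
#[allow(dead_code)]
fn example_clamp_extent(caps: &SurfaceCapabilitiesKHR, desired: Extent2D) -> Extent2D {
    Extent2D {
        width: desired
            .width
            .max(caps.min_image_extent.width)
            .min(caps.max_image_extent.width),
        height: desired
            .height
            .max(caps.min_image_extent.height)
            .min(caps.max_image_extent.height),
    }
}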
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkAndroidSurfaceCreateInfoKHR.html>"]
pub struct AndroidSurfaceCreateInfoKHR {
pub s_type: StructureType,
pub p_next: *const c_void,
pub flags: AndroidSurfaceCreateFlagsKHR,
pub window: *mut ANativeWindow,
}
impl ::std::default::Default for AndroidSurfaceCreateInfoKHR {
fn default() -> AndroidSurfaceCreateInfoKHR {
AndroidSurfaceCreateInfoKHR {
s_type: StructureType::ANDROID_SURFACE_CREATE_INFO_KHR,
p_next: ::std::ptr::null(),
flags: AndroidSurfaceCreateFlagsKHR::default(),
window: ::std::ptr::null_mut(),
}
}
}
impl AndroidSurfaceCreateInfoKHR {
pub fn builder<'a>() -> AndroidSurfaceCreateInfoKHRBuilder<'a> {
AndroidSurfaceCreateInfoKHRBuilder {
inner: AndroidSurfaceCreateInfoKHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct AndroidSurfaceCreateInfoKHRBuilder<'a> {
inner: AndroidSurfaceCreateInfoKHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsAndroidSurfaceCreateInfoKHR {}
impl<'a> ::std::ops::Deref for AndroidSurfaceCreateInfoKHRBuilder<'a> {
type Target = AndroidSurfaceCreateInfoKHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for AndroidSurfaceCreateInfoKHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> AndroidSurfaceCreateInfoKHRBuilder<'a> {
pub fn flags(
mut self,
flags: AndroidSurfaceCreateFlagsKHR,
) -> AndroidSurfaceCreateInfoKHRBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn window(mut self, window: *mut ANativeWindow) -> AndroidSurfaceCreateInfoKHRBuilder<'a> {
self.inner.window = window;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsAndroidSurfaceCreateInfoKHR>(
mut self,
next: &'a mut T,
) -> AndroidSurfaceCreateInfoKHRBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> AndroidSurfaceCreateInfoKHR {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkViSurfaceCreateInfoNN.html>"]
pub struct ViSurfaceCreateInfoNN {
pub s_type: StructureType,
pub p_next: *const c_void,
pub flags: ViSurfaceCreateFlagsNN,
pub window: *mut c_void,
}
impl ::std::default::Default for ViSurfaceCreateInfoNN {
fn default() -> ViSurfaceCreateInfoNN {
ViSurfaceCreateInfoNN {
s_type: StructureType::VI_SURFACE_CREATE_INFO_NN,
p_next: ::std::ptr::null(),
flags: ViSurfaceCreateFlagsNN::default(),
window: ::std::ptr::null_mut(),
}
}
}
impl ViSurfaceCreateInfoNN {
pub fn builder<'a>() -> ViSurfaceCreateInfoNNBuilder<'a> {
ViSurfaceCreateInfoNNBuilder {
inner: ViSurfaceCreateInfoNN::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ViSurfaceCreateInfoNNBuilder<'a> {
inner: ViSurfaceCreateInfoNN,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsViSurfaceCreateInfoNN {}
impl<'a> ::std::ops::Deref for ViSurfaceCreateInfoNNBuilder<'a> {
type Target = ViSurfaceCreateInfoNN;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ViSurfaceCreateInfoNNBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ViSurfaceCreateInfoNNBuilder<'a> {
pub fn flags(mut self, flags: ViSurfaceCreateFlagsNN) -> ViSurfaceCreateInfoNNBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn window(mut self, window: *mut c_void) -> ViSurfaceCreateInfoNNBuilder<'a> {
self.inner.window = window;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsViSurfaceCreateInfoNN>(
mut self,
next: &'a mut T,
) -> ViSurfaceCreateInfoNNBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ViSurfaceCreateInfoNN {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkWaylandSurfaceCreateInfoKHR.html>"]
pub struct WaylandSurfaceCreateInfoKHR {
pub s_type: StructureType,
pub p_next: *const c_void,
pub flags: WaylandSurfaceCreateFlagsKHR,
pub display: *mut wl_display,
pub surface: *mut wl_surface,
}
impl ::std::default::Default for WaylandSurfaceCreateInfoKHR {
fn default() -> WaylandSurfaceCreateInfoKHR {
WaylandSurfaceCreateInfoKHR {
s_type: StructureType::WAYLAND_SURFACE_CREATE_INFO_KHR,
p_next: ::std::ptr::null(),
flags: WaylandSurfaceCreateFlagsKHR::default(),
display: ::std::ptr::null_mut(),
surface: ::std::ptr::null_mut(),
}
}
}
impl WaylandSurfaceCreateInfoKHR {
pub fn builder<'a>() -> WaylandSurfaceCreateInfoKHRBuilder<'a> {
WaylandSurfaceCreateInfoKHRBuilder {
inner: WaylandSurfaceCreateInfoKHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct WaylandSurfaceCreateInfoKHRBuilder<'a> {
inner: WaylandSurfaceCreateInfoKHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsWaylandSurfaceCreateInfoKHR {}
impl<'a> ::std::ops::Deref for WaylandSurfaceCreateInfoKHRBuilder<'a> {
type Target = WaylandSurfaceCreateInfoKHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for WaylandSurfaceCreateInfoKHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> WaylandSurfaceCreateInfoKHRBuilder<'a> {
pub fn flags(
mut self,
flags: WaylandSurfaceCreateFlagsKHR,
) -> WaylandSurfaceCreateInfoKHRBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn display(mut self, display: *mut wl_display) -> WaylandSurfaceCreateInfoKHRBuilder<'a> {
self.inner.display = display;
self
}
pub fn surface(mut self, surface: *mut wl_surface) -> WaylandSurfaceCreateInfoKHRBuilder<'a> {
self.inner.surface = surface;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsWaylandSurfaceCreateInfoKHR>(
mut self,
next: &'a mut T,
) -> WaylandSurfaceCreateInfoKHRBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> WaylandSurfaceCreateInfoKHR {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkWin32SurfaceCreateInfoKHR.html>"]
pub struct Win32SurfaceCreateInfoKHR {
pub s_type: StructureType,
pub p_next: *const c_void,
pub flags: Win32SurfaceCreateFlagsKHR,
pub hinstance: HINSTANCE,
pub hwnd: HWND,
}
impl ::std::default::Default for Win32SurfaceCreateInfoKHR {
fn default() -> Win32SurfaceCreateInfoKHR {
Win32SurfaceCreateInfoKHR {
s_type: StructureType::WIN32_SURFACE_CREATE_INFO_KHR,
p_next: ::std::ptr::null(),
flags: Win32SurfaceCreateFlagsKHR::default(),
hinstance: unsafe { ::std::mem::zeroed() },
hwnd: unsafe { ::std::mem::zeroed() },
}
}
}
impl Win32SurfaceCreateInfoKHR {
pub fn builder<'a>() -> Win32SurfaceCreateInfoKHRBuilder<'a> {
Win32SurfaceCreateInfoKHRBuilder {
inner: Win32SurfaceCreateInfoKHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct Win32SurfaceCreateInfoKHRBuilder<'a> {
inner: Win32SurfaceCreateInfoKHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsWin32SurfaceCreateInfoKHR {}
impl<'a> ::std::ops::Deref for Win32SurfaceCreateInfoKHRBuilder<'a> {
type Target = Win32SurfaceCreateInfoKHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for Win32SurfaceCreateInfoKHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> Win32SurfaceCreateInfoKHRBuilder<'a> {
pub fn flags(
mut self,
flags: Win32SurfaceCreateFlagsKHR,
) -> Win32SurfaceCreateInfoKHRBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn hinstance(mut self, hinstance: HINSTANCE) -> Win32SurfaceCreateInfoKHRBuilder<'a> {
self.inner.hinstance = hinstance;
self
}
pub fn hwnd(mut self, hwnd: HWND) -> Win32SurfaceCreateInfoKHRBuilder<'a> {
self.inner.hwnd = hwnd;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsWin32SurfaceCreateInfoKHR>(
mut self,
next: &'a mut T,
) -> Win32SurfaceCreateInfoKHRBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> Win32SurfaceCreateInfoKHR {
self.inner
}
}
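// Illustrative usage sketch (not part of the generated bindings): wrapping raw
// Win32 window handles in a `Win32SurfaceCreateInfoKHR`. The `hinstance` and
// `hwnd` values are produced by the windowing layer and passed through as-is.
#[allow(dead_code)]
fn example_win32_surface_create_info(
    hinstance: HINSTANCE,
    hwnd: HWND,
) -> Win32SurfaceCreateInfoKHR {
    Win32SurfaceCreateInfoKHR::builder()
        .hinstance(hinstance)
        .hwnd(hwnd)
        .build()
}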
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkXlibSurfaceCreateInfoKHR.html>"]
pub struct XlibSurfaceCreateInfoKHR {
pub s_type: StructureType,
pub p_next: *const c_void,
pub flags: XlibSurfaceCreateFlagsKHR,
pub dpy: *mut Display,
pub window: Window,
}
impl ::std::default::Default for XlibSurfaceCreateInfoKHR {
fn default() -> XlibSurfaceCreateInfoKHR {
XlibSurfaceCreateInfoKHR {
s_type: StructureType::XLIB_SURFACE_CREATE_INFO_KHR,
p_next: ::std::ptr::null(),
flags: XlibSurfaceCreateFlagsKHR::default(),
dpy: ::std::ptr::null_mut(),
window: Window::default(),
}
}
}
impl XlibSurfaceCreateInfoKHR {
pub fn builder<'a>() -> XlibSurfaceCreateInfoKHRBuilder<'a> {
XlibSurfaceCreateInfoKHRBuilder {
inner: XlibSurfaceCreateInfoKHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct XlibSurfaceCreateInfoKHRBuilder<'a> {
inner: XlibSurfaceCreateInfoKHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsXlibSurfaceCreateInfoKHR {}
impl<'a> ::std::ops::Deref for XlibSurfaceCreateInfoKHRBuilder<'a> {
type Target = XlibSurfaceCreateInfoKHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for XlibSurfaceCreateInfoKHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> XlibSurfaceCreateInfoKHRBuilder<'a> {
pub fn flags(
mut self,
flags: XlibSurfaceCreateFlagsKHR,
) -> XlibSurfaceCreateInfoKHRBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn dpy(mut self, dpy: *mut Display) -> XlibSurfaceCreateInfoKHRBuilder<'a> {
self.inner.dpy = dpy;
self
}
pub fn window(mut self, window: Window) -> XlibSurfaceCreateInfoKHRBuilder<'a> {
self.inner.window = window;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsXlibSurfaceCreateInfoKHR>(
mut self,
next: &'a mut T,
) -> XlibSurfaceCreateInfoKHRBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> XlibSurfaceCreateInfoKHR {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkXcbSurfaceCreateInfoKHR.html>"]
pub struct XcbSurfaceCreateInfoKHR {
pub s_type: StructureType,
pub p_next: *const c_void,
pub flags: XcbSurfaceCreateFlagsKHR,
pub connection: *mut xcb_connection_t,
pub window: xcb_window_t,
}
impl ::std::default::Default for XcbSurfaceCreateInfoKHR {
fn default() -> XcbSurfaceCreateInfoKHR {
XcbSurfaceCreateInfoKHR {
s_type: StructureType::XCB_SURFACE_CREATE_INFO_KHR,
p_next: ::std::ptr::null(),
flags: XcbSurfaceCreateFlagsKHR::default(),
connection: ::std::ptr::null_mut(),
window: xcb_window_t::default(),
}
}
}
impl XcbSurfaceCreateInfoKHR {
pub fn builder<'a>() -> XcbSurfaceCreateInfoKHRBuilder<'a> {
XcbSurfaceCreateInfoKHRBuilder {
inner: XcbSurfaceCreateInfoKHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct XcbSurfaceCreateInfoKHRBuilder<'a> {
inner: XcbSurfaceCreateInfoKHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsXcbSurfaceCreateInfoKHR {}
impl<'a> ::std::ops::Deref for XcbSurfaceCreateInfoKHRBuilder<'a> {
type Target = XcbSurfaceCreateInfoKHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for XcbSurfaceCreateInfoKHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> XcbSurfaceCreateInfoKHRBuilder<'a> {
pub fn flags(mut self, flags: XcbSurfaceCreateFlagsKHR) -> XcbSurfaceCreateInfoKHRBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn connection(
mut self,
connection: *mut xcb_connection_t,
) -> XcbSurfaceCreateInfoKHRBuilder<'a> {
self.inner.connection = connection;
self
}
pub fn window(mut self, window: xcb_window_t) -> XcbSurfaceCreateInfoKHRBuilder<'a> {
self.inner.window = window;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsXcbSurfaceCreateInfoKHR>(
mut self,
next: &'a mut T,
) -> XcbSurfaceCreateInfoKHRBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> XcbSurfaceCreateInfoKHR {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkImagePipeSurfaceCreateInfoFUCHSIA.html>"]
pub struct ImagePipeSurfaceCreateInfoFUCHSIA {
pub s_type: StructureType,
pub p_next: *const c_void,
pub flags: ImagePipeSurfaceCreateFlagsFUCHSIA,
pub image_pipe_handle: zx_handle_t,
}
impl ::std::default::Default for ImagePipeSurfaceCreateInfoFUCHSIA {
fn default() -> ImagePipeSurfaceCreateInfoFUCHSIA {
ImagePipeSurfaceCreateInfoFUCHSIA {
s_type: StructureType::IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA,
p_next: ::std::ptr::null(),
flags: ImagePipeSurfaceCreateFlagsFUCHSIA::default(),
image_pipe_handle: zx_handle_t::default(),
}
}
}
impl ImagePipeSurfaceCreateInfoFUCHSIA {
pub fn builder<'a>() -> ImagePipeSurfaceCreateInfoFUCHSIABuilder<'a> {
ImagePipeSurfaceCreateInfoFUCHSIABuilder {
inner: ImagePipeSurfaceCreateInfoFUCHSIA::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ImagePipeSurfaceCreateInfoFUCHSIABuilder<'a> {
inner: ImagePipeSurfaceCreateInfoFUCHSIA,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsImagePipeSurfaceCreateInfoFUCHSIA {}
impl<'a> ::std::ops::Deref for ImagePipeSurfaceCreateInfoFUCHSIABuilder<'a> {
type Target = ImagePipeSurfaceCreateInfoFUCHSIA;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ImagePipeSurfaceCreateInfoFUCHSIABuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ImagePipeSurfaceCreateInfoFUCHSIABuilder<'a> {
pub fn flags(
mut self,
flags: ImagePipeSurfaceCreateFlagsFUCHSIA,
) -> ImagePipeSurfaceCreateInfoFUCHSIABuilder<'a> {
self.inner.flags = flags;
self
}
pub fn image_pipe_handle(
mut self,
image_pipe_handle: zx_handle_t,
) -> ImagePipeSurfaceCreateInfoFUCHSIABuilder<'a> {
self.inner.image_pipe_handle = image_pipe_handle;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsImagePipeSurfaceCreateInfoFUCHSIA>(
mut self,
next: &'a mut T,
) -> ImagePipeSurfaceCreateInfoFUCHSIABuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ImagePipeSurfaceCreateInfoFUCHSIA {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkSurfaceFormatKHR.html>"]
pub struct SurfaceFormatKHR {
pub format: Format,
pub color_space: ColorSpaceKHR,
}
impl SurfaceFormatKHR {
pub fn builder<'a>() -> SurfaceFormatKHRBuilder<'a> {
SurfaceFormatKHRBuilder {
inner: SurfaceFormatKHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct SurfaceFormatKHRBuilder<'a> {
inner: SurfaceFormatKHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for SurfaceFormatKHRBuilder<'a> {
type Target = SurfaceFormatKHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for SurfaceFormatKHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> SurfaceFormatKHRBuilder<'a> {
pub fn format(mut self, format: Format) -> SurfaceFormatKHRBuilder<'a> {
self.inner.format = format;
self
}
pub fn color_space(mut self, color_space: ColorSpaceKHR) -> SurfaceFormatKHRBuilder<'a> {
self.inner.color_space = color_space;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> SurfaceFormatKHR {
self.inner
}
}
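// Illustrative usage sketch (not part of the generated bindings): selecting a
// preferred `SurfaceFormatKHR` from the formats reported for a surface,
// falling back to the first entry. B8G8R8A8_SRGB / SRGB_NONLINEAR is an
// example preference, and the slice is assumed to be non-empty.
#[allow(dead_code)]
fn example_choose_surface_format(available: &[SurfaceFormatKHR]) -> SurfaceFormatKHR {
    available
        .iter()
        .find(|candidate| {
            candidate.format == Format::B8G8R8A8_SRGB
                && candidate.color_space == ColorSpaceKHR::SRGB_NONLINEAR
        })
        .cloned()
        .unwrap_or(available[0])
}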
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkSwapchainCreateInfoKHR.html>"]
pub struct SwapchainCreateInfoKHR {
pub s_type: StructureType,
pub p_next: *const c_void,
pub flags: SwapchainCreateFlagsKHR,
pub surface: SurfaceKHR,
pub min_image_count: u32,
pub image_format: Format,
pub image_color_space: ColorSpaceKHR,
pub image_extent: Extent2D,
pub image_array_layers: u32,
pub image_usage: ImageUsageFlags,
pub image_sharing_mode: SharingMode,
pub queue_family_index_count: u32,
pub p_queue_family_indices: *const u32,
pub pre_transform: SurfaceTransformFlagsKHR,
pub composite_alpha: CompositeAlphaFlagsKHR,
pub present_mode: PresentModeKHR,
pub clipped: Bool32,
pub old_swapchain: SwapchainKHR,
}
impl ::std::default::Default for SwapchainCreateInfoKHR {
fn default() -> SwapchainCreateInfoKHR {
SwapchainCreateInfoKHR {
s_type: StructureType::SWAPCHAIN_CREATE_INFO_KHR,
p_next: ::std::ptr::null(),
flags: SwapchainCreateFlagsKHR::default(),
surface: SurfaceKHR::default(),
min_image_count: u32::default(),
image_format: Format::default(),
image_color_space: ColorSpaceKHR::default(),
image_extent: Extent2D::default(),
image_array_layers: u32::default(),
image_usage: ImageUsageFlags::default(),
image_sharing_mode: SharingMode::default(),
queue_family_index_count: u32::default(),
p_queue_family_indices: ::std::ptr::null(),
pre_transform: SurfaceTransformFlagsKHR::default(),
composite_alpha: CompositeAlphaFlagsKHR::default(),
present_mode: PresentModeKHR::default(),
clipped: Bool32::default(),
old_swapchain: SwapchainKHR::default(),
}
}
}
impl SwapchainCreateInfoKHR {
pub fn builder<'a>() -> SwapchainCreateInfoKHRBuilder<'a> {
SwapchainCreateInfoKHRBuilder {
inner: SwapchainCreateInfoKHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct SwapchainCreateInfoKHRBuilder<'a> {
inner: SwapchainCreateInfoKHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsSwapchainCreateInfoKHR {}
impl<'a> ::std::ops::Deref for SwapchainCreateInfoKHRBuilder<'a> {
type Target = SwapchainCreateInfoKHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for SwapchainCreateInfoKHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> SwapchainCreateInfoKHRBuilder<'a> {
pub fn flags(mut self, flags: SwapchainCreateFlagsKHR) -> SwapchainCreateInfoKHRBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn surface(mut self, surface: SurfaceKHR) -> SwapchainCreateInfoKHRBuilder<'a> {
self.inner.surface = surface;
self
}
pub fn min_image_count(mut self, min_image_count: u32) -> SwapchainCreateInfoKHRBuilder<'a> {
self.inner.min_image_count = min_image_count;
self
}
pub fn image_format(mut self, image_format: Format) -> SwapchainCreateInfoKHRBuilder<'a> {
self.inner.image_format = image_format;
self
}
pub fn image_color_space(
mut self,
image_color_space: ColorSpaceKHR,
) -> SwapchainCreateInfoKHRBuilder<'a> {
self.inner.image_color_space = image_color_space;
self
}
pub fn image_extent(mut self, image_extent: Extent2D) -> SwapchainCreateInfoKHRBuilder<'a> {
self.inner.image_extent = image_extent;
self
}
pub fn image_array_layers(
mut self,
image_array_layers: u32,
) -> SwapchainCreateInfoKHRBuilder<'a> {
self.inner.image_array_layers = image_array_layers;
self
}
pub fn image_usage(
mut self,
image_usage: ImageUsageFlags,
) -> SwapchainCreateInfoKHRBuilder<'a> {
self.inner.image_usage = image_usage;
self
}
pub fn image_sharing_mode(
mut self,
image_sharing_mode: SharingMode,
) -> SwapchainCreateInfoKHRBuilder<'a> {
self.inner.image_sharing_mode = image_sharing_mode;
self
}
pub fn queue_family_indices(
mut self,
queue_family_indices: &'a [u32],
) -> SwapchainCreateInfoKHRBuilder<'a> {
self.inner.queue_family_index_count = queue_family_indices.len() as _;
self.inner.p_queue_family_indices = queue_family_indices.as_ptr();
self
}
pub fn pre_transform(
mut self,
pre_transform: SurfaceTransformFlagsKHR,
) -> SwapchainCreateInfoKHRBuilder<'a> {
self.inner.pre_transform = pre_transform;
self
}
pub fn composite_alpha(
mut self,
composite_alpha: CompositeAlphaFlagsKHR,
) -> SwapchainCreateInfoKHRBuilder<'a> {
self.inner.composite_alpha = composite_alpha;
self
}
pub fn present_mode(
mut self,
present_mode: PresentModeKHR,
) -> SwapchainCreateInfoKHRBuilder<'a> {
self.inner.present_mode = present_mode;
self
}
pub fn clipped(mut self, clipped: bool) -> SwapchainCreateInfoKHRBuilder<'a> {
self.inner.clipped = clipped.into();
self
}
pub fn old_swapchain(
mut self,
old_swapchain: SwapchainKHR,
) -> SwapchainCreateInfoKHRBuilder<'a> {
self.inner.old_swapchain = old_swapchain;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsSwapchainCreateInfoKHR>(
mut self,
next: &'a mut T,
) -> SwapchainCreateInfoKHRBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> SwapchainCreateInfoKHR {
self.inner
}
}
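// Illustrative usage sketch (not part of the generated bindings): a typical
// `SwapchainCreateInfoKHR` for an exclusive-ownership, double-buffered
// swapchain. The surface handle, surface format and extent are parameters
// because in real use they come from the surface capability and format
// queries; the remaining values are example choices.
#[allow(dead_code)]
fn example_swapchain_create_info(
    surface: SurfaceKHR,
    surface_format: SurfaceFormatKHR,
    extent: Extent2D,
) -> SwapchainCreateInfoKHR {
    SwapchainCreateInfoKHR::builder()
        .surface(surface)
        .min_image_count(2)
        .image_format(surface_format.format)
        .image_color_space(surface_format.color_space)
        .image_extent(extent)
        .image_array_layers(1)
        .image_usage(ImageUsageFlags::COLOR_ATTACHMENT)
        .image_sharing_mode(SharingMode::EXCLUSIVE)
        .pre_transform(SurfaceTransformFlagsKHR::IDENTITY)
        .composite_alpha(CompositeAlphaFlagsKHR::OPAQUE)
        .present_mode(PresentModeKHR::FIFO)
        .clipped(true)
        .old_swapchain(SwapchainKHR::null())
        .build()
}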
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPresentInfoKHR.html>"]
pub struct PresentInfoKHR {
pub s_type: StructureType,
pub p_next: *const c_void,
pub wait_semaphore_count: u32,
pub p_wait_semaphores: *const Semaphore,
pub swapchain_count: u32,
pub p_swapchains: *const SwapchainKHR,
pub p_image_indices: *const u32,
pub p_results: *mut Result,
}
impl ::std::default::Default for PresentInfoKHR {
fn default() -> PresentInfoKHR {
PresentInfoKHR {
s_type: StructureType::PRESENT_INFO_KHR,
p_next: ::std::ptr::null(),
wait_semaphore_count: u32::default(),
p_wait_semaphores: ::std::ptr::null(),
swapchain_count: u32::default(),
p_swapchains: ::std::ptr::null(),
p_image_indices: ::std::ptr::null(),
p_results: ::std::ptr::null_mut(),
}
}
}
impl PresentInfoKHR {
pub fn builder<'a>() -> PresentInfoKHRBuilder<'a> {
PresentInfoKHRBuilder {
inner: PresentInfoKHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PresentInfoKHRBuilder<'a> {
inner: PresentInfoKHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsPresentInfoKHR {}
impl<'a> ::std::ops::Deref for PresentInfoKHRBuilder<'a> {
type Target = PresentInfoKHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PresentInfoKHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PresentInfoKHRBuilder<'a> {
pub fn wait_semaphores(
mut self,
wait_semaphores: &'a [Semaphore],
) -> PresentInfoKHRBuilder<'a> {
self.inner.wait_semaphore_count = wait_semaphores.len() as _;
self.inner.p_wait_semaphores = wait_semaphores.as_ptr();
self
}
pub fn swapchains(mut self, swapchains: &'a [SwapchainKHR]) -> PresentInfoKHRBuilder<'a> {
self.inner.swapchain_count = swapchains.len() as _;
self.inner.p_swapchains = swapchains.as_ptr();
self
}
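    // Note: the Vulkan spec ties the lengths of `p_image_indices` and
    // `p_results` to `swapchain_count` as well, so the two setters below also
    // overwrite that count from their slice lengths.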
pub fn image_indices(mut self, image_indices: &'a [u32]) -> PresentInfoKHRBuilder<'a> {
self.inner.swapchain_count = image_indices.len() as _;
self.inner.p_image_indices = image_indices.as_ptr();
self
}
pub fn results(mut self, results: &'a mut [Result]) -> PresentInfoKHRBuilder<'a> {
self.inner.swapchain_count = results.len() as _;
self.inner.p_results = results.as_mut_ptr();
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsPresentInfoKHR>(
mut self,
next: &'a mut T,
) -> PresentInfoKHRBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PresentInfoKHR {
self.inner
}
}
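// Illustrative usage sketch (not part of the generated bindings): assembling a
// `PresentInfoKHR` and extending it with a `DisplayPresentInfoKHR` through
// `push_next` (valid because of the `ExtendsPresentInfoKHR` impls above). The
// builder is returned instead of calling `build()` so the borrowed slices and
// the pushed struct remain tied to the lifetime `'a`.
#[allow(dead_code)]
fn example_present_info<'a>(
    wait_semaphores: &'a [Semaphore],
    swapchains: &'a [SwapchainKHR],
    image_indices: &'a [u32],
    display_present_info: &'a mut DisplayPresentInfoKHR,
) -> PresentInfoKHRBuilder<'a> {
    PresentInfoKHR::builder()
        .wait_semaphores(wait_semaphores)
        .swapchains(swapchains)
        .image_indices(image_indices)
        .push_next(display_present_info)
}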
#[repr(C)]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDebugReportCallbackCreateInfoEXT.html>"]
pub struct DebugReportCallbackCreateInfoEXT {
pub s_type: StructureType,
pub p_next: *const c_void,
pub flags: DebugReportFlagsEXT,
pub pfn_callback: PFN_vkDebugReportCallbackEXT,
pub p_user_data: *mut c_void,
}
impl fmt::Debug for DebugReportCallbackCreateInfoEXT {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
fmt.debug_struct("DebugReportCallbackCreateInfoEXT")
.field("s_type", &self.s_type)
.field("p_next", &self.p_next)
.field("flags", &self.flags)
.field("pfn_callback", &(self.pfn_callback.map(|x| x as *const ())))
.field("p_user_data", &self.p_user_data)
.finish()
}
}
impl ::std::default::Default for DebugReportCallbackCreateInfoEXT {
fn default() -> DebugReportCallbackCreateInfoEXT {
DebugReportCallbackCreateInfoEXT {
s_type: StructureType::DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT,
p_next: ::std::ptr::null(),
flags: DebugReportFlagsEXT::default(),
pfn_callback: PFN_vkDebugReportCallbackEXT::default(),
p_user_data: ::std::ptr::null_mut(),
}
}
}
impl DebugReportCallbackCreateInfoEXT {
pub fn builder<'a>() -> DebugReportCallbackCreateInfoEXTBuilder<'a> {
DebugReportCallbackCreateInfoEXTBuilder {
inner: DebugReportCallbackCreateInfoEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct DebugReportCallbackCreateInfoEXTBuilder<'a> {
inner: DebugReportCallbackCreateInfoEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsInstanceCreateInfo for DebugReportCallbackCreateInfoEXTBuilder<'_> {}
unsafe impl ExtendsInstanceCreateInfo for DebugReportCallbackCreateInfoEXT {}
impl<'a> ::std::ops::Deref for DebugReportCallbackCreateInfoEXTBuilder<'a> {
type Target = DebugReportCallbackCreateInfoEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for DebugReportCallbackCreateInfoEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> DebugReportCallbackCreateInfoEXTBuilder<'a> {
pub fn flags(
mut self,
flags: DebugReportFlagsEXT,
) -> DebugReportCallbackCreateInfoEXTBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn pfn_callback(
mut self,
pfn_callback: PFN_vkDebugReportCallbackEXT,
) -> DebugReportCallbackCreateInfoEXTBuilder<'a> {
self.inner.pfn_callback = pfn_callback;
self
}
pub fn user_data(
mut self,
user_data: *mut c_void,
) -> DebugReportCallbackCreateInfoEXTBuilder<'a> {
self.inner.p_user_data = user_data;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> DebugReportCallbackCreateInfoEXT {
self.inner
}
}
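// Illustrative usage sketch (not part of the generated bindings): a debug
// report configuration that reports errors and warnings. The callback is
// taken as a parameter so this sketch does not restate the full
// `PFN_vkDebugReportCallbackEXT` signature.
#[allow(dead_code)]
fn example_debug_report_create_info(
    callback: PFN_vkDebugReportCallbackEXT,
) -> DebugReportCallbackCreateInfoEXT {
    DebugReportCallbackCreateInfoEXT::builder()
        .flags(DebugReportFlagsEXT::ERROR | DebugReportFlagsEXT::WARNING)
        .pfn_callback(callback)
        .build()
}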
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkValidationFlagsEXT.html>"]
pub struct ValidationFlagsEXT {
pub s_type: StructureType,
pub p_next: *const c_void,
pub disabled_validation_check_count: u32,
pub p_disabled_validation_checks: *const ValidationCheckEXT,
}
impl ::std::default::Default for ValidationFlagsEXT {
fn default() -> ValidationFlagsEXT {
ValidationFlagsEXT {
s_type: StructureType::VALIDATION_FLAGS_EXT,
p_next: ::std::ptr::null(),
disabled_validation_check_count: u32::default(),
p_disabled_validation_checks: ::std::ptr::null(),
}
}
}
impl ValidationFlagsEXT {
pub fn builder<'a>() -> ValidationFlagsEXTBuilder<'a> {
ValidationFlagsEXTBuilder {
inner: ValidationFlagsEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ValidationFlagsEXTBuilder<'a> {
inner: ValidationFlagsEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsInstanceCreateInfo for ValidationFlagsEXTBuilder<'_> {}
unsafe impl ExtendsInstanceCreateInfo for ValidationFlagsEXT {}
impl<'a> ::std::ops::Deref for ValidationFlagsEXTBuilder<'a> {
type Target = ValidationFlagsEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ValidationFlagsEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ValidationFlagsEXTBuilder<'a> {
pub fn disabled_validation_checks(
mut self,
disabled_validation_checks: &'a [ValidationCheckEXT],
) -> ValidationFlagsEXTBuilder<'a> {
self.inner.disabled_validation_check_count = disabled_validation_checks.len() as _;
self.inner.p_disabled_validation_checks = disabled_validation_checks.as_ptr();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ValidationFlagsEXT {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkValidationFeaturesEXT.html>"]
pub struct ValidationFeaturesEXT {
pub s_type: StructureType,
pub p_next: *const c_void,
pub enabled_validation_feature_count: u32,
pub p_enabled_validation_features: *const ValidationFeatureEnableEXT,
pub disabled_validation_feature_count: u32,
pub p_disabled_validation_features: *const ValidationFeatureDisableEXT,
}
impl ::std::default::Default for ValidationFeaturesEXT {
fn default() -> ValidationFeaturesEXT {
ValidationFeaturesEXT {
s_type: StructureType::VALIDATION_FEATURES_EXT,
p_next: ::std::ptr::null(),
enabled_validation_feature_count: u32::default(),
p_enabled_validation_features: ::std::ptr::null(),
disabled_validation_feature_count: u32::default(),
p_disabled_validation_features: ::std::ptr::null(),
}
}
}
impl ValidationFeaturesEXT {
pub fn builder<'a>() -> ValidationFeaturesEXTBuilder<'a> {
ValidationFeaturesEXTBuilder {
inner: ValidationFeaturesEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ValidationFeaturesEXTBuilder<'a> {
inner: ValidationFeaturesEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsInstanceCreateInfo for ValidationFeaturesEXTBuilder<'_> {}
unsafe impl ExtendsInstanceCreateInfo for ValidationFeaturesEXT {}
impl<'a> ::std::ops::Deref for ValidationFeaturesEXTBuilder<'a> {
type Target = ValidationFeaturesEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ValidationFeaturesEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ValidationFeaturesEXTBuilder<'a> {
pub fn enabled_validation_features(
mut self,
enabled_validation_features: &'a [ValidationFeatureEnableEXT],
) -> ValidationFeaturesEXTBuilder<'a> {
self.inner.enabled_validation_feature_count = enabled_validation_features.len() as _;
self.inner.p_enabled_validation_features = enabled_validation_features.as_ptr();
self
}
pub fn disabled_validation_features(
mut self,
disabled_validation_features: &'a [ValidationFeatureDisableEXT],
) -> ValidationFeaturesEXTBuilder<'a> {
self.inner.disabled_validation_feature_count = disabled_validation_features.len() as _;
self.inner.p_disabled_validation_features = disabled_validation_features.as_ptr();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ValidationFeaturesEXT {
self.inner
}
}
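// Illustrative sketch, not part of the generated bindings: each slice setter above
// records both the element count and the data pointer, and the returned builder
// borrows the slices for `'a`. Because `ValidationFeaturesEXT` implements
// `ExtendsInstanceCreateInfo`, the finished value (or the builder itself) is meant to
// be pushed onto an instance-creation pointer chain; the instance-level builder that
// accepts it lives elsewhere in this module and is only assumed here.
#[allow(dead_code)]
fn _example_validation_features<'a>(
enabled: &'a [ValidationFeatureEnableEXT],
disabled: &'a [ValidationFeatureDisableEXT],
) -> ValidationFeaturesEXTBuilder<'a> {
ValidationFeaturesEXT::builder()
.enabled_validation_features(enabled)
.disabled_validation_features(disabled)
}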
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPipelineRasterizationStateRasterizationOrderAMD.html>"]
pub struct PipelineRasterizationStateRasterizationOrderAMD {
pub s_type: StructureType,
pub p_next: *const c_void,
pub rasterization_order: RasterizationOrderAMD,
}
impl ::std::default::Default for PipelineRasterizationStateRasterizationOrderAMD {
fn default() -> PipelineRasterizationStateRasterizationOrderAMD {
PipelineRasterizationStateRasterizationOrderAMD {
s_type: StructureType::PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD,
p_next: ::std::ptr::null(),
rasterization_order: RasterizationOrderAMD::default(),
}
}
}
impl PipelineRasterizationStateRasterizationOrderAMD {
pub fn builder<'a>() -> PipelineRasterizationStateRasterizationOrderAMDBuilder<'a> {
PipelineRasterizationStateRasterizationOrderAMDBuilder {
inner: PipelineRasterizationStateRasterizationOrderAMD::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PipelineRasterizationStateRasterizationOrderAMDBuilder<'a> {
inner: PipelineRasterizationStateRasterizationOrderAMD,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsPipelineRasterizationStateCreateInfo
for PipelineRasterizationStateRasterizationOrderAMDBuilder<'_>
{
}
unsafe impl ExtendsPipelineRasterizationStateCreateInfo
for PipelineRasterizationStateRasterizationOrderAMD
{
}
impl<'a> ::std::ops::Deref for PipelineRasterizationStateRasterizationOrderAMDBuilder<'a> {
type Target = PipelineRasterizationStateRasterizationOrderAMD;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PipelineRasterizationStateRasterizationOrderAMDBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PipelineRasterizationStateRasterizationOrderAMDBuilder<'a> {
pub fn rasterization_order(
mut self,
rasterization_order: RasterizationOrderAMD,
) -> PipelineRasterizationStateRasterizationOrderAMDBuilder<'a> {
self.inner.rasterization_order = rasterization_order;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PipelineRasterizationStateRasterizationOrderAMD {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDebugMarkerObjectNameInfoEXT.html>"]
pub struct DebugMarkerObjectNameInfoEXT {
pub s_type: StructureType,
pub p_next: *const c_void,
pub object_type: DebugReportObjectTypeEXT,
pub object: u64,
pub p_object_name: *const c_char,
}
impl ::std::default::Default for DebugMarkerObjectNameInfoEXT {
fn default() -> DebugMarkerObjectNameInfoEXT {
DebugMarkerObjectNameInfoEXT {
s_type: StructureType::DEBUG_MARKER_OBJECT_NAME_INFO_EXT,
p_next: ::std::ptr::null(),
object_type: DebugReportObjectTypeEXT::default(),
object: u64::default(),
p_object_name: ::std::ptr::null(),
}
}
}
impl DebugMarkerObjectNameInfoEXT {
pub fn builder<'a>() -> DebugMarkerObjectNameInfoEXTBuilder<'a> {
DebugMarkerObjectNameInfoEXTBuilder {
inner: DebugMarkerObjectNameInfoEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct DebugMarkerObjectNameInfoEXTBuilder<'a> {
inner: DebugMarkerObjectNameInfoEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsDebugMarkerObjectNameInfoEXT {}
impl<'a> ::std::ops::Deref for DebugMarkerObjectNameInfoEXTBuilder<'a> {
type Target = DebugMarkerObjectNameInfoEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for DebugMarkerObjectNameInfoEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> DebugMarkerObjectNameInfoEXTBuilder<'a> {
pub fn object_type(
mut self,
object_type: DebugReportObjectTypeEXT,
) -> DebugMarkerObjectNameInfoEXTBuilder<'a> {
self.inner.object_type = object_type;
self
}
pub fn object(mut self, object: u64) -> DebugMarkerObjectNameInfoEXTBuilder<'a> {
self.inner.object = object;
self
}
pub fn object_name(
mut self,
object_name: &'a ::std::ffi::CStr,
) -> DebugMarkerObjectNameInfoEXTBuilder<'a> {
self.inner.p_object_name = object_name.as_ptr();
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsDebugMarkerObjectNameInfoEXT>(
mut self,
next: &'a mut T,
) -> DebugMarkerObjectNameInfoEXTBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> DebugMarkerObjectNameInfoEXT {
self.inner
}
}
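// Illustrative sketch, not part of the generated bindings: attaching a human-readable
// name to an object handle for VK_EXT_debug_marker tooling. `object_name` stores the
// `CStr`'s pointer, so the string must outlive any use of the struct; returning the
// builder instead of calling `build` keeps that guarantee in the type system.
#[allow(dead_code)]
fn _example_debug_marker_object_name<'a>(
raw_handle: u64,
object_type: DebugReportObjectTypeEXT,
name: &'a ::std::ffi::CStr,
) -> DebugMarkerObjectNameInfoEXTBuilder<'a> {
DebugMarkerObjectNameInfoEXT::builder()
.object_type(object_type)
.object(raw_handle)
.object_name(name)
}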
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDebugMarkerObjectTagInfoEXT.html>"]
pub struct DebugMarkerObjectTagInfoEXT {
pub s_type: StructureType,
pub p_next: *const c_void,
pub object_type: DebugReportObjectTypeEXT,
pub object: u64,
pub tag_name: u64,
pub tag_size: usize,
pub p_tag: *const c_void,
}
impl ::std::default::Default for DebugMarkerObjectTagInfoEXT {
fn default() -> DebugMarkerObjectTagInfoEXT {
DebugMarkerObjectTagInfoEXT {
s_type: StructureType::DEBUG_MARKER_OBJECT_TAG_INFO_EXT,
p_next: ::std::ptr::null(),
object_type: DebugReportObjectTypeEXT::default(),
object: u64::default(),
tag_name: u64::default(),
tag_size: usize::default(),
p_tag: ::std::ptr::null(),
}
}
}
impl DebugMarkerObjectTagInfoEXT {
pub fn builder<'a>() -> DebugMarkerObjectTagInfoEXTBuilder<'a> {
DebugMarkerObjectTagInfoEXTBuilder {
inner: DebugMarkerObjectTagInfoEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct DebugMarkerObjectTagInfoEXTBuilder<'a> {
inner: DebugMarkerObjectTagInfoEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsDebugMarkerObjectTagInfoEXT {}
impl<'a> ::std::ops::Deref for DebugMarkerObjectTagInfoEXTBuilder<'a> {
type Target = DebugMarkerObjectTagInfoEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for DebugMarkerObjectTagInfoEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> DebugMarkerObjectTagInfoEXTBuilder<'a> {
pub fn object_type(
mut self,
object_type: DebugReportObjectTypeEXT,
) -> DebugMarkerObjectTagInfoEXTBuilder<'a> {
self.inner.object_type = object_type;
self
}
pub fn object(mut self, object: u64) -> DebugMarkerObjectTagInfoEXTBuilder<'a> {
self.inner.object = object;
self
}
pub fn tag_name(mut self, tag_name: u64) -> DebugMarkerObjectTagInfoEXTBuilder<'a> {
self.inner.tag_name = tag_name;
self
}
pub fn tag(mut self, tag: &'a [u8]) -> DebugMarkerObjectTagInfoEXTBuilder<'a> {
self.inner.tag_size = tag.len() as _;
self.inner.p_tag = tag.as_ptr() as *const c_void;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsDebugMarkerObjectTagInfoEXT>(
mut self,
next: &'a mut T,
) -> DebugMarkerObjectTagInfoEXTBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> DebugMarkerObjectTagInfoEXT {
self.inner
}
}
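// Illustrative sketch, not part of the generated bindings: `tag` fills both `tag_size`
// and `p_tag` from a single byte slice, so an arbitrary binary payload can be attached
// to an object under a caller-chosen `tag_name` (the `0` below is a placeholder).
#[allow(dead_code)]
fn _example_debug_marker_object_tag<'a>(
raw_handle: u64,
object_type: DebugReportObjectTypeEXT,
payload: &'a [u8],
) -> DebugMarkerObjectTagInfoEXTBuilder<'a> {
DebugMarkerObjectTagInfoEXT::builder()
.object_type(object_type)
.object(raw_handle)
.tag_name(0)
.tag(payload)
}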
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDebugMarkerMarkerInfoEXT.html>"]
pub struct DebugMarkerMarkerInfoEXT {
pub s_type: StructureType,
pub p_next: *const c_void,
pub p_marker_name: *const c_char,
pub color: [f32; 4],
}
impl ::std::default::Default for DebugMarkerMarkerInfoEXT {
fn default() -> DebugMarkerMarkerInfoEXT {
DebugMarkerMarkerInfoEXT {
s_type: StructureType::DEBUG_MARKER_MARKER_INFO_EXT,
p_next: ::std::ptr::null(),
p_marker_name: ::std::ptr::null(),
color: [0.0; 4],
}
}
}
impl DebugMarkerMarkerInfoEXT {
pub fn builder<'a>() -> DebugMarkerMarkerInfoEXTBuilder<'a> {
DebugMarkerMarkerInfoEXTBuilder {
inner: DebugMarkerMarkerInfoEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct DebugMarkerMarkerInfoEXTBuilder<'a> {
inner: DebugMarkerMarkerInfoEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsDebugMarkerMarkerInfoEXT {}
impl<'a> ::std::ops::Deref for DebugMarkerMarkerInfoEXTBuilder<'a> {
type Target = DebugMarkerMarkerInfoEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for DebugMarkerMarkerInfoEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> DebugMarkerMarkerInfoEXTBuilder<'a> {
pub fn marker_name(
mut self,
marker_name: &'a ::std::ffi::CStr,
) -> DebugMarkerMarkerInfoEXTBuilder<'a> {
self.inner.p_marker_name = marker_name.as_ptr();
self
}
pub fn color(mut self, color: [f32; 4]) -> DebugMarkerMarkerInfoEXTBuilder<'a> {
self.inner.color = color;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsDebugMarkerMarkerInfoEXT>(
mut self,
next: &'a mut T,
) -> DebugMarkerMarkerInfoEXTBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> DebugMarkerMarkerInfoEXT {
self.inner
}
}
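// Illustrative sketch, not part of the generated bindings: a marker label plus an RGBA
// color for capture tools. The color array is copied by value, while the label is
// borrowed for `'a` like every other pointer setter in this module.
#[allow(dead_code)]
fn _example_debug_marker_marker_info<'a>(
label: &'a ::std::ffi::CStr,
) -> DebugMarkerMarkerInfoEXTBuilder<'a> {
DebugMarkerMarkerInfoEXT::builder()
.marker_name(label)
.color([0.2, 0.6, 1.0, 1.0])
}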
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDedicatedAllocationImageCreateInfoNV.html>"]
pub struct DedicatedAllocationImageCreateInfoNV {
pub s_type: StructureType,
pub p_next: *const c_void,
pub dedicated_allocation: Bool32,
}
impl ::std::default::Default for DedicatedAllocationImageCreateInfoNV {
fn default() -> DedicatedAllocationImageCreateInfoNV {
DedicatedAllocationImageCreateInfoNV {
s_type: StructureType::DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV,
p_next: ::std::ptr::null(),
dedicated_allocation: Bool32::default(),
}
}
}
impl DedicatedAllocationImageCreateInfoNV {
pub fn builder<'a>() -> DedicatedAllocationImageCreateInfoNVBuilder<'a> {
DedicatedAllocationImageCreateInfoNVBuilder {
inner: DedicatedAllocationImageCreateInfoNV::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct DedicatedAllocationImageCreateInfoNVBuilder<'a> {
inner: DedicatedAllocationImageCreateInfoNV,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsImageCreateInfo for DedicatedAllocationImageCreateInfoNVBuilder<'_> {}
unsafe impl ExtendsImageCreateInfo for DedicatedAllocationImageCreateInfoNV {}
impl<'a> ::std::ops::Deref for DedicatedAllocationImageCreateInfoNVBuilder<'a> {
type Target = DedicatedAllocationImageCreateInfoNV;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for DedicatedAllocationImageCreateInfoNVBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> DedicatedAllocationImageCreateInfoNVBuilder<'a> {
pub fn dedicated_allocation(
mut self,
dedicated_allocation: bool,
) -> DedicatedAllocationImageCreateInfoNVBuilder<'a> {
self.inner.dedicated_allocation = dedicated_allocation.into();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> DedicatedAllocationImageCreateInfoNV {
self.inner
}
}
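// Illustrative sketch, not part of the generated bindings: boolean setters take a Rust
// `bool` and convert it with `Into` into the FFI `Bool32` stored in the struct, so
// callers never handle raw `Bool32` values directly.
#[allow(dead_code)]
fn _example_dedicated_allocation_image_flag(enable: bool) -> DedicatedAllocationImageCreateInfoNV {
DedicatedAllocationImageCreateInfoNV::builder()
.dedicated_allocation(enable)
.build()
}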
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDedicatedAllocationBufferCreateInfoNV.html>"]
pub struct DedicatedAllocationBufferCreateInfoNV {
pub s_type: StructureType,
pub p_next: *const c_void,
pub dedicated_allocation: Bool32,
}
impl ::std::default::Default for DedicatedAllocationBufferCreateInfoNV {
fn default() -> DedicatedAllocationBufferCreateInfoNV {
DedicatedAllocationBufferCreateInfoNV {
s_type: StructureType::DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV,
p_next: ::std::ptr::null(),
dedicated_allocation: Bool32::default(),
}
}
}
impl DedicatedAllocationBufferCreateInfoNV {
pub fn builder<'a>() -> DedicatedAllocationBufferCreateInfoNVBuilder<'a> {
DedicatedAllocationBufferCreateInfoNVBuilder {
inner: DedicatedAllocationBufferCreateInfoNV::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct DedicatedAllocationBufferCreateInfoNVBuilder<'a> {
inner: DedicatedAllocationBufferCreateInfoNV,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsBufferCreateInfo for DedicatedAllocationBufferCreateInfoNVBuilder<'_> {}
unsafe impl ExtendsBufferCreateInfo for DedicatedAllocationBufferCreateInfoNV {}
impl<'a> ::std::ops::Deref for DedicatedAllocationBufferCreateInfoNVBuilder<'a> {
type Target = DedicatedAllocationBufferCreateInfoNV;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for DedicatedAllocationBufferCreateInfoNVBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> DedicatedAllocationBufferCreateInfoNVBuilder<'a> {
pub fn dedicated_allocation(
mut self,
dedicated_allocation: bool,
) -> DedicatedAllocationBufferCreateInfoNVBuilder<'a> {
self.inner.dedicated_allocation = dedicated_allocation.into();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> DedicatedAllocationBufferCreateInfoNV {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDedicatedAllocationMemoryAllocateInfoNV.html>"]
pub struct DedicatedAllocationMemoryAllocateInfoNV {
pub s_type: StructureType,
pub p_next: *const c_void,
pub image: Image,
pub buffer: Buffer,
}
impl ::std::default::Default for DedicatedAllocationMemoryAllocateInfoNV {
fn default() -> DedicatedAllocationMemoryAllocateInfoNV {
DedicatedAllocationMemoryAllocateInfoNV {
s_type: StructureType::DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV,
p_next: ::std::ptr::null(),
image: Image::default(),
buffer: Buffer::default(),
}
}
}
impl DedicatedAllocationMemoryAllocateInfoNV {
pub fn builder<'a>() -> DedicatedAllocationMemoryAllocateInfoNVBuilder<'a> {
DedicatedAllocationMemoryAllocateInfoNVBuilder {
inner: DedicatedAllocationMemoryAllocateInfoNV::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct DedicatedAllocationMemoryAllocateInfoNVBuilder<'a> {
inner: DedicatedAllocationMemoryAllocateInfoNV,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsMemoryAllocateInfo for DedicatedAllocationMemoryAllocateInfoNVBuilder<'_> {}
unsafe impl ExtendsMemoryAllocateInfo for DedicatedAllocationMemoryAllocateInfoNV {}
impl<'a> ::std::ops::Deref for DedicatedAllocationMemoryAllocateInfoNVBuilder<'a> {
type Target = DedicatedAllocationMemoryAllocateInfoNV;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for DedicatedAllocationMemoryAllocateInfoNVBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> DedicatedAllocationMemoryAllocateInfoNVBuilder<'a> {
pub fn image(mut self, image: Image) -> DedicatedAllocationMemoryAllocateInfoNVBuilder<'a> {
self.inner.image = image;
self
}
pub fn buffer(mut self, buffer: Buffer) -> DedicatedAllocationMemoryAllocateInfoNVBuilder<'a> {
self.inner.buffer = buffer;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> DedicatedAllocationMemoryAllocateInfoNV {
self.inner
}
}
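// Illustrative sketch, not part of the generated bindings: requesting an NV dedicated
// allocation for a specific image. The struct carries only handles (no borrowed
// pointers), so calling `build` here loses nothing; it still has to be chained onto a
// `MemoryAllocateInfo` (defined elsewhere in this module) to take effect. Whether
// `image`, `buffer`, or both may be set is governed by the extension's valid-usage
// rules, which are not restated here.
#[allow(dead_code)]
fn _example_dedicated_allocation_for_image(image: Image) -> DedicatedAllocationMemoryAllocateInfoNV {
DedicatedAllocationMemoryAllocateInfoNV::builder()
.image(image)
.build()
}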
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkExternalImageFormatPropertiesNV.html>"]
pub struct ExternalImageFormatPropertiesNV {
pub image_format_properties: ImageFormatProperties,
pub external_memory_features: ExternalMemoryFeatureFlagsNV,
pub export_from_imported_handle_types: ExternalMemoryHandleTypeFlagsNV,
pub compatible_handle_types: ExternalMemoryHandleTypeFlagsNV,
}
impl ExternalImageFormatPropertiesNV {
pub fn builder<'a>() -> ExternalImageFormatPropertiesNVBuilder<'a> {
ExternalImageFormatPropertiesNVBuilder {
inner: ExternalImageFormatPropertiesNV::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ExternalImageFormatPropertiesNVBuilder<'a> {
inner: ExternalImageFormatPropertiesNV,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for ExternalImageFormatPropertiesNVBuilder<'a> {
type Target = ExternalImageFormatPropertiesNV;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ExternalImageFormatPropertiesNVBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ExternalImageFormatPropertiesNVBuilder<'a> {
pub fn image_format_properties(
mut self,
image_format_properties: ImageFormatProperties,
) -> ExternalImageFormatPropertiesNVBuilder<'a> {
self.inner.image_format_properties = image_format_properties;
self
}
pub fn external_memory_features(
mut self,
external_memory_features: ExternalMemoryFeatureFlagsNV,
) -> ExternalImageFormatPropertiesNVBuilder<'a> {
self.inner.external_memory_features = external_memory_features;
self
}
pub fn export_from_imported_handle_types(
mut self,
export_from_imported_handle_types: ExternalMemoryHandleTypeFlagsNV,
) -> ExternalImageFormatPropertiesNVBuilder<'a> {
self.inner.export_from_imported_handle_types = export_from_imported_handle_types;
self
}
pub fn compatible_handle_types(
mut self,
compatible_handle_types: ExternalMemoryHandleTypeFlagsNV,
) -> ExternalImageFormatPropertiesNVBuilder<'a> {
self.inner.compatible_handle_types = compatible_handle_types;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ExternalImageFormatPropertiesNV {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkExternalMemoryImageCreateInfoNV.html>"]
pub struct ExternalMemoryImageCreateInfoNV {
pub s_type: StructureType,
pub p_next: *const c_void,
pub handle_types: ExternalMemoryHandleTypeFlagsNV,
}
impl ::std::default::Default for ExternalMemoryImageCreateInfoNV {
fn default() -> ExternalMemoryImageCreateInfoNV {
ExternalMemoryImageCreateInfoNV {
s_type: StructureType::EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV,
p_next: ::std::ptr::null(),
handle_types: ExternalMemoryHandleTypeFlagsNV::default(),
}
}
}
impl ExternalMemoryImageCreateInfoNV {
pub fn builder<'a>() -> ExternalMemoryImageCreateInfoNVBuilder<'a> {
ExternalMemoryImageCreateInfoNVBuilder {
inner: ExternalMemoryImageCreateInfoNV::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ExternalMemoryImageCreateInfoNVBuilder<'a> {
inner: ExternalMemoryImageCreateInfoNV,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsImageCreateInfo for ExternalMemoryImageCreateInfoNVBuilder<'_> {}
unsafe impl ExtendsImageCreateInfo for ExternalMemoryImageCreateInfoNV {}
impl<'a> ::std::ops::Deref for ExternalMemoryImageCreateInfoNVBuilder<'a> {
type Target = ExternalMemoryImageCreateInfoNV;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ExternalMemoryImageCreateInfoNVBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ExternalMemoryImageCreateInfoNVBuilder<'a> {
pub fn handle_types(
mut self,
handle_types: ExternalMemoryHandleTypeFlagsNV,
) -> ExternalMemoryImageCreateInfoNVBuilder<'a> {
self.inner.handle_types = handle_types;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ExternalMemoryImageCreateInfoNV {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkExportMemoryAllocateInfoNV.html>"]
pub struct ExportMemoryAllocateInfoNV {
pub s_type: StructureType,
pub p_next: *const c_void,
pub handle_types: ExternalMemoryHandleTypeFlagsNV,
}
impl ::std::default::Default for ExportMemoryAllocateInfoNV {
fn default() -> ExportMemoryAllocateInfoNV {
ExportMemoryAllocateInfoNV {
s_type: StructureType::EXPORT_MEMORY_ALLOCATE_INFO_NV,
p_next: ::std::ptr::null(),
handle_types: ExternalMemoryHandleTypeFlagsNV::default(),
}
}
}
impl ExportMemoryAllocateInfoNV {
pub fn builder<'a>() -> ExportMemoryAllocateInfoNVBuilder<'a> {
ExportMemoryAllocateInfoNVBuilder {
inner: ExportMemoryAllocateInfoNV::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ExportMemoryAllocateInfoNVBuilder<'a> {
inner: ExportMemoryAllocateInfoNV,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsMemoryAllocateInfo for ExportMemoryAllocateInfoNVBuilder<'_> {}
unsafe impl ExtendsMemoryAllocateInfo for ExportMemoryAllocateInfoNV {}
impl<'a> ::std::ops::Deref for ExportMemoryAllocateInfoNVBuilder<'a> {
type Target = ExportMemoryAllocateInfoNV;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ExportMemoryAllocateInfoNVBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ExportMemoryAllocateInfoNVBuilder<'a> {
pub fn handle_types(
mut self,
handle_types: ExternalMemoryHandleTypeFlagsNV,
) -> ExportMemoryAllocateInfoNVBuilder<'a> {
self.inner.handle_types = handle_types;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ExportMemoryAllocateInfoNV {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkImportMemoryWin32HandleInfoNV.html>"]
pub struct ImportMemoryWin32HandleInfoNV {
pub s_type: StructureType,
pub p_next: *const c_void,
pub handle_type: ExternalMemoryHandleTypeFlagsNV,
pub handle: HANDLE,
}
impl ::std::default::Default for ImportMemoryWin32HandleInfoNV {
fn default() -> ImportMemoryWin32HandleInfoNV {
ImportMemoryWin32HandleInfoNV {
s_type: StructureType::IMPORT_MEMORY_WIN32_HANDLE_INFO_NV,
p_next: ::std::ptr::null(),
handle_type: ExternalMemoryHandleTypeFlagsNV::default(),
handle: ::std::ptr::null_mut(),
}
}
}
impl ImportMemoryWin32HandleInfoNV {
pub fn builder<'a>() -> ImportMemoryWin32HandleInfoNVBuilder<'a> {
ImportMemoryWin32HandleInfoNVBuilder {
inner: ImportMemoryWin32HandleInfoNV::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ImportMemoryWin32HandleInfoNVBuilder<'a> {
inner: ImportMemoryWin32HandleInfoNV,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsMemoryAllocateInfo for ImportMemoryWin32HandleInfoNVBuilder<'_> {}
unsafe impl ExtendsMemoryAllocateInfo for ImportMemoryWin32HandleInfoNV {}
impl<'a> ::std::ops::Deref for ImportMemoryWin32HandleInfoNVBuilder<'a> {
type Target = ImportMemoryWin32HandleInfoNV;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ImportMemoryWin32HandleInfoNVBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ImportMemoryWin32HandleInfoNVBuilder<'a> {
pub fn handle_type(
mut self,
handle_type: ExternalMemoryHandleTypeFlagsNV,
) -> ImportMemoryWin32HandleInfoNVBuilder<'a> {
self.inner.handle_type = handle_type;
self
}
pub fn handle(mut self, handle: HANDLE) -> ImportMemoryWin32HandleInfoNVBuilder<'a> {
self.inner.handle = handle;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ImportMemoryWin32HandleInfoNV {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkExportMemoryWin32HandleInfoNV.html>"]
pub struct ExportMemoryWin32HandleInfoNV {
pub s_type: StructureType,
pub p_next: *const c_void,
pub p_attributes: *const SECURITY_ATTRIBUTES,
pub dw_access: DWORD,
}
impl ::std::default::Default for ExportMemoryWin32HandleInfoNV {
fn default() -> ExportMemoryWin32HandleInfoNV {
ExportMemoryWin32HandleInfoNV {
s_type: StructureType::EXPORT_MEMORY_WIN32_HANDLE_INFO_NV,
p_next: ::std::ptr::null(),
p_attributes: ::std::ptr::null(),
dw_access: DWORD::default(),
}
}
}
impl ExportMemoryWin32HandleInfoNV {
pub fn builder<'a>() -> ExportMemoryWin32HandleInfoNVBuilder<'a> {
ExportMemoryWin32HandleInfoNVBuilder {
inner: ExportMemoryWin32HandleInfoNV::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ExportMemoryWin32HandleInfoNVBuilder<'a> {
inner: ExportMemoryWin32HandleInfoNV,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsMemoryAllocateInfo for ExportMemoryWin32HandleInfoNVBuilder<'_> {}
unsafe impl ExtendsMemoryAllocateInfo for ExportMemoryWin32HandleInfoNV {}
impl<'a> ::std::ops::Deref for ExportMemoryWin32HandleInfoNVBuilder<'a> {
type Target = ExportMemoryWin32HandleInfoNV;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ExportMemoryWin32HandleInfoNVBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ExportMemoryWin32HandleInfoNVBuilder<'a> {
pub fn attributes(
mut self,
attributes: &'a SECURITY_ATTRIBUTES,
) -> ExportMemoryWin32HandleInfoNVBuilder<'a> {
self.inner.p_attributes = attributes;
self
}
pub fn dw_access(mut self, dw_access: DWORD) -> ExportMemoryWin32HandleInfoNVBuilder<'a> {
self.inner.dw_access = dw_access;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ExportMemoryWin32HandleInfoNV {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkWin32KeyedMutexAcquireReleaseInfoNV.html>"]
pub struct Win32KeyedMutexAcquireReleaseInfoNV {
pub s_type: StructureType,
pub p_next: *const c_void,
pub acquire_count: u32,
pub p_acquire_syncs: *const DeviceMemory,
pub p_acquire_keys: *const u64,
pub p_acquire_timeout_milliseconds: *const u32,
pub release_count: u32,
pub p_release_syncs: *const DeviceMemory,
pub p_release_keys: *const u64,
}
impl ::std::default::Default for Win32KeyedMutexAcquireReleaseInfoNV {
fn default() -> Win32KeyedMutexAcquireReleaseInfoNV {
Win32KeyedMutexAcquireReleaseInfoNV {
s_type: StructureType::WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV,
p_next: ::std::ptr::null(),
acquire_count: u32::default(),
p_acquire_syncs: ::std::ptr::null(),
p_acquire_keys: ::std::ptr::null(),
p_acquire_timeout_milliseconds: ::std::ptr::null(),
release_count: u32::default(),
p_release_syncs: ::std::ptr::null(),
p_release_keys: ::std::ptr::null(),
}
}
}
impl Win32KeyedMutexAcquireReleaseInfoNV {
pub fn builder<'a>() -> Win32KeyedMutexAcquireReleaseInfoNVBuilder<'a> {
Win32KeyedMutexAcquireReleaseInfoNVBuilder {
inner: Win32KeyedMutexAcquireReleaseInfoNV::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct Win32KeyedMutexAcquireReleaseInfoNVBuilder<'a> {
inner: Win32KeyedMutexAcquireReleaseInfoNV,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsSubmitInfo for Win32KeyedMutexAcquireReleaseInfoNVBuilder<'_> {}
unsafe impl ExtendsSubmitInfo for Win32KeyedMutexAcquireReleaseInfoNV {}
impl<'a> ::std::ops::Deref for Win32KeyedMutexAcquireReleaseInfoNVBuilder<'a> {
type Target = Win32KeyedMutexAcquireReleaseInfoNV;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for Win32KeyedMutexAcquireReleaseInfoNVBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> Win32KeyedMutexAcquireReleaseInfoNVBuilder<'a> {
pub fn acquire_syncs(
mut self,
acquire_syncs: &'a [DeviceMemory],
) -> Win32KeyedMutexAcquireReleaseInfoNVBuilder<'a> {
self.inner.acquire_count = acquire_syncs.len() as _;
self.inner.p_acquire_syncs = acquire_syncs.as_ptr();
self
}
pub fn acquire_keys(
mut self,
acquire_keys: &'a [u64],
) -> Win32KeyedMutexAcquireReleaseInfoNVBuilder<'a> {
self.inner.acquire_count = acquire_keys.len() as _;
self.inner.p_acquire_keys = acquire_keys.as_ptr();
self
}
pub fn acquire_timeout_milliseconds(
mut self,
acquire_timeout_milliseconds: &'a [u32],
) -> Win32KeyedMutexAcquireReleaseInfoNVBuilder<'a> {
self.inner.acquire_count = acquire_timeout_milliseconds.len() as _;
self.inner.p_acquire_timeout_milliseconds = acquire_timeout_milliseconds.as_ptr();
self
}
pub fn release_syncs(
mut self,
release_syncs: &'a [DeviceMemory],
) -> Win32KeyedMutexAcquireReleaseInfoNVBuilder<'a> {
self.inner.release_count = release_syncs.len() as _;
self.inner.p_release_syncs = release_syncs.as_ptr();
self
}
pub fn release_keys(
mut self,
release_keys: &'a [u64],
) -> Win32KeyedMutexAcquireReleaseInfoNVBuilder<'a> {
self.inner.release_count = release_keys.len() as _;
self.inner.p_release_keys = release_keys.as_ptr();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> Win32KeyedMutexAcquireReleaseInfoNV {
self.inner
}
}
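// Illustrative sketch, not part of the generated bindings: `acquire_syncs`,
// `acquire_keys` and `acquire_timeout_milliseconds` each overwrite the shared
// `acquire_count` (only the last setter's length is kept), so the three slices must be
// parallel arrays of equal length; the same applies to the two `release_*` slices.
#[allow(dead_code)]
fn _example_win32_keyed_mutex_acquire_release<'a>(
acquire_syncs: &'a [DeviceMemory],
acquire_keys: &'a [u64],
acquire_timeouts_ms: &'a [u32],
release_syncs: &'a [DeviceMemory],
release_keys: &'a [u64],
) -> Win32KeyedMutexAcquireReleaseInfoNVBuilder<'a> {
debug_assert_eq!(acquire_syncs.len(), acquire_keys.len());
debug_assert_eq!(acquire_syncs.len(), acquire_timeouts_ms.len());
debug_assert_eq!(release_syncs.len(), release_keys.len());
Win32KeyedMutexAcquireReleaseInfoNV::builder()
.acquire_syncs(acquire_syncs)
.acquire_keys(acquire_keys)
.acquire_timeout_milliseconds(acquire_timeouts_ms)
.release_syncs(release_syncs)
.release_keys(release_keys)
}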
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDeviceGeneratedCommandsFeaturesNVX.html>"]
pub struct DeviceGeneratedCommandsFeaturesNVX {
pub s_type: StructureType,
pub p_next: *const c_void,
pub compute_binding_point_support: Bool32,
}
impl ::std::default::Default for DeviceGeneratedCommandsFeaturesNVX {
fn default() -> DeviceGeneratedCommandsFeaturesNVX {
DeviceGeneratedCommandsFeaturesNVX {
s_type: StructureType::DEVICE_GENERATED_COMMANDS_FEATURES_NVX,
p_next: ::std::ptr::null(),
compute_binding_point_support: Bool32::default(),
}
}
}
impl DeviceGeneratedCommandsFeaturesNVX {
pub fn builder<'a>() -> DeviceGeneratedCommandsFeaturesNVXBuilder<'a> {
DeviceGeneratedCommandsFeaturesNVXBuilder {
inner: DeviceGeneratedCommandsFeaturesNVX::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct DeviceGeneratedCommandsFeaturesNVXBuilder<'a> {
inner: DeviceGeneratedCommandsFeaturesNVX,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsDeviceGeneratedCommandsFeaturesNVX {}
impl<'a> ::std::ops::Deref for DeviceGeneratedCommandsFeaturesNVXBuilder<'a> {
type Target = DeviceGeneratedCommandsFeaturesNVX;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for DeviceGeneratedCommandsFeaturesNVXBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> DeviceGeneratedCommandsFeaturesNVXBuilder<'a> {
pub fn compute_binding_point_support(
mut self,
compute_binding_point_support: bool,
) -> DeviceGeneratedCommandsFeaturesNVXBuilder<'a> {
self.inner.compute_binding_point_support = compute_binding_point_support.into();
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsDeviceGeneratedCommandsFeaturesNVX>(
mut self,
next: &'a mut T,
) -> DeviceGeneratedCommandsFeaturesNVXBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> DeviceGeneratedCommandsFeaturesNVX {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDeviceGeneratedCommandsLimitsNVX.html>"]
pub struct DeviceGeneratedCommandsLimitsNVX {
pub s_type: StructureType,
pub p_next: *const c_void,
pub max_indirect_commands_layout_token_count: u32,
pub max_object_entry_counts: u32,
pub min_sequence_count_buffer_offset_alignment: u32,
pub min_sequence_index_buffer_offset_alignment: u32,
pub min_commands_token_buffer_offset_alignment: u32,
}
impl ::std::default::Default for DeviceGeneratedCommandsLimitsNVX {
fn default() -> DeviceGeneratedCommandsLimitsNVX {
DeviceGeneratedCommandsLimitsNVX {
s_type: StructureType::DEVICE_GENERATED_COMMANDS_LIMITS_NVX,
p_next: ::std::ptr::null(),
max_indirect_commands_layout_token_count: u32::default(),
max_object_entry_counts: u32::default(),
min_sequence_count_buffer_offset_alignment: u32::default(),
min_sequence_index_buffer_offset_alignment: u32::default(),
min_commands_token_buffer_offset_alignment: u32::default(),
}
}
}
impl DeviceGeneratedCommandsLimitsNVX {
pub fn builder<'a>() -> DeviceGeneratedCommandsLimitsNVXBuilder<'a> {
DeviceGeneratedCommandsLimitsNVXBuilder {
inner: DeviceGeneratedCommandsLimitsNVX::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct DeviceGeneratedCommandsLimitsNVXBuilder<'a> {
inner: DeviceGeneratedCommandsLimitsNVX,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsDeviceGeneratedCommandsLimitsNVX {}
impl<'a> ::std::ops::Deref for DeviceGeneratedCommandsLimitsNVXBuilder<'a> {
type Target = DeviceGeneratedCommandsLimitsNVX;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for DeviceGeneratedCommandsLimitsNVXBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> DeviceGeneratedCommandsLimitsNVXBuilder<'a> {
pub fn max_indirect_commands_layout_token_count(
mut self,
max_indirect_commands_layout_token_count: u32,
) -> DeviceGeneratedCommandsLimitsNVXBuilder<'a> {
self.inner.max_indirect_commands_layout_token_count =
max_indirect_commands_layout_token_count;
self
}
pub fn max_object_entry_counts(
mut self,
max_object_entry_counts: u32,
) -> DeviceGeneratedCommandsLimitsNVXBuilder<'a> {
self.inner.max_object_entry_counts = max_object_entry_counts;
self
}
pub fn min_sequence_count_buffer_offset_alignment(
mut self,
min_sequence_count_buffer_offset_alignment: u32,
) -> DeviceGeneratedCommandsLimitsNVXBuilder<'a> {
self.inner.min_sequence_count_buffer_offset_alignment =
min_sequence_count_buffer_offset_alignment;
self
}
pub fn min_sequence_index_buffer_offset_alignment(
mut self,
min_sequence_index_buffer_offset_alignment: u32,
) -> DeviceGeneratedCommandsLimitsNVXBuilder<'a> {
self.inner.min_sequence_index_buffer_offset_alignment =
min_sequence_index_buffer_offset_alignment;
self
}
pub fn min_commands_token_buffer_offset_alignment(
mut self,
min_commands_token_buffer_offset_alignment: u32,
) -> DeviceGeneratedCommandsLimitsNVXBuilder<'a> {
self.inner.min_commands_token_buffer_offset_alignment =
min_commands_token_buffer_offset_alignment;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsDeviceGeneratedCommandsLimitsNVX>(
mut self,
next: &'a mut T,
) -> DeviceGeneratedCommandsLimitsNVXBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> DeviceGeneratedCommandsLimitsNVX {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkIndirectCommandsTokenNVX.html>"]
pub struct IndirectCommandsTokenNVX {
pub token_type: IndirectCommandsTokenTypeNVX,
pub buffer: Buffer,
pub offset: DeviceSize,
}
impl IndirectCommandsTokenNVX {
pub fn builder<'a>() -> IndirectCommandsTokenNVXBuilder<'a> {
IndirectCommandsTokenNVXBuilder {
inner: IndirectCommandsTokenNVX::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct IndirectCommandsTokenNVXBuilder<'a> {
inner: IndirectCommandsTokenNVX,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for IndirectCommandsTokenNVXBuilder<'a> {
type Target = IndirectCommandsTokenNVX;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for IndirectCommandsTokenNVXBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> IndirectCommandsTokenNVXBuilder<'a> {
pub fn token_type(
mut self,
token_type: IndirectCommandsTokenTypeNVX,
) -> IndirectCommandsTokenNVXBuilder<'a> {
self.inner.token_type = token_type;
self
}
pub fn buffer(mut self, buffer: Buffer) -> IndirectCommandsTokenNVXBuilder<'a> {
self.inner.buffer = buffer;
self
}
pub fn offset(mut self, offset: DeviceSize) -> IndirectCommandsTokenNVXBuilder<'a> {
self.inner.offset = offset;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> IndirectCommandsTokenNVX {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkIndirectCommandsLayoutTokenNVX.html>"]
pub struct IndirectCommandsLayoutTokenNVX {
pub token_type: IndirectCommandsTokenTypeNVX,
pub binding_unit: u32,
pub dynamic_count: u32,
pub divisor: u32,
}
impl IndirectCommandsLayoutTokenNVX {
pub fn builder<'a>() -> IndirectCommandsLayoutTokenNVXBuilder<'a> {
IndirectCommandsLayoutTokenNVXBuilder {
inner: IndirectCommandsLayoutTokenNVX::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct IndirectCommandsLayoutTokenNVXBuilder<'a> {
inner: IndirectCommandsLayoutTokenNVX,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for IndirectCommandsLayoutTokenNVXBuilder<'a> {
type Target = IndirectCommandsLayoutTokenNVX;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for IndirectCommandsLayoutTokenNVXBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> IndirectCommandsLayoutTokenNVXBuilder<'a> {
pub fn token_type(
mut self,
token_type: IndirectCommandsTokenTypeNVX,
) -> IndirectCommandsLayoutTokenNVXBuilder<'a> {
self.inner.token_type = token_type;
self
}
pub fn binding_unit(mut self, binding_unit: u32) -> IndirectCommandsLayoutTokenNVXBuilder<'a> {
self.inner.binding_unit = binding_unit;
self
}
pub fn dynamic_count(
mut self,
dynamic_count: u32,
) -> IndirectCommandsLayoutTokenNVXBuilder<'a> {
self.inner.dynamic_count = dynamic_count;
self
}
pub fn divisor(mut self, divisor: u32) -> IndirectCommandsLayoutTokenNVXBuilder<'a> {
self.inner.divisor = divisor;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> IndirectCommandsLayoutTokenNVX {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkIndirectCommandsLayoutCreateInfoNVX.html>"]
pub struct IndirectCommandsLayoutCreateInfoNVX {
pub s_type: StructureType,
pub p_next: *const c_void,
pub pipeline_bind_point: PipelineBindPoint,
pub flags: IndirectCommandsLayoutUsageFlagsNVX,
pub token_count: u32,
pub p_tokens: *const IndirectCommandsLayoutTokenNVX,
}
impl ::std::default::Default for IndirectCommandsLayoutCreateInfoNVX {
fn default() -> IndirectCommandsLayoutCreateInfoNVX {
IndirectCommandsLayoutCreateInfoNVX {
s_type: StructureType::INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NVX,
p_next: ::std::ptr::null(),
pipeline_bind_point: PipelineBindPoint::default(),
flags: IndirectCommandsLayoutUsageFlagsNVX::default(),
token_count: u32::default(),
p_tokens: ::std::ptr::null(),
}
}
}
impl IndirectCommandsLayoutCreateInfoNVX {
pub fn builder<'a>() -> IndirectCommandsLayoutCreateInfoNVXBuilder<'a> {
IndirectCommandsLayoutCreateInfoNVXBuilder {
inner: IndirectCommandsLayoutCreateInfoNVX::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct IndirectCommandsLayoutCreateInfoNVXBuilder<'a> {
inner: IndirectCommandsLayoutCreateInfoNVX,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsIndirectCommandsLayoutCreateInfoNVX {}
impl<'a> ::std::ops::Deref for IndirectCommandsLayoutCreateInfoNVXBuilder<'a> {
type Target = IndirectCommandsLayoutCreateInfoNVX;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for IndirectCommandsLayoutCreateInfoNVXBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> IndirectCommandsLayoutCreateInfoNVXBuilder<'a> {
pub fn pipeline_bind_point(
mut self,
pipeline_bind_point: PipelineBindPoint,
) -> IndirectCommandsLayoutCreateInfoNVXBuilder<'a> {
self.inner.pipeline_bind_point = pipeline_bind_point;
self
}
pub fn flags(
mut self,
flags: IndirectCommandsLayoutUsageFlagsNVX,
) -> IndirectCommandsLayoutCreateInfoNVXBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn tokens(
mut self,
tokens: &'a [IndirectCommandsLayoutTokenNVX],
) -> IndirectCommandsLayoutCreateInfoNVXBuilder<'a> {
self.inner.token_count = tokens.len() as _;
self.inner.p_tokens = tokens.as_ptr();
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsIndirectCommandsLayoutCreateInfoNVX>(
mut self,
next: &'a mut T,
) -> IndirectCommandsLayoutCreateInfoNVXBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> IndirectCommandsLayoutCreateInfoNVX {
self.inner
}
}
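// Illustrative sketch, not part of the generated bindings: layout tokens are plain
// `Copy` values, so they can be built up front and collected into a slice that
// `tokens` then borrows for `'a` (writing both `token_count` and `p_tokens`). The
// default `token_type` and the literal counts below are placeholders, not meaningful
// values.
#[allow(dead_code)]
fn _example_indirect_commands_layout_token(binding_unit: u32) -> IndirectCommandsLayoutTokenNVX {
IndirectCommandsLayoutTokenNVX::builder()
.token_type(IndirectCommandsTokenTypeNVX::default())
.binding_unit(binding_unit)
.dynamic_count(1)
.divisor(1)
.build()
}
#[allow(dead_code)]
fn _example_indirect_commands_layout_create_info<'a>(
tokens: &'a [IndirectCommandsLayoutTokenNVX],
bind_point: PipelineBindPoint,
) -> IndirectCommandsLayoutCreateInfoNVXBuilder<'a> {
IndirectCommandsLayoutCreateInfoNVX::builder()
.pipeline_bind_point(bind_point)
.tokens(tokens)
}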
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkCmdProcessCommandsInfoNVX.html>"]
pub struct CmdProcessCommandsInfoNVX {
pub s_type: StructureType,
pub p_next: *const c_void,
pub object_table: ObjectTableNVX,
pub indirect_commands_layout: IndirectCommandsLayoutNVX,
pub indirect_commands_token_count: u32,
pub p_indirect_commands_tokens: *const IndirectCommandsTokenNVX,
pub max_sequences_count: u32,
pub target_command_buffer: CommandBuffer,
pub sequences_count_buffer: Buffer,
pub sequences_count_offset: DeviceSize,
pub sequences_index_buffer: Buffer,
pub sequences_index_offset: DeviceSize,
}
impl ::std::default::Default for CmdProcessCommandsInfoNVX {
fn default() -> CmdProcessCommandsInfoNVX {
CmdProcessCommandsInfoNVX {
s_type: StructureType::CMD_PROCESS_COMMANDS_INFO_NVX,
p_next: ::std::ptr::null(),
object_table: ObjectTableNVX::default(),
indirect_commands_layout: IndirectCommandsLayoutNVX::default(),
indirect_commands_token_count: u32::default(),
p_indirect_commands_tokens: ::std::ptr::null(),
max_sequences_count: u32::default(),
target_command_buffer: CommandBuffer::default(),
sequences_count_buffer: Buffer::default(),
sequences_count_offset: DeviceSize::default(),
sequences_index_buffer: Buffer::default(),
sequences_index_offset: DeviceSize::default(),
}
}
}
impl CmdProcessCommandsInfoNVX {
pub fn builder<'a>() -> CmdProcessCommandsInfoNVXBuilder<'a> {
CmdProcessCommandsInfoNVXBuilder {
inner: CmdProcessCommandsInfoNVX::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct CmdProcessCommandsInfoNVXBuilder<'a> {
inner: CmdProcessCommandsInfoNVX,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsCmdProcessCommandsInfoNVX {}
impl<'a> ::std::ops::Deref for CmdProcessCommandsInfoNVXBuilder<'a> {
type Target = CmdProcessCommandsInfoNVX;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for CmdProcessCommandsInfoNVXBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> CmdProcessCommandsInfoNVXBuilder<'a> {
pub fn object_table(
mut self,
object_table: ObjectTableNVX,
) -> CmdProcessCommandsInfoNVXBuilder<'a> {
self.inner.object_table = object_table;
self
}
pub fn indirect_commands_layout(
mut self,
indirect_commands_layout: IndirectCommandsLayoutNVX,
) -> CmdProcessCommandsInfoNVXBuilder<'a> {
self.inner.indirect_commands_layout = indirect_commands_layout;
self
}
pub fn indirect_commands_tokens(
mut self,
indirect_commands_tokens: &'a [IndirectCommandsTokenNVX],
) -> CmdProcessCommandsInfoNVXBuilder<'a> {
self.inner.indirect_commands_token_count = indirect_commands_tokens.len() as _;
self.inner.p_indirect_commands_tokens = indirect_commands_tokens.as_ptr();
self
}
pub fn max_sequences_count(
mut self,
max_sequences_count: u32,
) -> CmdProcessCommandsInfoNVXBuilder<'a> {
self.inner.max_sequences_count = max_sequences_count;
self
}
pub fn target_command_buffer(
mut self,
target_command_buffer: CommandBuffer,
) -> CmdProcessCommandsInfoNVXBuilder<'a> {
self.inner.target_command_buffer = target_command_buffer;
self
}
pub fn sequences_count_buffer(
mut self,
sequences_count_buffer: Buffer,
) -> CmdProcessCommandsInfoNVXBuilder<'a> {
self.inner.sequences_count_buffer = sequences_count_buffer;
self
}
pub fn sequences_count_offset(
mut self,
sequences_count_offset: DeviceSize,
) -> CmdProcessCommandsInfoNVXBuilder<'a> {
self.inner.sequences_count_offset = sequences_count_offset;
self
}
pub fn sequences_index_buffer(
mut self,
sequences_index_buffer: Buffer,
) -> CmdProcessCommandsInfoNVXBuilder<'a> {
self.inner.sequences_index_buffer = sequences_index_buffer;
self
}
pub fn sequences_index_offset(
mut self,
sequences_index_offset: DeviceSize,
) -> CmdProcessCommandsInfoNVXBuilder<'a> {
self.inner.sequences_index_offset = sequences_index_offset;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsCmdProcessCommandsInfoNVX>(
mut self,
next: &'a mut T,
) -> CmdProcessCommandsInfoNVXBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> CmdProcessCommandsInfoNVX {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkCmdReserveSpaceForCommandsInfoNVX.html>"]
pub struct CmdReserveSpaceForCommandsInfoNVX {
pub s_type: StructureType,
pub p_next: *const c_void,
pub object_table: ObjectTableNVX,
pub indirect_commands_layout: IndirectCommandsLayoutNVX,
pub max_sequences_count: u32,
}
impl ::std::default::Default for CmdReserveSpaceForCommandsInfoNVX {
fn default() -> CmdReserveSpaceForCommandsInfoNVX {
CmdReserveSpaceForCommandsInfoNVX {
s_type: StructureType::CMD_RESERVE_SPACE_FOR_COMMANDS_INFO_NVX,
p_next: ::std::ptr::null(),
object_table: ObjectTableNVX::default(),
indirect_commands_layout: IndirectCommandsLayoutNVX::default(),
max_sequences_count: u32::default(),
}
}
}
impl CmdReserveSpaceForCommandsInfoNVX {
pub fn builder<'a>() -> CmdReserveSpaceForCommandsInfoNVXBuilder<'a> {
CmdReserveSpaceForCommandsInfoNVXBuilder {
inner: CmdReserveSpaceForCommandsInfoNVX::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct CmdReserveSpaceForCommandsInfoNVXBuilder<'a> {
inner: CmdReserveSpaceForCommandsInfoNVX,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsCmdReserveSpaceForCommandsInfoNVX {}
impl<'a> ::std::ops::Deref for CmdReserveSpaceForCommandsInfoNVXBuilder<'a> {
type Target = CmdReserveSpaceForCommandsInfoNVX;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for CmdReserveSpaceForCommandsInfoNVXBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> CmdReserveSpaceForCommandsInfoNVXBuilder<'a> {
pub fn object_table(
mut self,
object_table: ObjectTableNVX,
) -> CmdReserveSpaceForCommandsInfoNVXBuilder<'a> {
self.inner.object_table = object_table;
self
}
pub fn indirect_commands_layout(
mut self,
indirect_commands_layout: IndirectCommandsLayoutNVX,
) -> CmdReserveSpaceForCommandsInfoNVXBuilder<'a> {
self.inner.indirect_commands_layout = indirect_commands_layout;
self
}
pub fn max_sequences_count(
mut self,
max_sequences_count: u32,
) -> CmdReserveSpaceForCommandsInfoNVXBuilder<'a> {
self.inner.max_sequences_count = max_sequences_count;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsCmdReserveSpaceForCommandsInfoNVX>(
mut self,
next: &'a mut T,
) -> CmdReserveSpaceForCommandsInfoNVXBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> CmdReserveSpaceForCommandsInfoNVX {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkObjectTableCreateInfoNVX.html>"]
pub struct ObjectTableCreateInfoNVX {
pub s_type: StructureType,
pub p_next: *const c_void,
pub object_count: u32,
pub p_object_entry_types: *const ObjectEntryTypeNVX,
pub p_object_entry_counts: *const u32,
pub p_object_entry_usage_flags: *const ObjectEntryUsageFlagsNVX,
pub max_uniform_buffers_per_descriptor: u32,
pub max_storage_buffers_per_descriptor: u32,
pub max_storage_images_per_descriptor: u32,
pub max_sampled_images_per_descriptor: u32,
pub max_pipeline_layouts: u32,
}
impl ::std::default::Default for ObjectTableCreateInfoNVX {
fn default() -> ObjectTableCreateInfoNVX {
ObjectTableCreateInfoNVX {
s_type: StructureType::OBJECT_TABLE_CREATE_INFO_NVX,
p_next: ::std::ptr::null(),
object_count: u32::default(),
p_object_entry_types: ::std::ptr::null(),
p_object_entry_counts: ::std::ptr::null(),
p_object_entry_usage_flags: ::std::ptr::null(),
max_uniform_buffers_per_descriptor: u32::default(),
max_storage_buffers_per_descriptor: u32::default(),
max_storage_images_per_descriptor: u32::default(),
max_sampled_images_per_descriptor: u32::default(),
max_pipeline_layouts: u32::default(),
}
}
}
impl ObjectTableCreateInfoNVX {
pub fn builder<'a>() -> ObjectTableCreateInfoNVXBuilder<'a> {
ObjectTableCreateInfoNVXBuilder {
inner: ObjectTableCreateInfoNVX::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ObjectTableCreateInfoNVXBuilder<'a> {
inner: ObjectTableCreateInfoNVX,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsObjectTableCreateInfoNVX {}
impl<'a> ::std::ops::Deref for ObjectTableCreateInfoNVXBuilder<'a> {
type Target = ObjectTableCreateInfoNVX;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ObjectTableCreateInfoNVXBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ObjectTableCreateInfoNVXBuilder<'a> {
pub fn object_entry_types(
mut self,
object_entry_types: &'a [ObjectEntryTypeNVX],
) -> ObjectTableCreateInfoNVXBuilder<'a> {
self.inner.object_count = object_entry_types.len() as _;
self.inner.p_object_entry_types = object_entry_types.as_ptr();
self
}
pub fn object_entry_counts(
mut self,
object_entry_counts: &'a [u32],
) -> ObjectTableCreateInfoNVXBuilder<'a> {
self.inner.object_count = object_entry_counts.len() as _;
self.inner.p_object_entry_counts = object_entry_counts.as_ptr();
self
}
pub fn object_entry_usage_flags(
mut self,
object_entry_usage_flags: &'a [ObjectEntryUsageFlagsNVX],
) -> ObjectTableCreateInfoNVXBuilder<'a> {
self.inner.object_count = object_entry_usage_flags.len() as _;
self.inner.p_object_entry_usage_flags = object_entry_usage_flags.as_ptr();
self
}
pub fn max_uniform_buffers_per_descriptor(
mut self,
max_uniform_buffers_per_descriptor: u32,
) -> ObjectTableCreateInfoNVXBuilder<'a> {
self.inner.max_uniform_buffers_per_descriptor = max_uniform_buffers_per_descriptor;
self
}
pub fn max_storage_buffers_per_descriptor(
mut self,
max_storage_buffers_per_descriptor: u32,
) -> ObjectTableCreateInfoNVXBuilder<'a> {
self.inner.max_storage_buffers_per_descriptor = max_storage_buffers_per_descriptor;
self
}
pub fn max_storage_images_per_descriptor(
mut self,
max_storage_images_per_descriptor: u32,
) -> ObjectTableCreateInfoNVXBuilder<'a> {
self.inner.max_storage_images_per_descriptor = max_storage_images_per_descriptor;
self
}
pub fn max_sampled_images_per_descriptor(
mut self,
max_sampled_images_per_descriptor: u32,
) -> ObjectTableCreateInfoNVXBuilder<'a> {
self.inner.max_sampled_images_per_descriptor = max_sampled_images_per_descriptor;
self
}
pub fn max_pipeline_layouts(
mut self,
max_pipeline_layouts: u32,
) -> ObjectTableCreateInfoNVXBuilder<'a> {
self.inner.max_pipeline_layouts = max_pipeline_layouts;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsObjectTableCreateInfoNVX>(
mut self,
next: &'a mut T,
) -> ObjectTableCreateInfoNVXBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ObjectTableCreateInfoNVX {
self.inner
}
}
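// A minimal sketch of the parallel-array setters above. Each slice setter overwrites
// `object_count` with its own slice length, so the three slices are expected to describe the
// same set of entries (one entry type, count and usage-flag value per object entry). The
// builder is returned rather than built so the borrows of the slices stay visible to the
// caller; the slices must outlive any struct built from it.
#[allow(dead_code)]
fn example_object_table_create_info<'a>(
    entry_types: &'a [ObjectEntryTypeNVX],
    entry_counts: &'a [u32],
    entry_usage_flags: &'a [ObjectEntryUsageFlagsNVX],
) -> ObjectTableCreateInfoNVXBuilder<'a> {
    ObjectTableCreateInfoNVX::builder()
        .object_entry_types(entry_types)
        .object_entry_counts(entry_counts)
        .object_entry_usage_flags(entry_usage_flags)
        .max_uniform_buffers_per_descriptor(1)
        .max_pipeline_layouts(1)
}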
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkObjectTableEntryNVX.html>"]
pub struct ObjectTableEntryNVX {
pub ty: ObjectEntryTypeNVX,
pub flags: ObjectEntryUsageFlagsNVX,
}
impl ObjectTableEntryNVX {
pub fn builder<'a>() -> ObjectTableEntryNVXBuilder<'a> {
ObjectTableEntryNVXBuilder {
inner: ObjectTableEntryNVX::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ObjectTableEntryNVXBuilder<'a> {
inner: ObjectTableEntryNVX,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for ObjectTableEntryNVXBuilder<'a> {
type Target = ObjectTableEntryNVX;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ObjectTableEntryNVXBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ObjectTableEntryNVXBuilder<'a> {
pub fn ty(mut self, ty: ObjectEntryTypeNVX) -> ObjectTableEntryNVXBuilder<'a> {
self.inner.ty = ty;
self
}
pub fn flags(mut self, flags: ObjectEntryUsageFlagsNVX) -> ObjectTableEntryNVXBuilder<'a> {
self.inner.flags = flags;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ObjectTableEntryNVX {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkObjectTablePipelineEntryNVX.html>"]
pub struct ObjectTablePipelineEntryNVX {
pub ty: ObjectEntryTypeNVX,
pub flags: ObjectEntryUsageFlagsNVX,
pub pipeline: Pipeline,
}
impl ObjectTablePipelineEntryNVX {
pub fn builder<'a>() -> ObjectTablePipelineEntryNVXBuilder<'a> {
ObjectTablePipelineEntryNVXBuilder {
inner: ObjectTablePipelineEntryNVX::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ObjectTablePipelineEntryNVXBuilder<'a> {
inner: ObjectTablePipelineEntryNVX,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for ObjectTablePipelineEntryNVXBuilder<'a> {
type Target = ObjectTablePipelineEntryNVX;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ObjectTablePipelineEntryNVXBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ObjectTablePipelineEntryNVXBuilder<'a> {
pub fn ty(mut self, ty: ObjectEntryTypeNVX) -> ObjectTablePipelineEntryNVXBuilder<'a> {
self.inner.ty = ty;
self
}
pub fn flags(
mut self,
flags: ObjectEntryUsageFlagsNVX,
) -> ObjectTablePipelineEntryNVXBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn pipeline(mut self, pipeline: Pipeline) -> ObjectTablePipelineEntryNVXBuilder<'a> {
self.inner.pipeline = pipeline;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ObjectTablePipelineEntryNVX {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkObjectTableDescriptorSetEntryNVX.html>"]
pub struct ObjectTableDescriptorSetEntryNVX {
pub ty: ObjectEntryTypeNVX,
pub flags: ObjectEntryUsageFlagsNVX,
pub pipeline_layout: PipelineLayout,
pub descriptor_set: DescriptorSet,
}
impl ObjectTableDescriptorSetEntryNVX {
pub fn builder<'a>() -> ObjectTableDescriptorSetEntryNVXBuilder<'a> {
ObjectTableDescriptorSetEntryNVXBuilder {
inner: ObjectTableDescriptorSetEntryNVX::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ObjectTableDescriptorSetEntryNVXBuilder<'a> {
inner: ObjectTableDescriptorSetEntryNVX,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for ObjectTableDescriptorSetEntryNVXBuilder<'a> {
type Target = ObjectTableDescriptorSetEntryNVX;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ObjectTableDescriptorSetEntryNVXBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ObjectTableDescriptorSetEntryNVXBuilder<'a> {
pub fn ty(mut self, ty: ObjectEntryTypeNVX) -> ObjectTableDescriptorSetEntryNVXBuilder<'a> {
self.inner.ty = ty;
self
}
pub fn flags(
mut self,
flags: ObjectEntryUsageFlagsNVX,
) -> ObjectTableDescriptorSetEntryNVXBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn pipeline_layout(
mut self,
pipeline_layout: PipelineLayout,
) -> ObjectTableDescriptorSetEntryNVXBuilder<'a> {
self.inner.pipeline_layout = pipeline_layout;
self
}
pub fn descriptor_set(
mut self,
descriptor_set: DescriptorSet,
) -> ObjectTableDescriptorSetEntryNVXBuilder<'a> {
self.inner.descriptor_set = descriptor_set;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ObjectTableDescriptorSetEntryNVX {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkObjectTableVertexBufferEntryNVX.html>"]
pub struct ObjectTableVertexBufferEntryNVX {
pub ty: ObjectEntryTypeNVX,
pub flags: ObjectEntryUsageFlagsNVX,
pub buffer: Buffer,
}
impl ObjectTableVertexBufferEntryNVX {
pub fn builder<'a>() -> ObjectTableVertexBufferEntryNVXBuilder<'a> {
ObjectTableVertexBufferEntryNVXBuilder {
inner: ObjectTableVertexBufferEntryNVX::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ObjectTableVertexBufferEntryNVXBuilder<'a> {
inner: ObjectTableVertexBufferEntryNVX,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for ObjectTableVertexBufferEntryNVXBuilder<'a> {
type Target = ObjectTableVertexBufferEntryNVX;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ObjectTableVertexBufferEntryNVXBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ObjectTableVertexBufferEntryNVXBuilder<'a> {
pub fn ty(mut self, ty: ObjectEntryTypeNVX) -> ObjectTableVertexBufferEntryNVXBuilder<'a> {
self.inner.ty = ty;
self
}
pub fn flags(
mut self,
flags: ObjectEntryUsageFlagsNVX,
) -> ObjectTableVertexBufferEntryNVXBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn buffer(mut self, buffer: Buffer) -> ObjectTableVertexBufferEntryNVXBuilder<'a> {
self.inner.buffer = buffer;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ObjectTableVertexBufferEntryNVX {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkObjectTableIndexBufferEntryNVX.html>"]
pub struct ObjectTableIndexBufferEntryNVX {
pub ty: ObjectEntryTypeNVX,
pub flags: ObjectEntryUsageFlagsNVX,
pub buffer: Buffer,
pub index_type: IndexType,
}
impl ObjectTableIndexBufferEntryNVX {
pub fn builder<'a>() -> ObjectTableIndexBufferEntryNVXBuilder<'a> {
ObjectTableIndexBufferEntryNVXBuilder {
inner: ObjectTableIndexBufferEntryNVX::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ObjectTableIndexBufferEntryNVXBuilder<'a> {
inner: ObjectTableIndexBufferEntryNVX,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for ObjectTableIndexBufferEntryNVXBuilder<'a> {
type Target = ObjectTableIndexBufferEntryNVX;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ObjectTableIndexBufferEntryNVXBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ObjectTableIndexBufferEntryNVXBuilder<'a> {
pub fn ty(mut self, ty: ObjectEntryTypeNVX) -> ObjectTableIndexBufferEntryNVXBuilder<'a> {
self.inner.ty = ty;
self
}
pub fn flags(
mut self,
flags: ObjectEntryUsageFlagsNVX,
) -> ObjectTableIndexBufferEntryNVXBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn buffer(mut self, buffer: Buffer) -> ObjectTableIndexBufferEntryNVXBuilder<'a> {
self.inner.buffer = buffer;
self
}
pub fn index_type(
mut self,
index_type: IndexType,
) -> ObjectTableIndexBufferEntryNVXBuilder<'a> {
self.inner.index_type = index_type;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ObjectTableIndexBufferEntryNVX {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkObjectTablePushConstantEntryNVX.html>"]
pub struct ObjectTablePushConstantEntryNVX {
pub ty: ObjectEntryTypeNVX,
pub flags: ObjectEntryUsageFlagsNVX,
pub pipeline_layout: PipelineLayout,
pub stage_flags: ShaderStageFlags,
}
impl ObjectTablePushConstantEntryNVX {
pub fn builder<'a>() -> ObjectTablePushConstantEntryNVXBuilder<'a> {
ObjectTablePushConstantEntryNVXBuilder {
inner: ObjectTablePushConstantEntryNVX::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ObjectTablePushConstantEntryNVXBuilder<'a> {
inner: ObjectTablePushConstantEntryNVX,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for ObjectTablePushConstantEntryNVXBuilder<'a> {
type Target = ObjectTablePushConstantEntryNVX;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ObjectTablePushConstantEntryNVXBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ObjectTablePushConstantEntryNVXBuilder<'a> {
pub fn ty(mut self, ty: ObjectEntryTypeNVX) -> ObjectTablePushConstantEntryNVXBuilder<'a> {
self.inner.ty = ty;
self
}
pub fn flags(
mut self,
flags: ObjectEntryUsageFlagsNVX,
) -> ObjectTablePushConstantEntryNVXBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn pipeline_layout(
mut self,
pipeline_layout: PipelineLayout,
) -> ObjectTablePushConstantEntryNVXBuilder<'a> {
self.inner.pipeline_layout = pipeline_layout;
self
}
pub fn stage_flags(
mut self,
stage_flags: ShaderStageFlags,
) -> ObjectTablePushConstantEntryNVXBuilder<'a> {
self.inner.stage_flags = stage_flags;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ObjectTablePushConstantEntryNVX {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDeviceFeatures2.html>"]
pub struct PhysicalDeviceFeatures2 {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub features: PhysicalDeviceFeatures,
}
impl ::std::default::Default for PhysicalDeviceFeatures2 {
fn default() -> PhysicalDeviceFeatures2 {
PhysicalDeviceFeatures2 {
s_type: StructureType::PHYSICAL_DEVICE_FEATURES_2,
p_next: ::std::ptr::null_mut(),
features: PhysicalDeviceFeatures::default(),
}
}
}
impl PhysicalDeviceFeatures2 {
pub fn builder<'a>() -> PhysicalDeviceFeatures2Builder<'a> {
PhysicalDeviceFeatures2Builder {
inner: PhysicalDeviceFeatures2::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDeviceFeatures2Builder<'a> {
inner: PhysicalDeviceFeatures2,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceFeatures2Builder<'_> {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceFeatures2 {}
impl<'a> ::std::ops::Deref for PhysicalDeviceFeatures2Builder<'a> {
type Target = PhysicalDeviceFeatures2;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceFeatures2Builder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDeviceFeatures2Builder<'a> {
pub fn features(
mut self,
features: PhysicalDeviceFeatures,
) -> PhysicalDeviceFeatures2Builder<'a> {
self.inner.features = features;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDeviceFeatures2 {
self.inner
}
}
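// A minimal sketch: because PhysicalDeviceFeatures2 implements ExtendsDeviceCreateInfo (see
// the unsafe impls above), a filled-in value can later be pushed onto a DeviceCreateInfo
// chain via that builder's `push_next` (defined elsewhere in these bindings). Here it is
// simply populated from a PhysicalDeviceFeatures value supplied by the caller.
#[allow(dead_code)]
fn example_physical_device_features2(features: PhysicalDeviceFeatures) -> PhysicalDeviceFeatures2 {
    PhysicalDeviceFeatures2::builder().features(features).build()
}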
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDeviceProperties2.html>"]
pub struct PhysicalDeviceProperties2 {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub properties: PhysicalDeviceProperties,
}
impl ::std::default::Default for PhysicalDeviceProperties2 {
fn default() -> PhysicalDeviceProperties2 {
PhysicalDeviceProperties2 {
s_type: StructureType::PHYSICAL_DEVICE_PROPERTIES_2,
p_next: ::std::ptr::null_mut(),
properties: PhysicalDeviceProperties::default(),
}
}
}
impl PhysicalDeviceProperties2 {
pub fn builder<'a>() -> PhysicalDeviceProperties2Builder<'a> {
PhysicalDeviceProperties2Builder {
inner: PhysicalDeviceProperties2::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDeviceProperties2Builder<'a> {
inner: PhysicalDeviceProperties2,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsPhysicalDeviceProperties2 {}
impl<'a> ::std::ops::Deref for PhysicalDeviceProperties2Builder<'a> {
type Target = PhysicalDeviceProperties2;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceProperties2Builder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDeviceProperties2Builder<'a> {
pub fn properties(
mut self,
properties: PhysicalDeviceProperties,
) -> PhysicalDeviceProperties2Builder<'a> {
self.inner.properties = properties;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsPhysicalDeviceProperties2>(
mut self,
next: &'a mut T,
) -> PhysicalDeviceProperties2Builder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDeviceProperties2 {
self.inner
}
}
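// A minimal sketch of the `push_next` chain described above. Both extension structs implement
// ExtendsPhysicalDeviceProperties2 (see their unsafe impls further down), and each push is
// prepended, so after the two calls the chain is root -> driver -> push_descriptor. The built
// struct only records raw pointers into the two locals, so a query such as
// vkGetPhysicalDeviceProperties2 would have to be issued while they are still alive; this
// sketch only shows the wiring.
#[allow(dead_code)]
fn example_physical_device_properties2_chain() {
    let mut push_descriptor = PhysicalDevicePushDescriptorPropertiesKHR::default();
    let mut driver = PhysicalDeviceDriverPropertiesKHR::default();
    let _properties2 = PhysicalDeviceProperties2::builder()
        .push_next(&mut push_descriptor)
        .push_next(&mut driver)
        .build();
}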
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkFormatProperties2.html>"]
pub struct FormatProperties2 {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub format_properties: FormatProperties,
}
impl ::std::default::Default for FormatProperties2 {
fn default() -> FormatProperties2 {
FormatProperties2 {
s_type: StructureType::FORMAT_PROPERTIES_2,
p_next: ::std::ptr::null_mut(),
format_properties: FormatProperties::default(),
}
}
}
impl FormatProperties2 {
pub fn builder<'a>() -> FormatProperties2Builder<'a> {
FormatProperties2Builder {
inner: FormatProperties2::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct FormatProperties2Builder<'a> {
inner: FormatProperties2,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsFormatProperties2 {}
impl<'a> ::std::ops::Deref for FormatProperties2Builder<'a> {
type Target = FormatProperties2;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for FormatProperties2Builder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> FormatProperties2Builder<'a> {
pub fn format_properties(
mut self,
format_properties: FormatProperties,
) -> FormatProperties2Builder<'a> {
self.inner.format_properties = format_properties;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsFormatProperties2>(
mut self,
next: &'a mut T,
) -> FormatProperties2Builder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> FormatProperties2 {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkImageFormatProperties2.html>"]
pub struct ImageFormatProperties2 {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub image_format_properties: ImageFormatProperties,
}
impl ::std::default::Default for ImageFormatProperties2 {
fn default() -> ImageFormatProperties2 {
ImageFormatProperties2 {
s_type: StructureType::IMAGE_FORMAT_PROPERTIES_2,
p_next: ::std::ptr::null_mut(),
image_format_properties: ImageFormatProperties::default(),
}
}
}
impl ImageFormatProperties2 {
pub fn builder<'a>() -> ImageFormatProperties2Builder<'a> {
ImageFormatProperties2Builder {
inner: ImageFormatProperties2::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ImageFormatProperties2Builder<'a> {
inner: ImageFormatProperties2,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsImageFormatProperties2 {}
impl<'a> ::std::ops::Deref for ImageFormatProperties2Builder<'a> {
type Target = ImageFormatProperties2;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ImageFormatProperties2Builder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ImageFormatProperties2Builder<'a> {
pub fn image_format_properties(
mut self,
image_format_properties: ImageFormatProperties,
) -> ImageFormatProperties2Builder<'a> {
self.inner.image_format_properties = image_format_properties;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsImageFormatProperties2>(
mut self,
next: &'a mut T,
) -> ImageFormatProperties2Builder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ImageFormatProperties2 {
self.inner
}
}
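// A minimal sketch of an output chain: ExternalImageFormatProperties implements
// ExtendsImageFormatProperties2 (see its unsafe impls further down), so it can be chained here
// and the driver fills in both structs when the image-format query is made. The query call
// itself lives outside these definitions and is omitted.
#[allow(dead_code)]
fn example_image_format_properties2_chain() {
    let mut external = ExternalImageFormatProperties::default();
    let _properties = ImageFormatProperties2::builder()
        .push_next(&mut external)
        .build();
}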
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDeviceImageFormatInfo2.html>"]
pub struct PhysicalDeviceImageFormatInfo2 {
pub s_type: StructureType,
pub p_next: *const c_void,
pub format: Format,
pub ty: ImageType,
pub tiling: ImageTiling,
pub usage: ImageUsageFlags,
pub flags: ImageCreateFlags,
}
impl ::std::default::Default for PhysicalDeviceImageFormatInfo2 {
fn default() -> PhysicalDeviceImageFormatInfo2 {
PhysicalDeviceImageFormatInfo2 {
s_type: StructureType::PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2,
p_next: ::std::ptr::null(),
format: Format::default(),
ty: ImageType::default(),
tiling: ImageTiling::default(),
usage: ImageUsageFlags::default(),
flags: ImageCreateFlags::default(),
}
}
}
impl PhysicalDeviceImageFormatInfo2 {
pub fn builder<'a>() -> PhysicalDeviceImageFormatInfo2Builder<'a> {
PhysicalDeviceImageFormatInfo2Builder {
inner: PhysicalDeviceImageFormatInfo2::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDeviceImageFormatInfo2Builder<'a> {
inner: PhysicalDeviceImageFormatInfo2,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsPhysicalDeviceImageFormatInfo2 {}
impl<'a> ::std::ops::Deref for PhysicalDeviceImageFormatInfo2Builder<'a> {
type Target = PhysicalDeviceImageFormatInfo2;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceImageFormatInfo2Builder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDeviceImageFormatInfo2Builder<'a> {
pub fn format(mut self, format: Format) -> PhysicalDeviceImageFormatInfo2Builder<'a> {
self.inner.format = format;
self
}
pub fn ty(mut self, ty: ImageType) -> PhysicalDeviceImageFormatInfo2Builder<'a> {
self.inner.ty = ty;
self
}
pub fn tiling(mut self, tiling: ImageTiling) -> PhysicalDeviceImageFormatInfo2Builder<'a> {
self.inner.tiling = tiling;
self
}
pub fn usage(mut self, usage: ImageUsageFlags) -> PhysicalDeviceImageFormatInfo2Builder<'a> {
self.inner.usage = usage;
self
}
pub fn flags(mut self, flags: ImageCreateFlags) -> PhysicalDeviceImageFormatInfo2Builder<'a> {
self.inner.flags = flags;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsPhysicalDeviceImageFormatInfo2>(
mut self,
next: &'a mut T,
) -> PhysicalDeviceImageFormatInfo2Builder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDeviceImageFormatInfo2 {
self.inner
}
}
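// A minimal sketch of the input struct for an image-format query, with a
// PhysicalDeviceExternalImageFormatInfo pushed onto its chain (it implements
// ExtendsPhysicalDeviceImageFormatInfo2 further down). Concrete format, type, tiling, usage
// and handle-type values are taken as parameters since their constants are defined elsewhere
// in these bindings. The built struct points at `external_info`, so the query would be issued
// here while both locals are alive.
#[allow(dead_code)]
fn example_image_format_info2_chain(
    format: Format,
    ty: ImageType,
    tiling: ImageTiling,
    usage: ImageUsageFlags,
    handle_type: ExternalMemoryHandleTypeFlags,
) {
    let mut external_info = PhysicalDeviceExternalImageFormatInfo::builder()
        .handle_type(handle_type)
        .build();
    let _info = PhysicalDeviceImageFormatInfo2::builder()
        .format(format)
        .ty(ty)
        .tiling(tiling)
        .usage(usage)
        .push_next(&mut external_info)
        .build();
}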
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkQueueFamilyProperties2.html>"]
pub struct QueueFamilyProperties2 {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub queue_family_properties: QueueFamilyProperties,
}
impl ::std::default::Default for QueueFamilyProperties2 {
fn default() -> QueueFamilyProperties2 {
QueueFamilyProperties2 {
s_type: StructureType::QUEUE_FAMILY_PROPERTIES_2,
p_next: ::std::ptr::null_mut(),
queue_family_properties: QueueFamilyProperties::default(),
}
}
}
impl QueueFamilyProperties2 {
pub fn builder<'a>() -> QueueFamilyProperties2Builder<'a> {
QueueFamilyProperties2Builder {
inner: QueueFamilyProperties2::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct QueueFamilyProperties2Builder<'a> {
inner: QueueFamilyProperties2,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsQueueFamilyProperties2 {}
impl<'a> ::std::ops::Deref for QueueFamilyProperties2Builder<'a> {
type Target = QueueFamilyProperties2;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for QueueFamilyProperties2Builder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> QueueFamilyProperties2Builder<'a> {
pub fn queue_family_properties(
mut self,
queue_family_properties: QueueFamilyProperties,
) -> QueueFamilyProperties2Builder<'a> {
self.inner.queue_family_properties = queue_family_properties;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsQueueFamilyProperties2>(
mut self,
next: &'a mut T,
) -> QueueFamilyProperties2Builder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> QueueFamilyProperties2 {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDeviceMemoryProperties2.html>"]
pub struct PhysicalDeviceMemoryProperties2 {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub memory_properties: PhysicalDeviceMemoryProperties,
}
impl ::std::default::Default for PhysicalDeviceMemoryProperties2 {
fn default() -> PhysicalDeviceMemoryProperties2 {
PhysicalDeviceMemoryProperties2 {
s_type: StructureType::PHYSICAL_DEVICE_MEMORY_PROPERTIES_2,
p_next: ::std::ptr::null_mut(),
memory_properties: PhysicalDeviceMemoryProperties::default(),
}
}
}
impl PhysicalDeviceMemoryProperties2 {
pub fn builder<'a>() -> PhysicalDeviceMemoryProperties2Builder<'a> {
PhysicalDeviceMemoryProperties2Builder {
inner: PhysicalDeviceMemoryProperties2::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDeviceMemoryProperties2Builder<'a> {
inner: PhysicalDeviceMemoryProperties2,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsPhysicalDeviceMemoryProperties2 {}
impl<'a> ::std::ops::Deref for PhysicalDeviceMemoryProperties2Builder<'a> {
type Target = PhysicalDeviceMemoryProperties2;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceMemoryProperties2Builder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDeviceMemoryProperties2Builder<'a> {
pub fn memory_properties(
mut self,
memory_properties: PhysicalDeviceMemoryProperties,
) -> PhysicalDeviceMemoryProperties2Builder<'a> {
self.inner.memory_properties = memory_properties;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsPhysicalDeviceMemoryProperties2>(
mut self,
next: &'a mut T,
) -> PhysicalDeviceMemoryProperties2Builder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDeviceMemoryProperties2 {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkSparseImageFormatProperties2.html>"]
pub struct SparseImageFormatProperties2 {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub properties: SparseImageFormatProperties,
}
impl ::std::default::Default for SparseImageFormatProperties2 {
fn default() -> SparseImageFormatProperties2 {
SparseImageFormatProperties2 {
s_type: StructureType::SPARSE_IMAGE_FORMAT_PROPERTIES_2,
p_next: ::std::ptr::null_mut(),
properties: SparseImageFormatProperties::default(),
}
}
}
impl SparseImageFormatProperties2 {
pub fn builder<'a>() -> SparseImageFormatProperties2Builder<'a> {
SparseImageFormatProperties2Builder {
inner: SparseImageFormatProperties2::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct SparseImageFormatProperties2Builder<'a> {
inner: SparseImageFormatProperties2,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsSparseImageFormatProperties2 {}
impl<'a> ::std::ops::Deref for SparseImageFormatProperties2Builder<'a> {
type Target = SparseImageFormatProperties2;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for SparseImageFormatProperties2Builder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> SparseImageFormatProperties2Builder<'a> {
pub fn properties(
mut self,
properties: SparseImageFormatProperties,
) -> SparseImageFormatProperties2Builder<'a> {
self.inner.properties = properties;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsSparseImageFormatProperties2>(
mut self,
next: &'a mut T,
) -> SparseImageFormatProperties2Builder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> SparseImageFormatProperties2 {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDeviceSparseImageFormatInfo2.html>"]
pub struct PhysicalDeviceSparseImageFormatInfo2 {
pub s_type: StructureType,
pub p_next: *const c_void,
pub format: Format,
pub ty: ImageType,
pub samples: SampleCountFlags,
pub usage: ImageUsageFlags,
pub tiling: ImageTiling,
}
impl ::std::default::Default for PhysicalDeviceSparseImageFormatInfo2 {
fn default() -> PhysicalDeviceSparseImageFormatInfo2 {
PhysicalDeviceSparseImageFormatInfo2 {
s_type: StructureType::PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2,
p_next: ::std::ptr::null(),
format: Format::default(),
ty: ImageType::default(),
samples: SampleCountFlags::default(),
usage: ImageUsageFlags::default(),
tiling: ImageTiling::default(),
}
}
}
impl PhysicalDeviceSparseImageFormatInfo2 {
pub fn builder<'a>() -> PhysicalDeviceSparseImageFormatInfo2Builder<'a> {
PhysicalDeviceSparseImageFormatInfo2Builder {
inner: PhysicalDeviceSparseImageFormatInfo2::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDeviceSparseImageFormatInfo2Builder<'a> {
inner: PhysicalDeviceSparseImageFormatInfo2,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsPhysicalDeviceSparseImageFormatInfo2 {}
impl<'a> ::std::ops::Deref for PhysicalDeviceSparseImageFormatInfo2Builder<'a> {
type Target = PhysicalDeviceSparseImageFormatInfo2;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceSparseImageFormatInfo2Builder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDeviceSparseImageFormatInfo2Builder<'a> {
pub fn format(mut self, format: Format) -> PhysicalDeviceSparseImageFormatInfo2Builder<'a> {
self.inner.format = format;
self
}
pub fn ty(mut self, ty: ImageType) -> PhysicalDeviceSparseImageFormatInfo2Builder<'a> {
self.inner.ty = ty;
self
}
pub fn samples(
mut self,
samples: SampleCountFlags,
) -> PhysicalDeviceSparseImageFormatInfo2Builder<'a> {
self.inner.samples = samples;
self
}
pub fn usage(
mut self,
usage: ImageUsageFlags,
) -> PhysicalDeviceSparseImageFormatInfo2Builder<'a> {
self.inner.usage = usage;
self
}
pub fn tiling(
mut self,
tiling: ImageTiling,
) -> PhysicalDeviceSparseImageFormatInfo2Builder<'a> {
self.inner.tiling = tiling;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsPhysicalDeviceSparseImageFormatInfo2>(
mut self,
next: &'a mut T,
) -> PhysicalDeviceSparseImageFormatInfo2Builder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDeviceSparseImageFormatInfo2 {
self.inner
}
}
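// A minimal sketch of a sparse-image-format query input. The constant names used here
// (Format::R8G8B8A8_UNORM, ImageType::TYPE_2D, SampleCountFlags::TYPE_1,
// ImageUsageFlags::SAMPLED, ImageTiling::OPTIMAL) are defined elsewhere in these bindings and
// are assumed for illustration; the struct itself carries no pointers, so returning it by
// value is fine.
#[allow(dead_code)]
fn example_sparse_image_format_info2() -> PhysicalDeviceSparseImageFormatInfo2 {
    PhysicalDeviceSparseImageFormatInfo2::builder()
        .format(Format::R8G8B8A8_UNORM)
        .ty(ImageType::TYPE_2D)
        .samples(SampleCountFlags::TYPE_1)
        .usage(ImageUsageFlags::SAMPLED)
        .tiling(ImageTiling::OPTIMAL)
        .build()
}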
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDevicePushDescriptorPropertiesKHR.html>"]
pub struct PhysicalDevicePushDescriptorPropertiesKHR {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub max_push_descriptors: u32,
}
impl ::std::default::Default for PhysicalDevicePushDescriptorPropertiesKHR {
fn default() -> PhysicalDevicePushDescriptorPropertiesKHR {
PhysicalDevicePushDescriptorPropertiesKHR {
s_type: StructureType::PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR,
p_next: ::std::ptr::null_mut(),
max_push_descriptors: u32::default(),
}
}
}
impl PhysicalDevicePushDescriptorPropertiesKHR {
pub fn builder<'a>() -> PhysicalDevicePushDescriptorPropertiesKHRBuilder<'a> {
PhysicalDevicePushDescriptorPropertiesKHRBuilder {
inner: PhysicalDevicePushDescriptorPropertiesKHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDevicePushDescriptorPropertiesKHRBuilder<'a> {
inner: PhysicalDevicePushDescriptorPropertiesKHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsPhysicalDeviceProperties2
for PhysicalDevicePushDescriptorPropertiesKHRBuilder<'_>
{
}
unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDevicePushDescriptorPropertiesKHR {}
impl<'a> ::std::ops::Deref for PhysicalDevicePushDescriptorPropertiesKHRBuilder<'a> {
type Target = PhysicalDevicePushDescriptorPropertiesKHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDevicePushDescriptorPropertiesKHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDevicePushDescriptorPropertiesKHRBuilder<'a> {
pub fn max_push_descriptors(
mut self,
max_push_descriptors: u32,
) -> PhysicalDevicePushDescriptorPropertiesKHRBuilder<'a> {
self.inner.max_push_descriptors = max_push_descriptors;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDevicePushDescriptorPropertiesKHR {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkConformanceVersionKHR.html>"]
pub struct ConformanceVersionKHR {
pub major: u8,
pub minor: u8,
pub subminor: u8,
pub patch: u8,
}
impl ConformanceVersionKHR {
pub fn builder<'a>() -> ConformanceVersionKHRBuilder<'a> {
ConformanceVersionKHRBuilder {
inner: ConformanceVersionKHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ConformanceVersionKHRBuilder<'a> {
inner: ConformanceVersionKHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for ConformanceVersionKHRBuilder<'a> {
type Target = ConformanceVersionKHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ConformanceVersionKHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ConformanceVersionKHRBuilder<'a> {
pub fn major(mut self, major: u8) -> ConformanceVersionKHRBuilder<'a> {
self.inner.major = major;
self
}
pub fn minor(mut self, minor: u8) -> ConformanceVersionKHRBuilder<'a> {
self.inner.minor = minor;
self
}
pub fn subminor(mut self, subminor: u8) -> ConformanceVersionKHRBuilder<'a> {
self.inner.subminor = subminor;
self
}
pub fn patch(mut self, patch: u8) -> ConformanceVersionKHRBuilder<'a> {
self.inner.patch = patch;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ConformanceVersionKHR {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDeviceDriverPropertiesKHR.html>"]
pub struct PhysicalDeviceDriverPropertiesKHR {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub driver_id: DriverIdKHR,
pub driver_name: [c_char; MAX_DRIVER_NAME_SIZE_KHR],
pub driver_info: [c_char; MAX_DRIVER_INFO_SIZE_KHR],
pub conformance_version: ConformanceVersionKHR,
}
impl fmt::Debug for PhysicalDeviceDriverPropertiesKHR {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
fmt.debug_struct("PhysicalDeviceDriverPropertiesKHR")
.field("s_type", &self.s_type)
.field("p_next", &self.p_next)
.field("driver_id", &self.driver_id)
.field("driver_name", &unsafe {
::std::ffi::CStr::from_ptr(self.driver_name.as_ptr() as *const c_char)
})
.field("driver_info", &unsafe {
::std::ffi::CStr::from_ptr(self.driver_info.as_ptr() as *const c_char)
})
.field("conformance_version", &self.conformance_version)
.finish()
}
}
impl ::std::default::Default for PhysicalDeviceDriverPropertiesKHR {
fn default() -> PhysicalDeviceDriverPropertiesKHR {
PhysicalDeviceDriverPropertiesKHR {
s_type: StructureType::PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR,
p_next: ::std::ptr::null_mut(),
driver_id: DriverIdKHR::default(),
driver_name: unsafe { ::std::mem::zeroed() },
driver_info: unsafe { ::std::mem::zeroed() },
conformance_version: ConformanceVersionKHR::default(),
}
}
}
impl PhysicalDeviceDriverPropertiesKHR {
pub fn builder<'a>() -> PhysicalDeviceDriverPropertiesKHRBuilder<'a> {
PhysicalDeviceDriverPropertiesKHRBuilder {
inner: PhysicalDeviceDriverPropertiesKHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDeviceDriverPropertiesKHRBuilder<'a> {
inner: PhysicalDeviceDriverPropertiesKHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceDriverPropertiesKHRBuilder<'_> {}
unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceDriverPropertiesKHR {}
impl<'a> ::std::ops::Deref for PhysicalDeviceDriverPropertiesKHRBuilder<'a> {
type Target = PhysicalDeviceDriverPropertiesKHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceDriverPropertiesKHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDeviceDriverPropertiesKHRBuilder<'a> {
pub fn driver_id(
mut self,
driver_id: DriverIdKHR,
) -> PhysicalDeviceDriverPropertiesKHRBuilder<'a> {
self.inner.driver_id = driver_id;
self
}
pub fn driver_name(
mut self,
driver_name: [c_char; MAX_DRIVER_NAME_SIZE_KHR],
) -> PhysicalDeviceDriverPropertiesKHRBuilder<'a> {
self.inner.driver_name = driver_name;
self
}
pub fn driver_info(
mut self,
driver_info: [c_char; MAX_DRIVER_INFO_SIZE_KHR],
) -> PhysicalDeviceDriverPropertiesKHRBuilder<'a> {
self.inner.driver_info = driver_info;
self
}
pub fn conformance_version(
mut self,
conformance_version: ConformanceVersionKHR,
) -> PhysicalDeviceDriverPropertiesKHRBuilder<'a> {
self.inner.conformance_version = conformance_version;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDeviceDriverPropertiesKHR {
self.inner
}
}
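// A minimal sketch of reading the fixed-size `driver_name` array back as a Rust string, using
// the same CStr conversion as the Debug impl above. The struct is normally filled in by the
// driver through a PhysicalDeviceProperties2 chain (it implements
// ExtendsPhysicalDeviceProperties2); a default value only yields an empty name.
#[allow(dead_code)]
fn example_driver_name(props: &PhysicalDeviceDriverPropertiesKHR) -> String {
    unsafe { ::std::ffi::CStr::from_ptr(props.driver_name.as_ptr() as *const c_char) }
        .to_string_lossy()
        .into_owned()
}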
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPresentRegionsKHR.html>"]
pub struct PresentRegionsKHR {
pub s_type: StructureType,
pub p_next: *const c_void,
pub swapchain_count: u32,
pub p_regions: *const PresentRegionKHR,
}
impl ::std::default::Default for PresentRegionsKHR {
fn default() -> PresentRegionsKHR {
PresentRegionsKHR {
s_type: StructureType::PRESENT_REGIONS_KHR,
p_next: ::std::ptr::null(),
swapchain_count: u32::default(),
p_regions: ::std::ptr::null(),
}
}
}
impl PresentRegionsKHR {
pub fn builder<'a>() -> PresentRegionsKHRBuilder<'a> {
PresentRegionsKHRBuilder {
inner: PresentRegionsKHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PresentRegionsKHRBuilder<'a> {
inner: PresentRegionsKHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsPresentInfoKHR for PresentRegionsKHRBuilder<'_> {}
unsafe impl ExtendsPresentInfoKHR for PresentRegionsKHR {}
impl<'a> ::std::ops::Deref for PresentRegionsKHRBuilder<'a> {
type Target = PresentRegionsKHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PresentRegionsKHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PresentRegionsKHRBuilder<'a> {
pub fn regions(mut self, regions: &'a [PresentRegionKHR]) -> PresentRegionsKHRBuilder<'a> {
self.inner.swapchain_count = regions.len() as _;
self.inner.p_regions = regions.as_ptr();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PresentRegionsKHR {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPresentRegionKHR.html>"]
pub struct PresentRegionKHR {
pub rectangle_count: u32,
pub p_rectangles: *const RectLayerKHR,
}
impl ::std::default::Default for PresentRegionKHR {
fn default() -> PresentRegionKHR {
PresentRegionKHR {
rectangle_count: u32::default(),
p_rectangles: ::std::ptr::null(),
}
}
}
impl PresentRegionKHR {
pub fn builder<'a>() -> PresentRegionKHRBuilder<'a> {
PresentRegionKHRBuilder {
inner: PresentRegionKHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PresentRegionKHRBuilder<'a> {
inner: PresentRegionKHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for PresentRegionKHRBuilder<'a> {
type Target = PresentRegionKHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PresentRegionKHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PresentRegionKHRBuilder<'a> {
pub fn rectangles(mut self, rectangles: &'a [RectLayerKHR]) -> PresentRegionKHRBuilder<'a> {
self.inner.rectangle_count = rectangles.len() as _;
self.inner.p_rectangles = rectangles.as_ptr();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PresentRegionKHR {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkRectLayerKHR.html>"]
pub struct RectLayerKHR {
pub offset: Offset2D,
pub extent: Extent2D,
pub layer: u32,
}
impl RectLayerKHR {
pub fn builder<'a>() -> RectLayerKHRBuilder<'a> {
RectLayerKHRBuilder {
inner: RectLayerKHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct RectLayerKHRBuilder<'a> {
inner: RectLayerKHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for RectLayerKHRBuilder<'a> {
type Target = RectLayerKHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for RectLayerKHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> RectLayerKHRBuilder<'a> {
pub fn offset(mut self, offset: Offset2D) -> RectLayerKHRBuilder<'a> {
self.inner.offset = offset;
self
}
pub fn extent(mut self, extent: Extent2D) -> RectLayerKHRBuilder<'a> {
self.inner.extent = extent;
self
}
pub fn layer(mut self, layer: u32) -> RectLayerKHRBuilder<'a> {
self.inner.layer = layer;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> RectLayerKHR {
self.inner
}
}
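// A minimal sketch tying the three types above together: RectLayerKHR values are gathered into
// a PresentRegionKHR, and regions into a PresentRegionsKHR, which implements
// ExtendsPresentInfoKHR and so can be pushed onto a present-info chain (defined elsewhere in
// these bindings). The slice setters only record a pointer and a length, so the slices are
// borrowed from the caller and must outlive any use of the built structs.
#[allow(dead_code)]
fn example_present_region(rectangles: &[RectLayerKHR]) -> PresentRegionKHR {
    // rectangle_count is taken from the slice length.
    PresentRegionKHR::builder().rectangles(rectangles).build()
}
#[allow(dead_code)]
fn example_present_regions(regions: &[PresentRegionKHR]) -> PresentRegionsKHR {
    // swapchain_count is taken from the slice length, one region per swapchain.
    PresentRegionsKHR::builder().regions(regions).build()
}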
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDeviceVariablePointerFeatures.html>"]
pub struct PhysicalDeviceVariablePointerFeatures {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub variable_pointers_storage_buffer: Bool32,
pub variable_pointers: Bool32,
}
impl ::std::default::Default for PhysicalDeviceVariablePointerFeatures {
fn default() -> PhysicalDeviceVariablePointerFeatures {
PhysicalDeviceVariablePointerFeatures {
s_type: StructureType::PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES,
p_next: ::std::ptr::null_mut(),
variable_pointers_storage_buffer: Bool32::default(),
variable_pointers: Bool32::default(),
}
}
}
impl PhysicalDeviceVariablePointerFeatures {
pub fn builder<'a>() -> PhysicalDeviceVariablePointerFeaturesBuilder<'a> {
PhysicalDeviceVariablePointerFeaturesBuilder {
inner: PhysicalDeviceVariablePointerFeatures::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDeviceVariablePointerFeaturesBuilder<'a> {
inner: PhysicalDeviceVariablePointerFeatures,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceVariablePointerFeaturesBuilder<'_> {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceVariablePointerFeatures {}
impl<'a> ::std::ops::Deref for PhysicalDeviceVariablePointerFeaturesBuilder<'a> {
type Target = PhysicalDeviceVariablePointerFeatures;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceVariablePointerFeaturesBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDeviceVariablePointerFeaturesBuilder<'a> {
pub fn variable_pointers_storage_buffer(
mut self,
variable_pointers_storage_buffer: bool,
) -> PhysicalDeviceVariablePointerFeaturesBuilder<'a> {
self.inner.variable_pointers_storage_buffer = variable_pointers_storage_buffer.into();
self
}
pub fn variable_pointers(
mut self,
variable_pointers: bool,
) -> PhysicalDeviceVariablePointerFeaturesBuilder<'a> {
self.inner.variable_pointers = variable_pointers.into();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDeviceVariablePointerFeatures {
self.inner
}
}
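// A minimal sketch of the feature struct above: the setters take plain `bool`s and convert
// them into Bool32. Because the struct implements ExtendsDeviceCreateInfo, the result is
// typically pushed onto a DeviceCreateInfo chain (defined elsewhere in these bindings) to
// enable the features at device creation.
#[allow(dead_code)]
fn example_variable_pointer_features() -> PhysicalDeviceVariablePointerFeatures {
    PhysicalDeviceVariablePointerFeatures::builder()
        .variable_pointers_storage_buffer(true)
        .variable_pointers(true)
        .build()
}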
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkExternalMemoryProperties.html>"]
pub struct ExternalMemoryProperties {
pub external_memory_features: ExternalMemoryFeatureFlags,
pub export_from_imported_handle_types: ExternalMemoryHandleTypeFlags,
pub compatible_handle_types: ExternalMemoryHandleTypeFlags,
}
impl ExternalMemoryProperties {
pub fn builder<'a>() -> ExternalMemoryPropertiesBuilder<'a> {
ExternalMemoryPropertiesBuilder {
inner: ExternalMemoryProperties::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ExternalMemoryPropertiesBuilder<'a> {
inner: ExternalMemoryProperties,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for ExternalMemoryPropertiesBuilder<'a> {
type Target = ExternalMemoryProperties;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ExternalMemoryPropertiesBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ExternalMemoryPropertiesBuilder<'a> {
pub fn external_memory_features(
mut self,
external_memory_features: ExternalMemoryFeatureFlags,
) -> ExternalMemoryPropertiesBuilder<'a> {
self.inner.external_memory_features = external_memory_features;
self
}
pub fn export_from_imported_handle_types(
mut self,
export_from_imported_handle_types: ExternalMemoryHandleTypeFlags,
) -> ExternalMemoryPropertiesBuilder<'a> {
self.inner.export_from_imported_handle_types = export_from_imported_handle_types;
self
}
pub fn compatible_handle_types(
mut self,
compatible_handle_types: ExternalMemoryHandleTypeFlags,
) -> ExternalMemoryPropertiesBuilder<'a> {
self.inner.compatible_handle_types = compatible_handle_types;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ExternalMemoryProperties {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDeviceExternalImageFormatInfo.html>"]
pub struct PhysicalDeviceExternalImageFormatInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub handle_type: ExternalMemoryHandleTypeFlags,
}
impl ::std::default::Default for PhysicalDeviceExternalImageFormatInfo {
fn default() -> PhysicalDeviceExternalImageFormatInfo {
PhysicalDeviceExternalImageFormatInfo {
s_type: StructureType::PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO,
p_next: ::std::ptr::null(),
handle_type: ExternalMemoryHandleTypeFlags::default(),
}
}
}
impl PhysicalDeviceExternalImageFormatInfo {
pub fn builder<'a>() -> PhysicalDeviceExternalImageFormatInfoBuilder<'a> {
PhysicalDeviceExternalImageFormatInfoBuilder {
inner: PhysicalDeviceExternalImageFormatInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDeviceExternalImageFormatInfoBuilder<'a> {
inner: PhysicalDeviceExternalImageFormatInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsPhysicalDeviceImageFormatInfo2
for PhysicalDeviceExternalImageFormatInfoBuilder<'_>
{
}
unsafe impl ExtendsPhysicalDeviceImageFormatInfo2 for PhysicalDeviceExternalImageFormatInfo {}
impl<'a> ::std::ops::Deref for PhysicalDeviceExternalImageFormatInfoBuilder<'a> {
type Target = PhysicalDeviceExternalImageFormatInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceExternalImageFormatInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDeviceExternalImageFormatInfoBuilder<'a> {
pub fn handle_type(
mut self,
handle_type: ExternalMemoryHandleTypeFlags,
) -> PhysicalDeviceExternalImageFormatInfoBuilder<'a> {
self.inner.handle_type = handle_type;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDeviceExternalImageFormatInfo {
self.inner
}
}
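// Illustrative sketch (not generated code): because the builder above implements
// `ExtendsPhysicalDeviceImageFormatInfo2`, it is normally pushed onto a
// `PhysicalDeviceImageFormatInfo2` chain before querying image-format support. The
// `PhysicalDeviceImageFormatInfo2` builder is defined elsewhere in this file and is
// assumed here to follow the same `push_next` pattern:
//
//     let mut external_info = PhysicalDeviceExternalImageFormatInfo::builder()
//         .handle_type(ExternalMemoryHandleTypeFlags::default()); // substitute the desired handle-type bit
//     let format_info = PhysicalDeviceImageFormatInfo2::builder()
//         .push_next(&mut external_info);
//     // A reference to `format_info` can be handed to the query call via `Deref`,
//     // without calling `build()`.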
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkExternalImageFormatProperties.html>"]
pub struct ExternalImageFormatProperties {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub external_memory_properties: ExternalMemoryProperties,
}
impl ::std::default::Default for ExternalImageFormatProperties {
fn default() -> ExternalImageFormatProperties {
ExternalImageFormatProperties {
s_type: StructureType::EXTERNAL_IMAGE_FORMAT_PROPERTIES,
p_next: ::std::ptr::null_mut(),
external_memory_properties: ExternalMemoryProperties::default(),
}
}
}
impl ExternalImageFormatProperties {
pub fn builder<'a>() -> ExternalImageFormatPropertiesBuilder<'a> {
ExternalImageFormatPropertiesBuilder {
inner: ExternalImageFormatProperties::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ExternalImageFormatPropertiesBuilder<'a> {
inner: ExternalImageFormatProperties,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsImageFormatProperties2 for ExternalImageFormatPropertiesBuilder<'_> {}
unsafe impl ExtendsImageFormatProperties2 for ExternalImageFormatProperties {}
impl<'a> ::std::ops::Deref for ExternalImageFormatPropertiesBuilder<'a> {
type Target = ExternalImageFormatProperties;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ExternalImageFormatPropertiesBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ExternalImageFormatPropertiesBuilder<'a> {
pub fn external_memory_properties(
mut self,
external_memory_properties: ExternalMemoryProperties,
) -> ExternalImageFormatPropertiesBuilder<'a> {
self.inner.external_memory_properties = external_memory_properties;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ExternalImageFormatProperties {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDeviceExternalBufferInfo.html>"]
pub struct PhysicalDeviceExternalBufferInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub flags: BufferCreateFlags,
pub usage: BufferUsageFlags,
pub handle_type: ExternalMemoryHandleTypeFlags,
}
impl ::std::default::Default for PhysicalDeviceExternalBufferInfo {
fn default() -> PhysicalDeviceExternalBufferInfo {
PhysicalDeviceExternalBufferInfo {
s_type: StructureType::PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO,
p_next: ::std::ptr::null(),
flags: BufferCreateFlags::default(),
usage: BufferUsageFlags::default(),
handle_type: ExternalMemoryHandleTypeFlags::default(),
}
}
}
impl PhysicalDeviceExternalBufferInfo {
pub fn builder<'a>() -> PhysicalDeviceExternalBufferInfoBuilder<'a> {
PhysicalDeviceExternalBufferInfoBuilder {
inner: PhysicalDeviceExternalBufferInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDeviceExternalBufferInfoBuilder<'a> {
inner: PhysicalDeviceExternalBufferInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsPhysicalDeviceExternalBufferInfo {}
impl<'a> ::std::ops::Deref for PhysicalDeviceExternalBufferInfoBuilder<'a> {
type Target = PhysicalDeviceExternalBufferInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceExternalBufferInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDeviceExternalBufferInfoBuilder<'a> {
pub fn flags(
mut self,
flags: BufferCreateFlags,
) -> PhysicalDeviceExternalBufferInfoBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn usage(mut self, usage: BufferUsageFlags) -> PhysicalDeviceExternalBufferInfoBuilder<'a> {
self.inner.usage = usage;
self
}
pub fn handle_type(
mut self,
handle_type: ExternalMemoryHandleTypeFlags,
) -> PhysicalDeviceExternalBufferInfoBuilder<'a> {
self.inner.handle_type = handle_type;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsPhysicalDeviceExternalBufferInfo>(
mut self,
next: &'a mut T,
) -> PhysicalDeviceExternalBufferInfoBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDeviceExternalBufferInfo {
self.inner
}
}
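// Illustrative sketch (not generated code): typical setter-chain use of the builder
// above; `SomeExtension` stands in for any hypothetical type implementing
// `ExtendsPhysicalDeviceExternalBufferInfo`:
//
//     let mut ext = SomeExtension::default(); // hypothetical extension struct
//     let info = PhysicalDeviceExternalBufferInfo::builder()
//         .flags(BufferCreateFlags::default())
//         .usage(BufferUsageFlags::default()) // substitute the intended usage bits
//         .handle_type(ExternalMemoryHandleTypeFlags::default())
//         .push_next(&mut ext); // chain becomes info -> ext -> (previous p_next)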
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkExternalBufferProperties.html>"]
pub struct ExternalBufferProperties {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub external_memory_properties: ExternalMemoryProperties,
}
impl ::std::default::Default for ExternalBufferProperties {
fn default() -> ExternalBufferProperties {
ExternalBufferProperties {
s_type: StructureType::EXTERNAL_BUFFER_PROPERTIES,
p_next: ::std::ptr::null_mut(),
external_memory_properties: ExternalMemoryProperties::default(),
}
}
}
impl ExternalBufferProperties {
pub fn builder<'a>() -> ExternalBufferPropertiesBuilder<'a> {
ExternalBufferPropertiesBuilder {
inner: ExternalBufferProperties::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ExternalBufferPropertiesBuilder<'a> {
inner: ExternalBufferProperties,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsExternalBufferProperties {}
impl<'a> ::std::ops::Deref for ExternalBufferPropertiesBuilder<'a> {
type Target = ExternalBufferProperties;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ExternalBufferPropertiesBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ExternalBufferPropertiesBuilder<'a> {
pub fn external_memory_properties(
mut self,
external_memory_properties: ExternalMemoryProperties,
) -> ExternalBufferPropertiesBuilder<'a> {
self.inner.external_memory_properties = external_memory_properties;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsExternalBufferProperties>(
mut self,
next: &'a mut T,
) -> ExternalBufferPropertiesBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ExternalBufferProperties {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDeviceIDProperties.html>"]
pub struct PhysicalDeviceIDProperties {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub device_uuid: [u8; UUID_SIZE],
pub driver_uuid: [u8; UUID_SIZE],
pub device_luid: [u8; LUID_SIZE],
pub device_node_mask: u32,
pub device_luid_valid: Bool32,
}
impl ::std::default::Default for PhysicalDeviceIDProperties {
fn default() -> PhysicalDeviceIDProperties {
PhysicalDeviceIDProperties {
s_type: StructureType::PHYSICAL_DEVICE_ID_PROPERTIES,
p_next: ::std::ptr::null_mut(),
device_uuid: unsafe { ::std::mem::zeroed() },
driver_uuid: unsafe { ::std::mem::zeroed() },
device_luid: unsafe { ::std::mem::zeroed() },
device_node_mask: u32::default(),
device_luid_valid: Bool32::default(),
}
}
}
impl PhysicalDeviceIDProperties {
pub fn builder<'a>() -> PhysicalDeviceIDPropertiesBuilder<'a> {
PhysicalDeviceIDPropertiesBuilder {
inner: PhysicalDeviceIDProperties::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDeviceIDPropertiesBuilder<'a> {
inner: PhysicalDeviceIDProperties,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceIDPropertiesBuilder<'_> {}
unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceIDProperties {}
impl<'a> ::std::ops::Deref for PhysicalDeviceIDPropertiesBuilder<'a> {
type Target = PhysicalDeviceIDProperties;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceIDPropertiesBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDeviceIDPropertiesBuilder<'a> {
pub fn device_uuid(
mut self,
device_uuid: [u8; UUID_SIZE],
) -> PhysicalDeviceIDPropertiesBuilder<'a> {
self.inner.device_uuid = device_uuid;
self
}
pub fn driver_uuid(
mut self,
driver_uuid: [u8; UUID_SIZE],
) -> PhysicalDeviceIDPropertiesBuilder<'a> {
self.inner.driver_uuid = driver_uuid;
self
}
pub fn device_luid(
mut self,
device_luid: [u8; LUID_SIZE],
) -> PhysicalDeviceIDPropertiesBuilder<'a> {
self.inner.device_luid = device_luid;
self
}
pub fn device_node_mask(
mut self,
device_node_mask: u32,
) -> PhysicalDeviceIDPropertiesBuilder<'a> {
self.inner.device_node_mask = device_node_mask;
self
}
pub fn device_luid_valid(
mut self,
device_luid_valid: bool,
) -> PhysicalDeviceIDPropertiesBuilder<'a> {
self.inner.device_luid_valid = device_luid_valid.into();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDeviceIDProperties {
self.inner
}
}
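// Illustrative sketch (not generated code): `PhysicalDeviceIDProperties` implements
// `ExtendsPhysicalDeviceProperties2`, so it is normally chained onto
// `PhysicalDeviceProperties2` (defined elsewhere in this file, assumed to expose the
// same `push_next` pattern) and filled in by the driver rather than set by hand:
//
//     let mut id_props = PhysicalDeviceIDProperties::builder();
//     let mut props2 = PhysicalDeviceProperties2::builder().push_next(&mut id_props);
//     // ... pass a mutable reference to the underlying struct to the properties query,
//     // then read `id_props.device_uuid` through `Deref` on the builder.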
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkExternalMemoryImageCreateInfo.html>"]
pub struct ExternalMemoryImageCreateInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub handle_types: ExternalMemoryHandleTypeFlags,
}
impl ::std::default::Default for ExternalMemoryImageCreateInfo {
fn default() -> ExternalMemoryImageCreateInfo {
ExternalMemoryImageCreateInfo {
s_type: StructureType::EXTERNAL_MEMORY_IMAGE_CREATE_INFO,
p_next: ::std::ptr::null(),
handle_types: ExternalMemoryHandleTypeFlags::default(),
}
}
}
impl ExternalMemoryImageCreateInfo {
pub fn builder<'a>() -> ExternalMemoryImageCreateInfoBuilder<'a> {
ExternalMemoryImageCreateInfoBuilder {
inner: ExternalMemoryImageCreateInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ExternalMemoryImageCreateInfoBuilder<'a> {
inner: ExternalMemoryImageCreateInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsImageCreateInfo for ExternalMemoryImageCreateInfoBuilder<'_> {}
unsafe impl ExtendsImageCreateInfo for ExternalMemoryImageCreateInfo {}
impl<'a> ::std::ops::Deref for ExternalMemoryImageCreateInfoBuilder<'a> {
type Target = ExternalMemoryImageCreateInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ExternalMemoryImageCreateInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ExternalMemoryImageCreateInfoBuilder<'a> {
pub fn handle_types(
mut self,
handle_types: ExternalMemoryHandleTypeFlags,
) -> ExternalMemoryImageCreateInfoBuilder<'a> {
self.inner.handle_types = handle_types;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ExternalMemoryImageCreateInfo {
self.inner
}
}
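// Illustrative sketch (not generated code): `ExternalMemoryImageCreateInfo` extends
// `ImageCreateInfo`, whose builder (defined elsewhere in this file) is assumed to offer
// the same `push_next` pattern for `ExtendsImageCreateInfo` types:
//
//     let mut external_mem = ExternalMemoryImageCreateInfo::builder()
//         .handle_types(ExternalMemoryHandleTypeFlags::default()); // desired handle-type bits
//     let image_info = ImageCreateInfo::builder()
//         .push_next(&mut external_mem);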
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkExternalMemoryBufferCreateInfo.html>"]
pub struct ExternalMemoryBufferCreateInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub handle_types: ExternalMemoryHandleTypeFlags,
}
impl ::std::default::Default for ExternalMemoryBufferCreateInfo {
fn default() -> ExternalMemoryBufferCreateInfo {
ExternalMemoryBufferCreateInfo {
s_type: StructureType::EXTERNAL_MEMORY_BUFFER_CREATE_INFO,
p_next: ::std::ptr::null(),
handle_types: ExternalMemoryHandleTypeFlags::default(),
}
}
}
impl ExternalMemoryBufferCreateInfo {
pub fn builder<'a>() -> ExternalMemoryBufferCreateInfoBuilder<'a> {
ExternalMemoryBufferCreateInfoBuilder {
inner: ExternalMemoryBufferCreateInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ExternalMemoryBufferCreateInfoBuilder<'a> {
inner: ExternalMemoryBufferCreateInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsBufferCreateInfo for ExternalMemoryBufferCreateInfoBuilder<'_> {}
unsafe impl ExtendsBufferCreateInfo for ExternalMemoryBufferCreateInfo {}
impl<'a> ::std::ops::Deref for ExternalMemoryBufferCreateInfoBuilder<'a> {
type Target = ExternalMemoryBufferCreateInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ExternalMemoryBufferCreateInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ExternalMemoryBufferCreateInfoBuilder<'a> {
pub fn handle_types(
mut self,
handle_types: ExternalMemoryHandleTypeFlags,
) -> ExternalMemoryBufferCreateInfoBuilder<'a> {
self.inner.handle_types = handle_types;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ExternalMemoryBufferCreateInfo {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkExportMemoryAllocateInfo.html>"]
pub struct ExportMemoryAllocateInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub handle_types: ExternalMemoryHandleTypeFlags,
}
impl ::std::default::Default for ExportMemoryAllocateInfo {
fn default() -> ExportMemoryAllocateInfo {
ExportMemoryAllocateInfo {
s_type: StructureType::EXPORT_MEMORY_ALLOCATE_INFO,
p_next: ::std::ptr::null(),
handle_types: ExternalMemoryHandleTypeFlags::default(),
}
}
}
impl ExportMemoryAllocateInfo {
pub fn builder<'a>() -> ExportMemoryAllocateInfoBuilder<'a> {
ExportMemoryAllocateInfoBuilder {
inner: ExportMemoryAllocateInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ExportMemoryAllocateInfoBuilder<'a> {
inner: ExportMemoryAllocateInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsMemoryAllocateInfo for ExportMemoryAllocateInfoBuilder<'_> {}
unsafe impl ExtendsMemoryAllocateInfo for ExportMemoryAllocateInfo {}
impl<'a> ::std::ops::Deref for ExportMemoryAllocateInfoBuilder<'a> {
type Target = ExportMemoryAllocateInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ExportMemoryAllocateInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ExportMemoryAllocateInfoBuilder<'a> {
pub fn handle_types(
mut self,
handle_types: ExternalMemoryHandleTypeFlags,
) -> ExportMemoryAllocateInfoBuilder<'a> {
self.inner.handle_types = handle_types;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ExportMemoryAllocateInfo {
self.inner
}
}
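// Illustrative sketch (not generated code): exporting memory is requested by chaining
// `ExportMemoryAllocateInfo` onto `MemoryAllocateInfo` (defined elsewhere in this file,
// assumed to expose `push_next` for `ExtendsMemoryAllocateInfo` types):
//
//     let mut export_info = ExportMemoryAllocateInfo::builder()
//         .handle_types(ExternalMemoryHandleTypeFlags::default()); // handle types to export as
//     let alloc_info = MemoryAllocateInfo::builder()
//         .push_next(&mut export_info);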
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkImportMemoryWin32HandleInfoKHR.html>"]
pub struct ImportMemoryWin32HandleInfoKHR {
pub s_type: StructureType,
pub p_next: *const c_void,
pub handle_type: ExternalMemoryHandleTypeFlags,
pub handle: HANDLE,
pub name: LPCWSTR,
}
impl ::std::default::Default for ImportMemoryWin32HandleInfoKHR {
fn default() -> ImportMemoryWin32HandleInfoKHR {
ImportMemoryWin32HandleInfoKHR {
s_type: StructureType::IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR,
p_next: ::std::ptr::null(),
handle_type: ExternalMemoryHandleTypeFlags::default(),
handle: unsafe { ::std::mem::zeroed() },
name: unsafe { ::std::mem::zeroed() },
}
}
}
impl ImportMemoryWin32HandleInfoKHR {
pub fn builder<'a>() -> ImportMemoryWin32HandleInfoKHRBuilder<'a> {
ImportMemoryWin32HandleInfoKHRBuilder {
inner: ImportMemoryWin32HandleInfoKHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ImportMemoryWin32HandleInfoKHRBuilder<'a> {
inner: ImportMemoryWin32HandleInfoKHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsMemoryAllocateInfo for ImportMemoryWin32HandleInfoKHRBuilder<'_> {}
unsafe impl ExtendsMemoryAllocateInfo for ImportMemoryWin32HandleInfoKHR {}
impl<'a> ::std::ops::Deref for ImportMemoryWin32HandleInfoKHRBuilder<'a> {
type Target = ImportMemoryWin32HandleInfoKHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ImportMemoryWin32HandleInfoKHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ImportMemoryWin32HandleInfoKHRBuilder<'a> {
pub fn handle_type(
mut self,
handle_type: ExternalMemoryHandleTypeFlags,
) -> ImportMemoryWin32HandleInfoKHRBuilder<'a> {
self.inner.handle_type = handle_type;
self
}
pub fn handle(mut self, handle: HANDLE) -> ImportMemoryWin32HandleInfoKHRBuilder<'a> {
self.inner.handle = handle;
self
}
pub fn name(mut self, name: LPCWSTR) -> ImportMemoryWin32HandleInfoKHRBuilder<'a> {
self.inner.name = name;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ImportMemoryWin32HandleInfoKHR {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkExportMemoryWin32HandleInfoKHR.html>"]
pub struct ExportMemoryWin32HandleInfoKHR {
pub s_type: StructureType,
pub p_next: *const c_void,
pub p_attributes: *const SECURITY_ATTRIBUTES,
pub dw_access: DWORD,
pub name: LPCWSTR,
}
impl ::std::default::Default for ExportMemoryWin32HandleInfoKHR {
fn default() -> ExportMemoryWin32HandleInfoKHR {
ExportMemoryWin32HandleInfoKHR {
s_type: StructureType::EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR,
p_next: ::std::ptr::null(),
p_attributes: ::std::ptr::null(),
dw_access: DWORD::default(),
name: unsafe { ::std::mem::zeroed() },
}
}
}
impl ExportMemoryWin32HandleInfoKHR {
pub fn builder<'a>() -> ExportMemoryWin32HandleInfoKHRBuilder<'a> {
ExportMemoryWin32HandleInfoKHRBuilder {
inner: ExportMemoryWin32HandleInfoKHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ExportMemoryWin32HandleInfoKHRBuilder<'a> {
inner: ExportMemoryWin32HandleInfoKHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsMemoryAllocateInfo for ExportMemoryWin32HandleInfoKHRBuilder<'_> {}
unsafe impl ExtendsMemoryAllocateInfo for ExportMemoryWin32HandleInfoKHR {}
impl<'a> ::std::ops::Deref for ExportMemoryWin32HandleInfoKHRBuilder<'a> {
type Target = ExportMemoryWin32HandleInfoKHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ExportMemoryWin32HandleInfoKHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ExportMemoryWin32HandleInfoKHRBuilder<'a> {
pub fn attributes(
mut self,
attributes: &'a SECURITY_ATTRIBUTES,
) -> ExportMemoryWin32HandleInfoKHRBuilder<'a> {
self.inner.p_attributes = attributes;
self
}
pub fn dw_access(mut self, dw_access: DWORD) -> ExportMemoryWin32HandleInfoKHRBuilder<'a> {
self.inner.dw_access = dw_access;
self
}
pub fn name(mut self, name: LPCWSTR) -> ExportMemoryWin32HandleInfoKHRBuilder<'a> {
self.inner.name = name;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ExportMemoryWin32HandleInfoKHR {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkMemoryWin32HandlePropertiesKHR.html>"]
pub struct MemoryWin32HandlePropertiesKHR {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub memory_type_bits: u32,
}
impl ::std::default::Default for MemoryWin32HandlePropertiesKHR {
fn default() -> MemoryWin32HandlePropertiesKHR {
MemoryWin32HandlePropertiesKHR {
s_type: StructureType::MEMORY_WIN32_HANDLE_PROPERTIES_KHR,
p_next: ::std::ptr::null_mut(),
memory_type_bits: u32::default(),
}
}
}
impl MemoryWin32HandlePropertiesKHR {
pub fn builder<'a>() -> MemoryWin32HandlePropertiesKHRBuilder<'a> {
MemoryWin32HandlePropertiesKHRBuilder {
inner: MemoryWin32HandlePropertiesKHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct MemoryWin32HandlePropertiesKHRBuilder<'a> {
inner: MemoryWin32HandlePropertiesKHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsMemoryWin32HandlePropertiesKHR {}
impl<'a> ::std::ops::Deref for MemoryWin32HandlePropertiesKHRBuilder<'a> {
type Target = MemoryWin32HandlePropertiesKHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for MemoryWin32HandlePropertiesKHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> MemoryWin32HandlePropertiesKHRBuilder<'a> {
pub fn memory_type_bits(
mut self,
memory_type_bits: u32,
) -> MemoryWin32HandlePropertiesKHRBuilder<'a> {
self.inner.memory_type_bits = memory_type_bits;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsMemoryWin32HandlePropertiesKHR>(
mut self,
next: &'a mut T,
) -> MemoryWin32HandlePropertiesKHRBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> MemoryWin32HandlePropertiesKHR {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkMemoryGetWin32HandleInfoKHR.html>"]
pub struct MemoryGetWin32HandleInfoKHR {
pub s_type: StructureType,
pub p_next: *const c_void,
pub memory: DeviceMemory,
pub handle_type: ExternalMemoryHandleTypeFlags,
}
impl ::std::default::Default for MemoryGetWin32HandleInfoKHR {
fn default() -> MemoryGetWin32HandleInfoKHR {
MemoryGetWin32HandleInfoKHR {
s_type: StructureType::MEMORY_GET_WIN32_HANDLE_INFO_KHR,
p_next: ::std::ptr::null(),
memory: DeviceMemory::default(),
handle_type: ExternalMemoryHandleTypeFlags::default(),
}
}
}
impl MemoryGetWin32HandleInfoKHR {
pub fn builder<'a>() -> MemoryGetWin32HandleInfoKHRBuilder<'a> {
MemoryGetWin32HandleInfoKHRBuilder {
inner: MemoryGetWin32HandleInfoKHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct MemoryGetWin32HandleInfoKHRBuilder<'a> {
inner: MemoryGetWin32HandleInfoKHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsMemoryGetWin32HandleInfoKHR {}
impl<'a> ::std::ops::Deref for MemoryGetWin32HandleInfoKHRBuilder<'a> {
type Target = MemoryGetWin32HandleInfoKHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for MemoryGetWin32HandleInfoKHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> MemoryGetWin32HandleInfoKHRBuilder<'a> {
pub fn memory(mut self, memory: DeviceMemory) -> MemoryGetWin32HandleInfoKHRBuilder<'a> {
self.inner.memory = memory;
self
}
pub fn handle_type(
mut self,
handle_type: ExternalMemoryHandleTypeFlags,
) -> MemoryGetWin32HandleInfoKHRBuilder<'a> {
self.inner.handle_type = handle_type;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsMemoryGetWin32HandleInfoKHR>(
mut self,
next: &'a mut T,
) -> MemoryGetWin32HandleInfoKHRBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> MemoryGetWin32HandleInfoKHR {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkImportMemoryFdInfoKHR.html>"]
pub struct ImportMemoryFdInfoKHR {
pub s_type: StructureType,
pub p_next: *const c_void,
pub handle_type: ExternalMemoryHandleTypeFlags,
pub fd: c_int,
}
impl ::std::default::Default for ImportMemoryFdInfoKHR {
fn default() -> ImportMemoryFdInfoKHR {
ImportMemoryFdInfoKHR {
s_type: StructureType::IMPORT_MEMORY_FD_INFO_KHR,
p_next: ::std::ptr::null(),
handle_type: ExternalMemoryHandleTypeFlags::default(),
fd: c_int::default(),
}
}
}
impl ImportMemoryFdInfoKHR {
pub fn builder<'a>() -> ImportMemoryFdInfoKHRBuilder<'a> {
ImportMemoryFdInfoKHRBuilder {
inner: ImportMemoryFdInfoKHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ImportMemoryFdInfoKHRBuilder<'a> {
inner: ImportMemoryFdInfoKHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsMemoryAllocateInfo for ImportMemoryFdInfoKHRBuilder<'_> {}
unsafe impl ExtendsMemoryAllocateInfo for ImportMemoryFdInfoKHR {}
impl<'a> ::std::ops::Deref for ImportMemoryFdInfoKHRBuilder<'a> {
type Target = ImportMemoryFdInfoKHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ImportMemoryFdInfoKHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ImportMemoryFdInfoKHRBuilder<'a> {
pub fn handle_type(
mut self,
handle_type: ExternalMemoryHandleTypeFlags,
) -> ImportMemoryFdInfoKHRBuilder<'a> {
self.inner.handle_type = handle_type;
self
}
pub fn fd(mut self, fd: c_int) -> ImportMemoryFdInfoKHRBuilder<'a> {
self.inner.fd = fd;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ImportMemoryFdInfoKHR {
self.inner
}
}
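// Illustrative sketch (not generated code): importing a POSIX file descriptor works by
// chaining `ImportMemoryFdInfoKHR` onto the same `MemoryAllocateInfo` chain; on a
// successful allocation, ownership of the file descriptor passes to the implementation:
//
//     let mut import_info = ImportMemoryFdInfoKHR::builder()
//         .handle_type(ExternalMemoryHandleTypeFlags::default()) // must match the exporting allocation
//         .fd(fd); // `fd` obtained beforehand from an fd export
//     let alloc_info = MemoryAllocateInfo::builder()
//         .push_next(&mut import_info);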
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkMemoryFdPropertiesKHR.html>"]
pub struct MemoryFdPropertiesKHR {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub memory_type_bits: u32,
}
impl ::std::default::Default for MemoryFdPropertiesKHR {
fn default() -> MemoryFdPropertiesKHR {
MemoryFdPropertiesKHR {
s_type: StructureType::MEMORY_FD_PROPERTIES_KHR,
p_next: ::std::ptr::null_mut(),
memory_type_bits: u32::default(),
}
}
}
impl MemoryFdPropertiesKHR {
pub fn builder<'a>() -> MemoryFdPropertiesKHRBuilder<'a> {
MemoryFdPropertiesKHRBuilder {
inner: MemoryFdPropertiesKHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct MemoryFdPropertiesKHRBuilder<'a> {
inner: MemoryFdPropertiesKHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsMemoryFdPropertiesKHR {}
impl<'a> ::std::ops::Deref for MemoryFdPropertiesKHRBuilder<'a> {
type Target = MemoryFdPropertiesKHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for MemoryFdPropertiesKHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> MemoryFdPropertiesKHRBuilder<'a> {
pub fn memory_type_bits(mut self, memory_type_bits: u32) -> MemoryFdPropertiesKHRBuilder<'a> {
self.inner.memory_type_bits = memory_type_bits;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsMemoryFdPropertiesKHR>(
mut self,
next: &'a mut T,
) -> MemoryFdPropertiesKHRBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> MemoryFdPropertiesKHR {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkMemoryGetFdInfoKHR.html>"]
pub struct MemoryGetFdInfoKHR {
pub s_type: StructureType,
pub p_next: *const c_void,
pub memory: DeviceMemory,
pub handle_type: ExternalMemoryHandleTypeFlags,
}
impl ::std::default::Default for MemoryGetFdInfoKHR {
fn default() -> MemoryGetFdInfoKHR {
MemoryGetFdInfoKHR {
s_type: StructureType::MEMORY_GET_FD_INFO_KHR,
p_next: ::std::ptr::null(),
memory: DeviceMemory::default(),
handle_type: ExternalMemoryHandleTypeFlags::default(),
}
}
}
impl MemoryGetFdInfoKHR {
pub fn builder<'a>() -> MemoryGetFdInfoKHRBuilder<'a> {
MemoryGetFdInfoKHRBuilder {
inner: MemoryGetFdInfoKHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct MemoryGetFdInfoKHRBuilder<'a> {
inner: MemoryGetFdInfoKHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsMemoryGetFdInfoKHR {}
impl<'a> ::std::ops::Deref for MemoryGetFdInfoKHRBuilder<'a> {
type Target = MemoryGetFdInfoKHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for MemoryGetFdInfoKHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> MemoryGetFdInfoKHRBuilder<'a> {
pub fn memory(mut self, memory: DeviceMemory) -> MemoryGetFdInfoKHRBuilder<'a> {
self.inner.memory = memory;
self
}
pub fn handle_type(
mut self,
handle_type: ExternalMemoryHandleTypeFlags,
) -> MemoryGetFdInfoKHRBuilder<'a> {
self.inner.handle_type = handle_type;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsMemoryGetFdInfoKHR>(
mut self,
next: &'a mut T,
) -> MemoryGetFdInfoKHRBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> MemoryGetFdInfoKHR {
self.inner
}
}
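// Illustrative sketch (not generated code): `MemoryGetFdInfoKHR` describes which
// allocation and handle type to export; the struct is then handed to the fd-export entry
// point, which is exposed by the extension loader elsewhere in this crate, not in this file:
//
//     let get_fd_info = MemoryGetFdInfoKHR::builder()
//         .memory(device_memory) // a previously allocated `DeviceMemory`
//         .handle_type(ExternalMemoryHandleTypeFlags::default()); // a type the allocation exports
//     // pass a reference to `get_fd_info` to the export call via `Deref`.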
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkWin32KeyedMutexAcquireReleaseInfoKHR.html>"]
pub struct Win32KeyedMutexAcquireReleaseInfoKHR {
pub s_type: StructureType,
pub p_next: *const c_void,
pub acquire_count: u32,
pub p_acquire_syncs: *const DeviceMemory,
pub p_acquire_keys: *const u64,
pub p_acquire_timeouts: *const u32,
pub release_count: u32,
pub p_release_syncs: *const DeviceMemory,
pub p_release_keys: *const u64,
}
impl ::std::default::Default for Win32KeyedMutexAcquireReleaseInfoKHR {
fn default() -> Win32KeyedMutexAcquireReleaseInfoKHR {
Win32KeyedMutexAcquireReleaseInfoKHR {
s_type: StructureType::WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR,
p_next: ::std::ptr::null(),
acquire_count: u32::default(),
p_acquire_syncs: ::std::ptr::null(),
p_acquire_keys: ::std::ptr::null(),
p_acquire_timeouts: ::std::ptr::null(),
release_count: u32::default(),
p_release_syncs: ::std::ptr::null(),
p_release_keys: ::std::ptr::null(),
}
}
}
impl Win32KeyedMutexAcquireReleaseInfoKHR {
pub fn builder<'a>() -> Win32KeyedMutexAcquireReleaseInfoKHRBuilder<'a> {
Win32KeyedMutexAcquireReleaseInfoKHRBuilder {
inner: Win32KeyedMutexAcquireReleaseInfoKHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct Win32KeyedMutexAcquireReleaseInfoKHRBuilder<'a> {
inner: Win32KeyedMutexAcquireReleaseInfoKHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsSubmitInfo for Win32KeyedMutexAcquireReleaseInfoKHRBuilder<'_> {}
unsafe impl ExtendsSubmitInfo for Win32KeyedMutexAcquireReleaseInfoKHR {}
impl<'a> ::std::ops::Deref for Win32KeyedMutexAcquireReleaseInfoKHRBuilder<'a> {
type Target = Win32KeyedMutexAcquireReleaseInfoKHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for Win32KeyedMutexAcquireReleaseInfoKHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> Win32KeyedMutexAcquireReleaseInfoKHRBuilder<'a> {
pub fn acquire_syncs(
mut self,
acquire_syncs: &'a [DeviceMemory],
) -> Win32KeyedMutexAcquireReleaseInfoKHRBuilder<'a> {
self.inner.acquire_count = acquire_syncs.len() as _;
self.inner.p_acquire_syncs = acquire_syncs.as_ptr();
self
}
pub fn acquire_keys(
mut self,
acquire_keys: &'a [u64],
) -> Win32KeyedMutexAcquireReleaseInfoKHRBuilder<'a> {
self.inner.acquire_count = acquire_keys.len() as _;
self.inner.p_acquire_keys = acquire_keys.as_ptr();
self
}
pub fn acquire_timeouts(
mut self,
acquire_timeouts: &'a [u32],
) -> Win32KeyedMutexAcquireReleaseInfoKHRBuilder<'a> {
self.inner.acquire_count = acquire_timeouts.len() as _;
self.inner.p_acquire_timeouts = acquire_timeouts.as_ptr();
self
}
pub fn release_syncs(
mut self,
release_syncs: &'a [DeviceMemory],
) -> Win32KeyedMutexAcquireReleaseInfoKHRBuilder<'a> {
self.inner.release_count = release_syncs.len() as _;
self.inner.p_release_syncs = release_syncs.as_ptr();
self
}
pub fn release_keys(
mut self,
release_keys: &'a [u64],
) -> Win32KeyedMutexAcquireReleaseInfoKHRBuilder<'a> {
self.inner.release_count = release_keys.len() as _;
self.inner.p_release_keys = release_keys.as_ptr();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> Win32KeyedMutexAcquireReleaseInfoKHR {
self.inner
}
}
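// Note (descriptive, not generated): `acquire_syncs`, `acquire_keys`, and
// `acquire_timeouts` each overwrite `acquire_count` with the length of the slice they
// receive (and likewise `release_syncs`/`release_keys` for `release_count`), so all
// acquire slices must have the same length, as must all release slices:
//
//     let info = Win32KeyedMutexAcquireReleaseInfoKHR::builder()
//         .acquire_syncs(&acquire_memories)    // len N
//         .acquire_keys(&acquire_keys)         // len N
//         .acquire_timeouts(&acquire_timeouts) // len N
//         .release_syncs(&release_memories)    // len M
//         .release_keys(&release_keys);        // len M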
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDeviceExternalSemaphoreInfo.html>"]
pub struct PhysicalDeviceExternalSemaphoreInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub handle_type: ExternalSemaphoreHandleTypeFlags,
}
impl ::std::default::Default for PhysicalDeviceExternalSemaphoreInfo {
fn default() -> PhysicalDeviceExternalSemaphoreInfo {
PhysicalDeviceExternalSemaphoreInfo {
s_type: StructureType::PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO,
p_next: ::std::ptr::null(),
handle_type: ExternalSemaphoreHandleTypeFlags::default(),
}
}
}
impl PhysicalDeviceExternalSemaphoreInfo {
pub fn builder<'a>() -> PhysicalDeviceExternalSemaphoreInfoBuilder<'a> {
PhysicalDeviceExternalSemaphoreInfoBuilder {
inner: PhysicalDeviceExternalSemaphoreInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDeviceExternalSemaphoreInfoBuilder<'a> {
inner: PhysicalDeviceExternalSemaphoreInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsPhysicalDeviceExternalSemaphoreInfo {}
impl<'a> ::std::ops::Deref for PhysicalDeviceExternalSemaphoreInfoBuilder<'a> {
type Target = PhysicalDeviceExternalSemaphoreInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceExternalSemaphoreInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDeviceExternalSemaphoreInfoBuilder<'a> {
pub fn handle_type(
mut self,
handle_type: ExternalSemaphoreHandleTypeFlags,
) -> PhysicalDeviceExternalSemaphoreInfoBuilder<'a> {
self.inner.handle_type = handle_type;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsPhysicalDeviceExternalSemaphoreInfo>(
mut self,
next: &'a mut T,
) -> PhysicalDeviceExternalSemaphoreInfoBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDeviceExternalSemaphoreInfo {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkExternalSemaphoreProperties.html>"]
pub struct ExternalSemaphoreProperties {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub export_from_imported_handle_types: ExternalSemaphoreHandleTypeFlags,
pub compatible_handle_types: ExternalSemaphoreHandleTypeFlags,
pub external_semaphore_features: ExternalSemaphoreFeatureFlags,
}
impl ::std::default::Default for ExternalSemaphoreProperties {
fn default() -> ExternalSemaphoreProperties {
ExternalSemaphoreProperties {
s_type: StructureType::EXTERNAL_SEMAPHORE_PROPERTIES,
p_next: ::std::ptr::null_mut(),
export_from_imported_handle_types: ExternalSemaphoreHandleTypeFlags::default(),
compatible_handle_types: ExternalSemaphoreHandleTypeFlags::default(),
external_semaphore_features: ExternalSemaphoreFeatureFlags::default(),
}
}
}
impl ExternalSemaphoreProperties {
pub fn builder<'a>() -> ExternalSemaphorePropertiesBuilder<'a> {
ExternalSemaphorePropertiesBuilder {
inner: ExternalSemaphoreProperties::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ExternalSemaphorePropertiesBuilder<'a> {
inner: ExternalSemaphoreProperties,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsExternalSemaphoreProperties {}
impl<'a> ::std::ops::Deref for ExternalSemaphorePropertiesBuilder<'a> {
type Target = ExternalSemaphoreProperties;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ExternalSemaphorePropertiesBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ExternalSemaphorePropertiesBuilder<'a> {
pub fn export_from_imported_handle_types(
mut self,
export_from_imported_handle_types: ExternalSemaphoreHandleTypeFlags,
) -> ExternalSemaphorePropertiesBuilder<'a> {
self.inner.export_from_imported_handle_types = export_from_imported_handle_types;
self
}
pub fn compatible_handle_types(
mut self,
compatible_handle_types: ExternalSemaphoreHandleTypeFlags,
) -> ExternalSemaphorePropertiesBuilder<'a> {
self.inner.compatible_handle_types = compatible_handle_types;
self
}
pub fn external_semaphore_features(
mut self,
external_semaphore_features: ExternalSemaphoreFeatureFlags,
) -> ExternalSemaphorePropertiesBuilder<'a> {
self.inner.external_semaphore_features = external_semaphore_features;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsExternalSemaphoreProperties>(
mut self,
next: &'a mut T,
) -> ExternalSemaphorePropertiesBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ExternalSemaphoreProperties {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkExportSemaphoreCreateInfo.html>"]
pub struct ExportSemaphoreCreateInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub handle_types: ExternalSemaphoreHandleTypeFlags,
}
impl ::std::default::Default for ExportSemaphoreCreateInfo {
fn default() -> ExportSemaphoreCreateInfo {
ExportSemaphoreCreateInfo {
s_type: StructureType::EXPORT_SEMAPHORE_CREATE_INFO,
p_next: ::std::ptr::null(),
handle_types: ExternalSemaphoreHandleTypeFlags::default(),
}
}
}
impl ExportSemaphoreCreateInfo {
pub fn builder<'a>() -> ExportSemaphoreCreateInfoBuilder<'a> {
ExportSemaphoreCreateInfoBuilder {
inner: ExportSemaphoreCreateInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ExportSemaphoreCreateInfoBuilder<'a> {
inner: ExportSemaphoreCreateInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsSemaphoreCreateInfo for ExportSemaphoreCreateInfoBuilder<'_> {}
unsafe impl ExtendsSemaphoreCreateInfo for ExportSemaphoreCreateInfo {}
impl<'a> ::std::ops::Deref for ExportSemaphoreCreateInfoBuilder<'a> {
type Target = ExportSemaphoreCreateInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ExportSemaphoreCreateInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ExportSemaphoreCreateInfoBuilder<'a> {
pub fn handle_types(
mut self,
handle_types: ExternalSemaphoreHandleTypeFlags,
) -> ExportSemaphoreCreateInfoBuilder<'a> {
self.inner.handle_types = handle_types;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ExportSemaphoreCreateInfo {
self.inner
}
}
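// Illustrative sketch (not generated code): requesting an exportable semaphore chains
// `ExportSemaphoreCreateInfo` onto `SemaphoreCreateInfo` (defined elsewhere in this file,
// assumed to expose `push_next` for `ExtendsSemaphoreCreateInfo` types):
//
//     let mut export_info = ExportSemaphoreCreateInfo::builder()
//         .handle_types(ExternalSemaphoreHandleTypeFlags::default()); // handle types to export as
//     let semaphore_info = SemaphoreCreateInfo::builder()
//         .push_next(&mut export_info);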
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkImportSemaphoreWin32HandleInfoKHR.html>"]
pub struct ImportSemaphoreWin32HandleInfoKHR {
pub s_type: StructureType,
pub p_next: *const c_void,
pub semaphore: Semaphore,
pub flags: SemaphoreImportFlags,
pub handle_type: ExternalSemaphoreHandleTypeFlags,
pub handle: HANDLE,
pub name: LPCWSTR,
}
impl ::std::default::Default for ImportSemaphoreWin32HandleInfoKHR {
fn default() -> ImportSemaphoreWin32HandleInfoKHR {
ImportSemaphoreWin32HandleInfoKHR {
s_type: StructureType::IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR,
p_next: ::std::ptr::null(),
semaphore: Semaphore::default(),
flags: SemaphoreImportFlags::default(),
handle_type: ExternalSemaphoreHandleTypeFlags::default(),
handle: unsafe { ::std::mem::zeroed() },
name: unsafe { ::std::mem::zeroed() },
}
}
}
impl ImportSemaphoreWin32HandleInfoKHR {
pub fn builder<'a>() -> ImportSemaphoreWin32HandleInfoKHRBuilder<'a> {
ImportSemaphoreWin32HandleInfoKHRBuilder {
inner: ImportSemaphoreWin32HandleInfoKHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ImportSemaphoreWin32HandleInfoKHRBuilder<'a> {
inner: ImportSemaphoreWin32HandleInfoKHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsImportSemaphoreWin32HandleInfoKHR {}
impl<'a> ::std::ops::Deref for ImportSemaphoreWin32HandleInfoKHRBuilder<'a> {
type Target = ImportSemaphoreWin32HandleInfoKHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ImportSemaphoreWin32HandleInfoKHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ImportSemaphoreWin32HandleInfoKHRBuilder<'a> {
pub fn semaphore(
mut self,
semaphore: Semaphore,
) -> ImportSemaphoreWin32HandleInfoKHRBuilder<'a> {
self.inner.semaphore = semaphore;
self
}
pub fn flags(
mut self,
flags: SemaphoreImportFlags,
) -> ImportSemaphoreWin32HandleInfoKHRBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn handle_type(
mut self,
handle_type: ExternalSemaphoreHandleTypeFlags,
) -> ImportSemaphoreWin32HandleInfoKHRBuilder<'a> {
self.inner.handle_type = handle_type;
self
}
pub fn handle(mut self, handle: HANDLE) -> ImportSemaphoreWin32HandleInfoKHRBuilder<'a> {
self.inner.handle = handle;
self
}
pub fn name(mut self, name: LPCWSTR) -> ImportSemaphoreWin32HandleInfoKHRBuilder<'a> {
self.inner.name = name;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsImportSemaphoreWin32HandleInfoKHR>(
mut self,
next: &'a mut T,
) -> ImportSemaphoreWin32HandleInfoKHRBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ImportSemaphoreWin32HandleInfoKHR {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkExportSemaphoreWin32HandleInfoKHR.html>"]
pub struct ExportSemaphoreWin32HandleInfoKHR {
pub s_type: StructureType,
pub p_next: *const c_void,
pub p_attributes: *const SECURITY_ATTRIBUTES,
pub dw_access: DWORD,
pub name: LPCWSTR,
}
impl ::std::default::Default for ExportSemaphoreWin32HandleInfoKHR {
fn default() -> ExportSemaphoreWin32HandleInfoKHR {
ExportSemaphoreWin32HandleInfoKHR {
s_type: StructureType::EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR,
p_next: ::std::ptr::null(),
p_attributes: ::std::ptr::null(),
dw_access: DWORD::default(),
name: unsafe { ::std::mem::zeroed() },
}
}
}
impl ExportSemaphoreWin32HandleInfoKHR {
pub fn builder<'a>() -> ExportSemaphoreWin32HandleInfoKHRBuilder<'a> {
ExportSemaphoreWin32HandleInfoKHRBuilder {
inner: ExportSemaphoreWin32HandleInfoKHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ExportSemaphoreWin32HandleInfoKHRBuilder<'a> {
inner: ExportSemaphoreWin32HandleInfoKHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsSemaphoreCreateInfo for ExportSemaphoreWin32HandleInfoKHRBuilder<'_> {}
unsafe impl ExtendsSemaphoreCreateInfo for ExportSemaphoreWin32HandleInfoKHR {}
impl<'a> ::std::ops::Deref for ExportSemaphoreWin32HandleInfoKHRBuilder<'a> {
type Target = ExportSemaphoreWin32HandleInfoKHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ExportSemaphoreWin32HandleInfoKHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ExportSemaphoreWin32HandleInfoKHRBuilder<'a> {
pub fn attributes(
mut self,
attributes: &'a SECURITY_ATTRIBUTES,
) -> ExportSemaphoreWin32HandleInfoKHRBuilder<'a> {
self.inner.p_attributes = attributes;
self
}
pub fn dw_access(mut self, dw_access: DWORD) -> ExportSemaphoreWin32HandleInfoKHRBuilder<'a> {
self.inner.dw_access = dw_access;
self
}
pub fn name(mut self, name: LPCWSTR) -> ExportSemaphoreWin32HandleInfoKHRBuilder<'a> {
self.inner.name = name;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ExportSemaphoreWin32HandleInfoKHR {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkD3D12FenceSubmitInfoKHR.html>"]
pub struct D3D12FenceSubmitInfoKHR {
pub s_type: StructureType,
pub p_next: *const c_void,
pub wait_semaphore_values_count: u32,
pub p_wait_semaphore_values: *const u64,
pub signal_semaphore_values_count: u32,
pub p_signal_semaphore_values: *const u64,
}
impl ::std::default::Default for D3D12FenceSubmitInfoKHR {
fn default() -> D3D12FenceSubmitInfoKHR {
D3D12FenceSubmitInfoKHR {
s_type: StructureType::D3D12_FENCE_SUBMIT_INFO_KHR,
p_next: ::std::ptr::null(),
wait_semaphore_values_count: u32::default(),
p_wait_semaphore_values: ::std::ptr::null(),
signal_semaphore_values_count: u32::default(),
p_signal_semaphore_values: ::std::ptr::null(),
}
}
}
impl D3D12FenceSubmitInfoKHR {
pub fn builder<'a>() -> D3D12FenceSubmitInfoKHRBuilder<'a> {
D3D12FenceSubmitInfoKHRBuilder {
inner: D3D12FenceSubmitInfoKHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct D3D12FenceSubmitInfoKHRBuilder<'a> {
inner: D3D12FenceSubmitInfoKHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsSubmitInfo for D3D12FenceSubmitInfoKHRBuilder<'_> {}
unsafe impl ExtendsSubmitInfo for D3D12FenceSubmitInfoKHR {}
impl<'a> ::std::ops::Deref for D3D12FenceSubmitInfoKHRBuilder<'a> {
type Target = D3D12FenceSubmitInfoKHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for D3D12FenceSubmitInfoKHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> D3D12FenceSubmitInfoKHRBuilder<'a> {
pub fn wait_semaphore_values(
mut self,
wait_semaphore_values: &'a [u64],
) -> D3D12FenceSubmitInfoKHRBuilder<'a> {
self.inner.wait_semaphore_values_count = wait_semaphore_values.len() as _;
self.inner.p_wait_semaphore_values = wait_semaphore_values.as_ptr();
self
}
pub fn signal_semaphore_values(
mut self,
signal_semaphore_values: &'a [u64],
) -> D3D12FenceSubmitInfoKHRBuilder<'a> {
self.inner.signal_semaphore_values_count = signal_semaphore_values.len() as _;
self.inner.p_signal_semaphore_values = signal_semaphore_values.as_ptr();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> D3D12FenceSubmitInfoKHR {
self.inner
}
}
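// Note (descriptive, not generated): the two setters above derive
// `wait_semaphore_values_count` / `signal_semaphore_values_count` from the slice lengths;
// the struct extends `SubmitInfo`, and the value counts are expected to line up with the
// wait/signal semaphore arrays of the submit it is chained onto:
//
//     let mut fence_values = D3D12FenceSubmitInfoKHR::builder()
//         .wait_semaphore_values(&wait_values)      // one value per wait semaphore
//         .signal_semaphore_values(&signal_values); // one value per signal semaphore
//     // chain with `SubmitInfo::builder().push_next(&mut fence_values)`
//     // (builder defined elsewhere in this file).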
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkSemaphoreGetWin32HandleInfoKHR.html>"]
pub struct SemaphoreGetWin32HandleInfoKHR {
pub s_type: StructureType,
pub p_next: *const c_void,
pub semaphore: Semaphore,
pub handle_type: ExternalSemaphoreHandleTypeFlags,
}
impl ::std::default::Default for SemaphoreGetWin32HandleInfoKHR {
fn default() -> SemaphoreGetWin32HandleInfoKHR {
SemaphoreGetWin32HandleInfoKHR {
s_type: StructureType::SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR,
p_next: ::std::ptr::null(),
semaphore: Semaphore::default(),
handle_type: ExternalSemaphoreHandleTypeFlags::default(),
}
}
}
impl SemaphoreGetWin32HandleInfoKHR {
pub fn builder<'a>() -> SemaphoreGetWin32HandleInfoKHRBuilder<'a> {
SemaphoreGetWin32HandleInfoKHRBuilder {
inner: SemaphoreGetWin32HandleInfoKHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct SemaphoreGetWin32HandleInfoKHRBuilder<'a> {
inner: SemaphoreGetWin32HandleInfoKHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsSemaphoreGetWin32HandleInfoKHR {}
impl<'a> ::std::ops::Deref for SemaphoreGetWin32HandleInfoKHRBuilder<'a> {
type Target = SemaphoreGetWin32HandleInfoKHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for SemaphoreGetWin32HandleInfoKHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> SemaphoreGetWin32HandleInfoKHRBuilder<'a> {
pub fn semaphore(mut self, semaphore: Semaphore) -> SemaphoreGetWin32HandleInfoKHRBuilder<'a> {
self.inner.semaphore = semaphore;
self
}
pub fn handle_type(
mut self,
handle_type: ExternalSemaphoreHandleTypeFlags,
) -> SemaphoreGetWin32HandleInfoKHRBuilder<'a> {
self.inner.handle_type = handle_type;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsSemaphoreGetWin32HandleInfoKHR>(
mut self,
next: &'a mut T,
) -> SemaphoreGetWin32HandleInfoKHRBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> SemaphoreGetWin32HandleInfoKHR {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkImportSemaphoreFdInfoKHR.html>"]
pub struct ImportSemaphoreFdInfoKHR {
pub s_type: StructureType,
pub p_next: *const c_void,
pub semaphore: Semaphore,
pub flags: SemaphoreImportFlags,
pub handle_type: ExternalSemaphoreHandleTypeFlags,
pub fd: c_int,
}
impl ::std::default::Default for ImportSemaphoreFdInfoKHR {
fn default() -> ImportSemaphoreFdInfoKHR {
ImportSemaphoreFdInfoKHR {
s_type: StructureType::IMPORT_SEMAPHORE_FD_INFO_KHR,
p_next: ::std::ptr::null(),
semaphore: Semaphore::default(),
flags: SemaphoreImportFlags::default(),
handle_type: ExternalSemaphoreHandleTypeFlags::default(),
fd: c_int::default(),
}
}
}
impl ImportSemaphoreFdInfoKHR {
pub fn builder<'a>() -> ImportSemaphoreFdInfoKHRBuilder<'a> {
ImportSemaphoreFdInfoKHRBuilder {
inner: ImportSemaphoreFdInfoKHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ImportSemaphoreFdInfoKHRBuilder<'a> {
inner: ImportSemaphoreFdInfoKHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsImportSemaphoreFdInfoKHR {}
impl<'a> ::std::ops::Deref for ImportSemaphoreFdInfoKHRBuilder<'a> {
type Target = ImportSemaphoreFdInfoKHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ImportSemaphoreFdInfoKHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ImportSemaphoreFdInfoKHRBuilder<'a> {
pub fn semaphore(mut self, semaphore: Semaphore) -> ImportSemaphoreFdInfoKHRBuilder<'a> {
self.inner.semaphore = semaphore;
self
}
pub fn flags(mut self, flags: SemaphoreImportFlags) -> ImportSemaphoreFdInfoKHRBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn handle_type(
mut self,
handle_type: ExternalSemaphoreHandleTypeFlags,
) -> ImportSemaphoreFdInfoKHRBuilder<'a> {
self.inner.handle_type = handle_type;
self
}
pub fn fd(mut self, fd: c_int) -> ImportSemaphoreFdInfoKHRBuilder<'a> {
self.inner.fd = fd;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsImportSemaphoreFdInfoKHR>(
mut self,
next: &'a mut T,
) -> ImportSemaphoreFdInfoKHRBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ImportSemaphoreFdInfoKHR {
self.inner
}
}
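// Illustrative sketch, not part of the generated bindings: filling out the
// import info for a semaphore payload received as a POSIX file descriptor.
// The semaphore handle, handle-type flag, and fd are taken as parameters
// because their concrete values come from the application and the driver.
#[allow(dead_code)]
fn import_semaphore_fd_info_example(
    semaphore: Semaphore,
    handle_type: ExternalSemaphoreHandleTypeFlags,
    fd: c_int,
) -> ImportSemaphoreFdInfoKHR {
    ImportSemaphoreFdInfoKHR::builder()
        .semaphore(semaphore)
        .handle_type(handle_type)
        .fd(fd)
        // `build()` is harmless here: no pointers to temporaries are stored.
        .build()
}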
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkSemaphoreGetFdInfoKHR.html>"]
pub struct SemaphoreGetFdInfoKHR {
pub s_type: StructureType,
pub p_next: *const c_void,
pub semaphore: Semaphore,
pub handle_type: ExternalSemaphoreHandleTypeFlags,
}
impl ::std::default::Default for SemaphoreGetFdInfoKHR {
fn default() -> SemaphoreGetFdInfoKHR {
SemaphoreGetFdInfoKHR {
s_type: StructureType::SEMAPHORE_GET_FD_INFO_KHR,
p_next: ::std::ptr::null(),
semaphore: Semaphore::default(),
handle_type: ExternalSemaphoreHandleTypeFlags::default(),
}
}
}
impl SemaphoreGetFdInfoKHR {
pub fn builder<'a>() -> SemaphoreGetFdInfoKHRBuilder<'a> {
SemaphoreGetFdInfoKHRBuilder {
inner: SemaphoreGetFdInfoKHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct SemaphoreGetFdInfoKHRBuilder<'a> {
inner: SemaphoreGetFdInfoKHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsSemaphoreGetFdInfoKHR {}
impl<'a> ::std::ops::Deref for SemaphoreGetFdInfoKHRBuilder<'a> {
type Target = SemaphoreGetFdInfoKHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for SemaphoreGetFdInfoKHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> SemaphoreGetFdInfoKHRBuilder<'a> {
pub fn semaphore(mut self, semaphore: Semaphore) -> SemaphoreGetFdInfoKHRBuilder<'a> {
self.inner.semaphore = semaphore;
self
}
pub fn handle_type(
mut self,
handle_type: ExternalSemaphoreHandleTypeFlags,
) -> SemaphoreGetFdInfoKHRBuilder<'a> {
self.inner.handle_type = handle_type;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsSemaphoreGetFdInfoKHR>(
mut self,
next: &'a mut T,
) -> SemaphoreGetFdInfoKHRBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> SemaphoreGetFdInfoKHR {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDeviceExternalFenceInfo.html>"]
pub struct PhysicalDeviceExternalFenceInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub handle_type: ExternalFenceHandleTypeFlags,
}
impl ::std::default::Default for PhysicalDeviceExternalFenceInfo {
fn default() -> PhysicalDeviceExternalFenceInfo {
PhysicalDeviceExternalFenceInfo {
s_type: StructureType::PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO,
p_next: ::std::ptr::null(),
handle_type: ExternalFenceHandleTypeFlags::default(),
}
}
}
impl PhysicalDeviceExternalFenceInfo {
pub fn builder<'a>() -> PhysicalDeviceExternalFenceInfoBuilder<'a> {
PhysicalDeviceExternalFenceInfoBuilder {
inner: PhysicalDeviceExternalFenceInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDeviceExternalFenceInfoBuilder<'a> {
inner: PhysicalDeviceExternalFenceInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsPhysicalDeviceExternalFenceInfo {}
impl<'a> ::std::ops::Deref for PhysicalDeviceExternalFenceInfoBuilder<'a> {
type Target = PhysicalDeviceExternalFenceInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceExternalFenceInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDeviceExternalFenceInfoBuilder<'a> {
pub fn handle_type(
mut self,
handle_type: ExternalFenceHandleTypeFlags,
) -> PhysicalDeviceExternalFenceInfoBuilder<'a> {
self.inner.handle_type = handle_type;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsPhysicalDeviceExternalFenceInfo>(
mut self,
next: &'a mut T,
) -> PhysicalDeviceExternalFenceInfoBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDeviceExternalFenceInfo {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkExternalFenceProperties.html>"]
pub struct ExternalFenceProperties {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub export_from_imported_handle_types: ExternalFenceHandleTypeFlags,
pub compatible_handle_types: ExternalFenceHandleTypeFlags,
pub external_fence_features: ExternalFenceFeatureFlags,
}
impl ::std::default::Default for ExternalFenceProperties {
fn default() -> ExternalFenceProperties {
ExternalFenceProperties {
s_type: StructureType::EXTERNAL_FENCE_PROPERTIES,
p_next: ::std::ptr::null_mut(),
export_from_imported_handle_types: ExternalFenceHandleTypeFlags::default(),
compatible_handle_types: ExternalFenceHandleTypeFlags::default(),
external_fence_features: ExternalFenceFeatureFlags::default(),
}
}
}
impl ExternalFenceProperties {
pub fn builder<'a>() -> ExternalFencePropertiesBuilder<'a> {
ExternalFencePropertiesBuilder {
inner: ExternalFenceProperties::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ExternalFencePropertiesBuilder<'a> {
inner: ExternalFenceProperties,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsExternalFenceProperties {}
impl<'a> ::std::ops::Deref for ExternalFencePropertiesBuilder<'a> {
type Target = ExternalFenceProperties;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ExternalFencePropertiesBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ExternalFencePropertiesBuilder<'a> {
pub fn export_from_imported_handle_types(
mut self,
export_from_imported_handle_types: ExternalFenceHandleTypeFlags,
) -> ExternalFencePropertiesBuilder<'a> {
self.inner.export_from_imported_handle_types = export_from_imported_handle_types;
self
}
pub fn compatible_handle_types(
mut self,
compatible_handle_types: ExternalFenceHandleTypeFlags,
) -> ExternalFencePropertiesBuilder<'a> {
self.inner.compatible_handle_types = compatible_handle_types;
self
}
pub fn external_fence_features(
mut self,
external_fence_features: ExternalFenceFeatureFlags,
) -> ExternalFencePropertiesBuilder<'a> {
self.inner.external_fence_features = external_fence_features;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsExternalFenceProperties>(
mut self,
next: &'a mut T,
) -> ExternalFencePropertiesBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ExternalFenceProperties {
self.inner
}
}
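// Illustrative sketch, not part of the generated bindings:
// `ExternalFenceProperties` is an output structure filled in by the
// implementation, so its flag fields are usually inspected rather than set.
// The handle type to test is taken as a parameter instead of naming a
// specific flag constant.
#[allow(dead_code)]
fn external_fence_handle_type_checks(
    props: &ExternalFenceProperties,
    handle_type: ExternalFenceHandleTypeFlags,
) -> (bool, bool) {
    (
        // Handle types that may be specified together with the queried type.
        props.compatible_handle_types.contains(handle_type),
        // Handle types that a fence imported from the queried type may export.
        props.export_from_imported_handle_types.contains(handle_type),
    )
}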
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkExportFenceCreateInfo.html>"]
pub struct ExportFenceCreateInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub handle_types: ExternalFenceHandleTypeFlags,
}
impl ::std::default::Default for ExportFenceCreateInfo {
fn default() -> ExportFenceCreateInfo {
ExportFenceCreateInfo {
s_type: StructureType::EXPORT_FENCE_CREATE_INFO,
p_next: ::std::ptr::null(),
handle_types: ExternalFenceHandleTypeFlags::default(),
}
}
}
impl ExportFenceCreateInfo {
pub fn builder<'a>() -> ExportFenceCreateInfoBuilder<'a> {
ExportFenceCreateInfoBuilder {
inner: ExportFenceCreateInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ExportFenceCreateInfoBuilder<'a> {
inner: ExportFenceCreateInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsFenceCreateInfo for ExportFenceCreateInfoBuilder<'_> {}
unsafe impl ExtendsFenceCreateInfo for ExportFenceCreateInfo {}
impl<'a> ::std::ops::Deref for ExportFenceCreateInfoBuilder<'a> {
type Target = ExportFenceCreateInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ExportFenceCreateInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ExportFenceCreateInfoBuilder<'a> {
pub fn handle_types(
mut self,
handle_types: ExternalFenceHandleTypeFlags,
) -> ExportFenceCreateInfoBuilder<'a> {
self.inner.handle_types = handle_types;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ExportFenceCreateInfo {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkImportFenceWin32HandleInfoKHR.html>"]
pub struct ImportFenceWin32HandleInfoKHR {
pub s_type: StructureType,
pub p_next: *const c_void,
pub fence: Fence,
pub flags: FenceImportFlags,
pub handle_type: ExternalFenceHandleTypeFlags,
pub handle: HANDLE,
pub name: LPCWSTR,
}
impl ::std::default::Default for ImportFenceWin32HandleInfoKHR {
fn default() -> ImportFenceWin32HandleInfoKHR {
ImportFenceWin32HandleInfoKHR {
s_type: StructureType::IMPORT_FENCE_WIN32_HANDLE_INFO_KHR,
p_next: ::std::ptr::null(),
fence: Fence::default(),
flags: FenceImportFlags::default(),
handle_type: ExternalFenceHandleTypeFlags::default(),
handle: unsafe { ::std::mem::zeroed() },
name: unsafe { ::std::mem::zeroed() },
}
}
}
impl ImportFenceWin32HandleInfoKHR {
pub fn builder<'a>() -> ImportFenceWin32HandleInfoKHRBuilder<'a> {
ImportFenceWin32HandleInfoKHRBuilder {
inner: ImportFenceWin32HandleInfoKHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ImportFenceWin32HandleInfoKHRBuilder<'a> {
inner: ImportFenceWin32HandleInfoKHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsImportFenceWin32HandleInfoKHR {}
impl<'a> ::std::ops::Deref for ImportFenceWin32HandleInfoKHRBuilder<'a> {
type Target = ImportFenceWin32HandleInfoKHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ImportFenceWin32HandleInfoKHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ImportFenceWin32HandleInfoKHRBuilder<'a> {
pub fn fence(mut self, fence: Fence) -> ImportFenceWin32HandleInfoKHRBuilder<'a> {
self.inner.fence = fence;
self
}
pub fn flags(mut self, flags: FenceImportFlags) -> ImportFenceWin32HandleInfoKHRBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn handle_type(
mut self,
handle_type: ExternalFenceHandleTypeFlags,
) -> ImportFenceWin32HandleInfoKHRBuilder<'a> {
self.inner.handle_type = handle_type;
self
}
pub fn handle(mut self, handle: HANDLE) -> ImportFenceWin32HandleInfoKHRBuilder<'a> {
self.inner.handle = handle;
self
}
pub fn name(mut self, name: LPCWSTR) -> ImportFenceWin32HandleInfoKHRBuilder<'a> {
self.inner.name = name;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsImportFenceWin32HandleInfoKHR>(
mut self,
next: &'a mut T,
) -> ImportFenceWin32HandleInfoKHRBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ImportFenceWin32HandleInfoKHR {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkExportFenceWin32HandleInfoKHR.html>"]
pub struct ExportFenceWin32HandleInfoKHR {
pub s_type: StructureType,
pub p_next: *const c_void,
pub p_attributes: *const SECURITY_ATTRIBUTES,
pub dw_access: DWORD,
pub name: LPCWSTR,
}
impl ::std::default::Default for ExportFenceWin32HandleInfoKHR {
fn default() -> ExportFenceWin32HandleInfoKHR {
ExportFenceWin32HandleInfoKHR {
s_type: StructureType::EXPORT_FENCE_WIN32_HANDLE_INFO_KHR,
p_next: ::std::ptr::null(),
p_attributes: ::std::ptr::null(),
dw_access: DWORD::default(),
name: unsafe { ::std::mem::zeroed() },
}
}
}
impl ExportFenceWin32HandleInfoKHR {
pub fn builder<'a>() -> ExportFenceWin32HandleInfoKHRBuilder<'a> {
ExportFenceWin32HandleInfoKHRBuilder {
inner: ExportFenceWin32HandleInfoKHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ExportFenceWin32HandleInfoKHRBuilder<'a> {
inner: ExportFenceWin32HandleInfoKHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsFenceCreateInfo for ExportFenceWin32HandleInfoKHRBuilder<'_> {}
unsafe impl ExtendsFenceCreateInfo for ExportFenceWin32HandleInfoKHR {}
impl<'a> ::std::ops::Deref for ExportFenceWin32HandleInfoKHRBuilder<'a> {
type Target = ExportFenceWin32HandleInfoKHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ExportFenceWin32HandleInfoKHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ExportFenceWin32HandleInfoKHRBuilder<'a> {
pub fn attributes(
mut self,
attributes: &'a SECURITY_ATTRIBUTES,
) -> ExportFenceWin32HandleInfoKHRBuilder<'a> {
self.inner.p_attributes = attributes;
self
}
pub fn dw_access(mut self, dw_access: DWORD) -> ExportFenceWin32HandleInfoKHRBuilder<'a> {
self.inner.dw_access = dw_access;
self
}
pub fn name(mut self, name: LPCWSTR) -> ExportFenceWin32HandleInfoKHRBuilder<'a> {
self.inner.name = name;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ExportFenceWin32HandleInfoKHR {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkFenceGetWin32HandleInfoKHR.html>"]
pub struct FenceGetWin32HandleInfoKHR {
pub s_type: StructureType,
pub p_next: *const c_void,
pub fence: Fence,
pub handle_type: ExternalFenceHandleTypeFlags,
}
impl ::std::default::Default for FenceGetWin32HandleInfoKHR {
fn default() -> FenceGetWin32HandleInfoKHR {
FenceGetWin32HandleInfoKHR {
s_type: StructureType::FENCE_GET_WIN32_HANDLE_INFO_KHR,
p_next: ::std::ptr::null(),
fence: Fence::default(),
handle_type: ExternalFenceHandleTypeFlags::default(),
}
}
}
impl FenceGetWin32HandleInfoKHR {
pub fn builder<'a>() -> FenceGetWin32HandleInfoKHRBuilder<'a> {
FenceGetWin32HandleInfoKHRBuilder {
inner: FenceGetWin32HandleInfoKHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct FenceGetWin32HandleInfoKHRBuilder<'a> {
inner: FenceGetWin32HandleInfoKHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsFenceGetWin32HandleInfoKHR {}
impl<'a> ::std::ops::Deref for FenceGetWin32HandleInfoKHRBuilder<'a> {
type Target = FenceGetWin32HandleInfoKHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for FenceGetWin32HandleInfoKHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> FenceGetWin32HandleInfoKHRBuilder<'a> {
pub fn fence(mut self, fence: Fence) -> FenceGetWin32HandleInfoKHRBuilder<'a> {
self.inner.fence = fence;
self
}
pub fn handle_type(
mut self,
handle_type: ExternalFenceHandleTypeFlags,
) -> FenceGetWin32HandleInfoKHRBuilder<'a> {
self.inner.handle_type = handle_type;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsFenceGetWin32HandleInfoKHR>(
mut self,
next: &'a mut T,
) -> FenceGetWin32HandleInfoKHRBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> FenceGetWin32HandleInfoKHR {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkImportFenceFdInfoKHR.html>"]
pub struct ImportFenceFdInfoKHR {
pub s_type: StructureType,
pub p_next: *const c_void,
pub fence: Fence,
pub flags: FenceImportFlags,
pub handle_type: ExternalFenceHandleTypeFlags,
pub fd: c_int,
}
impl ::std::default::Default for ImportFenceFdInfoKHR {
fn default() -> ImportFenceFdInfoKHR {
ImportFenceFdInfoKHR {
s_type: StructureType::IMPORT_FENCE_FD_INFO_KHR,
p_next: ::std::ptr::null(),
fence: Fence::default(),
flags: FenceImportFlags::default(),
handle_type: ExternalFenceHandleTypeFlags::default(),
fd: c_int::default(),
}
}
}
impl ImportFenceFdInfoKHR {
pub fn builder<'a>() -> ImportFenceFdInfoKHRBuilder<'a> {
ImportFenceFdInfoKHRBuilder {
inner: ImportFenceFdInfoKHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ImportFenceFdInfoKHRBuilder<'a> {
inner: ImportFenceFdInfoKHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsImportFenceFdInfoKHR {}
impl<'a> ::std::ops::Deref for ImportFenceFdInfoKHRBuilder<'a> {
type Target = ImportFenceFdInfoKHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ImportFenceFdInfoKHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ImportFenceFdInfoKHRBuilder<'a> {
pub fn fence(mut self, fence: Fence) -> ImportFenceFdInfoKHRBuilder<'a> {
self.inner.fence = fence;
self
}
pub fn flags(mut self, flags: FenceImportFlags) -> ImportFenceFdInfoKHRBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn handle_type(
mut self,
handle_type: ExternalFenceHandleTypeFlags,
) -> ImportFenceFdInfoKHRBuilder<'a> {
self.inner.handle_type = handle_type;
self
}
pub fn fd(mut self, fd: c_int) -> ImportFenceFdInfoKHRBuilder<'a> {
self.inner.fd = fd;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsImportFenceFdInfoKHR>(
mut self,
next: &'a mut T,
) -> ImportFenceFdInfoKHRBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ImportFenceFdInfoKHR {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkFenceGetFdInfoKHR.html>"]
pub struct FenceGetFdInfoKHR {
pub s_type: StructureType,
pub p_next: *const c_void,
pub fence: Fence,
pub handle_type: ExternalFenceHandleTypeFlags,
}
impl ::std::default::Default for FenceGetFdInfoKHR {
fn default() -> FenceGetFdInfoKHR {
FenceGetFdInfoKHR {
s_type: StructureType::FENCE_GET_FD_INFO_KHR,
p_next: ::std::ptr::null(),
fence: Fence::default(),
handle_type: ExternalFenceHandleTypeFlags::default(),
}
}
}
impl FenceGetFdInfoKHR {
pub fn builder<'a>() -> FenceGetFdInfoKHRBuilder<'a> {
FenceGetFdInfoKHRBuilder {
inner: FenceGetFdInfoKHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct FenceGetFdInfoKHRBuilder<'a> {
inner: FenceGetFdInfoKHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsFenceGetFdInfoKHR {}
impl<'a> ::std::ops::Deref for FenceGetFdInfoKHRBuilder<'a> {
type Target = FenceGetFdInfoKHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for FenceGetFdInfoKHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> FenceGetFdInfoKHRBuilder<'a> {
pub fn fence(mut self, fence: Fence) -> FenceGetFdInfoKHRBuilder<'a> {
self.inner.fence = fence;
self
}
pub fn handle_type(
mut self,
handle_type: ExternalFenceHandleTypeFlags,
) -> FenceGetFdInfoKHRBuilder<'a> {
self.inner.handle_type = handle_type;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsFenceGetFdInfoKHR>(
mut self,
next: &'a mut T,
) -> FenceGetFdInfoKHRBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> FenceGetFdInfoKHR {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDeviceMultiviewFeatures.html>"]
pub struct PhysicalDeviceMultiviewFeatures {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub multiview: Bool32,
pub multiview_geometry_shader: Bool32,
pub multiview_tessellation_shader: Bool32,
}
impl ::std::default::Default for PhysicalDeviceMultiviewFeatures {
fn default() -> PhysicalDeviceMultiviewFeatures {
PhysicalDeviceMultiviewFeatures {
s_type: StructureType::PHYSICAL_DEVICE_MULTIVIEW_FEATURES,
p_next: ::std::ptr::null_mut(),
multiview: Bool32::default(),
multiview_geometry_shader: Bool32::default(),
multiview_tessellation_shader: Bool32::default(),
}
}
}
impl PhysicalDeviceMultiviewFeatures {
pub fn builder<'a>() -> PhysicalDeviceMultiviewFeaturesBuilder<'a> {
PhysicalDeviceMultiviewFeaturesBuilder {
inner: PhysicalDeviceMultiviewFeatures::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDeviceMultiviewFeaturesBuilder<'a> {
inner: PhysicalDeviceMultiviewFeatures,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceMultiviewFeaturesBuilder<'_> {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceMultiviewFeatures {}
impl<'a> ::std::ops::Deref for PhysicalDeviceMultiviewFeaturesBuilder<'a> {
type Target = PhysicalDeviceMultiviewFeatures;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceMultiviewFeaturesBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDeviceMultiviewFeaturesBuilder<'a> {
pub fn multiview(mut self, multiview: bool) -> PhysicalDeviceMultiviewFeaturesBuilder<'a> {
self.inner.multiview = multiview.into();
self
}
pub fn multiview_geometry_shader(
mut self,
multiview_geometry_shader: bool,
) -> PhysicalDeviceMultiviewFeaturesBuilder<'a> {
self.inner.multiview_geometry_shader = multiview_geometry_shader.into();
self
}
pub fn multiview_tessellation_shader(
mut self,
multiview_tessellation_shader: bool,
) -> PhysicalDeviceMultiviewFeaturesBuilder<'a> {
self.inner.multiview_tessellation_shader = multiview_tessellation_shader.into();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDeviceMultiviewFeatures {
self.inner
}
}
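// Illustrative sketch, not part of the generated bindings: the boolean setters
// above convert `bool` into `Bool32`, and the resulting struct is chained onto
// the device-creation info via `push_next`. This assumes the `DeviceCreateInfo`
// builder generated elsewhere in this module, whose `push_next` accepts any
// `ExtendsDeviceCreateInfo` implementor such as this builder.
#[allow(dead_code)]
fn enable_multiview_example() {
    let mut multiview = PhysicalDeviceMultiviewFeatures::builder().multiview(true);
    let device_info = DeviceCreateInfo::builder()
        // Inserts `multiview` directly behind the root of the p_next chain.
        .push_next(&mut multiview);
    // The extension struct is now the first element of the chain.
    assert!(!device_info.p_next.is_null());
}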
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDeviceMultiviewProperties.html>"]
pub struct PhysicalDeviceMultiviewProperties {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub max_multiview_view_count: u32,
pub max_multiview_instance_index: u32,
}
impl ::std::default::Default for PhysicalDeviceMultiviewProperties {
fn default() -> PhysicalDeviceMultiviewProperties {
PhysicalDeviceMultiviewProperties {
s_type: StructureType::PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES,
p_next: ::std::ptr::null_mut(),
max_multiview_view_count: u32::default(),
max_multiview_instance_index: u32::default(),
}
}
}
impl PhysicalDeviceMultiviewProperties {
pub fn builder<'a>() -> PhysicalDeviceMultiviewPropertiesBuilder<'a> {
PhysicalDeviceMultiviewPropertiesBuilder {
inner: PhysicalDeviceMultiviewProperties::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDeviceMultiviewPropertiesBuilder<'a> {
inner: PhysicalDeviceMultiviewProperties,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceMultiviewPropertiesBuilder<'_> {}
unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceMultiviewProperties {}
impl<'a> ::std::ops::Deref for PhysicalDeviceMultiviewPropertiesBuilder<'a> {
type Target = PhysicalDeviceMultiviewProperties;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceMultiviewPropertiesBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDeviceMultiviewPropertiesBuilder<'a> {
pub fn max_multiview_view_count(
mut self,
max_multiview_view_count: u32,
) -> PhysicalDeviceMultiviewPropertiesBuilder<'a> {
self.inner.max_multiview_view_count = max_multiview_view_count;
self
}
pub fn max_multiview_instance_index(
mut self,
max_multiview_instance_index: u32,
) -> PhysicalDeviceMultiviewPropertiesBuilder<'a> {
self.inner.max_multiview_instance_index = max_multiview_instance_index;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDeviceMultiviewProperties {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkRenderPassMultiviewCreateInfo.html>"]
pub struct RenderPassMultiviewCreateInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub subpass_count: u32,
pub p_view_masks: *const u32,
pub dependency_count: u32,
pub p_view_offsets: *const i32,
pub correlation_mask_count: u32,
pub p_correlation_masks: *const u32,
}
impl ::std::default::Default for RenderPassMultiviewCreateInfo {
fn default() -> RenderPassMultiviewCreateInfo {
RenderPassMultiviewCreateInfo {
s_type: StructureType::RENDER_PASS_MULTIVIEW_CREATE_INFO,
p_next: ::std::ptr::null(),
subpass_count: u32::default(),
p_view_masks: ::std::ptr::null(),
dependency_count: u32::default(),
p_view_offsets: ::std::ptr::null(),
correlation_mask_count: u32::default(),
p_correlation_masks: ::std::ptr::null(),
}
}
}
impl RenderPassMultiviewCreateInfo {
pub fn builder<'a>() -> RenderPassMultiviewCreateInfoBuilder<'a> {
RenderPassMultiviewCreateInfoBuilder {
inner: RenderPassMultiviewCreateInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct RenderPassMultiviewCreateInfoBuilder<'a> {
inner: RenderPassMultiviewCreateInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsRenderPassCreateInfo for RenderPassMultiviewCreateInfoBuilder<'_> {}
unsafe impl ExtendsRenderPassCreateInfo for RenderPassMultiviewCreateInfo {}
impl<'a> ::std::ops::Deref for RenderPassMultiviewCreateInfoBuilder<'a> {
type Target = RenderPassMultiviewCreateInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for RenderPassMultiviewCreateInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> RenderPassMultiviewCreateInfoBuilder<'a> {
pub fn view_masks(mut self, view_masks: &'a [u32]) -> RenderPassMultiviewCreateInfoBuilder<'a> {
self.inner.subpass_count = view_masks.len() as _;
self.inner.p_view_masks = view_masks.as_ptr();
self
}
pub fn view_offsets(
mut self,
view_offsets: &'a [i32],
) -> RenderPassMultiviewCreateInfoBuilder<'a> {
self.inner.dependency_count = view_offsets.len() as _;
self.inner.p_view_offsets = view_offsets.as_ptr();
self
}
pub fn correlation_masks(
mut self,
correlation_masks: &'a [u32],
) -> RenderPassMultiviewCreateInfoBuilder<'a> {
self.inner.correlation_mask_count = correlation_masks.len() as _;
self.inner.p_correlation_masks = correlation_masks.as_ptr();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> RenderPassMultiviewCreateInfo {
self.inner
}
}
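// Illustrative sketch, not part of the generated bindings: each entry of
// `view_masks` is a per-subpass bitmask in which bit `i` enables view `i`.
// The slice setters above derive `subpass_count`, `dependency_count`, and
// `correlation_mask_count` from the slice lengths.
#[allow(dead_code)]
fn render_pass_multiview_example() {
    // Two subpasses, each rendering to views 0 and 1 (bits 0 and 1 set).
    let view_masks = [0b11u32, 0b11];
    let correlation_masks = [0b11u32];
    let info = RenderPassMultiviewCreateInfo::builder()
        .view_masks(&view_masks)
        .correlation_masks(&correlation_masks);
    assert_eq!(info.subpass_count, 2);
    assert_eq!(info.correlation_mask_count, 1);
}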
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkSurfaceCapabilities2EXT.html>"]
pub struct SurfaceCapabilities2EXT {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub min_image_count: u32,
pub max_image_count: u32,
pub current_extent: Extent2D,
pub min_image_extent: Extent2D,
pub max_image_extent: Extent2D,
pub max_image_array_layers: u32,
pub supported_transforms: SurfaceTransformFlagsKHR,
pub current_transform: SurfaceTransformFlagsKHR,
pub supported_composite_alpha: CompositeAlphaFlagsKHR,
pub supported_usage_flags: ImageUsageFlags,
pub supported_surface_counters: SurfaceCounterFlagsEXT,
}
impl ::std::default::Default for SurfaceCapabilities2EXT {
fn default() -> SurfaceCapabilities2EXT {
SurfaceCapabilities2EXT {
s_type: StructureType::SURFACE_CAPABILITIES_2_EXT,
p_next: ::std::ptr::null_mut(),
min_image_count: u32::default(),
max_image_count: u32::default(),
current_extent: Extent2D::default(),
min_image_extent: Extent2D::default(),
max_image_extent: Extent2D::default(),
max_image_array_layers: u32::default(),
supported_transforms: SurfaceTransformFlagsKHR::default(),
current_transform: SurfaceTransformFlagsKHR::default(),
supported_composite_alpha: CompositeAlphaFlagsKHR::default(),
supported_usage_flags: ImageUsageFlags::default(),
supported_surface_counters: SurfaceCounterFlagsEXT::default(),
}
}
}
impl SurfaceCapabilities2EXT {
pub fn builder<'a>() -> SurfaceCapabilities2EXTBuilder<'a> {
SurfaceCapabilities2EXTBuilder {
inner: SurfaceCapabilities2EXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct SurfaceCapabilities2EXTBuilder<'a> {
inner: SurfaceCapabilities2EXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsSurfaceCapabilities2EXT {}
impl<'a> ::std::ops::Deref for SurfaceCapabilities2EXTBuilder<'a> {
type Target = SurfaceCapabilities2EXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for SurfaceCapabilities2EXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> SurfaceCapabilities2EXTBuilder<'a> {
pub fn min_image_count(mut self, min_image_count: u32) -> SurfaceCapabilities2EXTBuilder<'a> {
self.inner.min_image_count = min_image_count;
self
}
pub fn max_image_count(mut self, max_image_count: u32) -> SurfaceCapabilities2EXTBuilder<'a> {
self.inner.max_image_count = max_image_count;
self
}
pub fn current_extent(
mut self,
current_extent: Extent2D,
) -> SurfaceCapabilities2EXTBuilder<'a> {
self.inner.current_extent = current_extent;
self
}
pub fn min_image_extent(
mut self,
min_image_extent: Extent2D,
) -> SurfaceCapabilities2EXTBuilder<'a> {
self.inner.min_image_extent = min_image_extent;
self
}
pub fn max_image_extent(
mut self,
max_image_extent: Extent2D,
) -> SurfaceCapabilities2EXTBuilder<'a> {
self.inner.max_image_extent = max_image_extent;
self
}
pub fn max_image_array_layers(
mut self,
max_image_array_layers: u32,
) -> SurfaceCapabilities2EXTBuilder<'a> {
self.inner.max_image_array_layers = max_image_array_layers;
self
}
pub fn supported_transforms(
mut self,
supported_transforms: SurfaceTransformFlagsKHR,
) -> SurfaceCapabilities2EXTBuilder<'a> {
self.inner.supported_transforms = supported_transforms;
self
}
pub fn current_transform(
mut self,
current_transform: SurfaceTransformFlagsKHR,
) -> SurfaceCapabilities2EXTBuilder<'a> {
self.inner.current_transform = current_transform;
self
}
pub fn supported_composite_alpha(
mut self,
supported_composite_alpha: CompositeAlphaFlagsKHR,
) -> SurfaceCapabilities2EXTBuilder<'a> {
self.inner.supported_composite_alpha = supported_composite_alpha;
self
}
pub fn supported_usage_flags(
mut self,
supported_usage_flags: ImageUsageFlags,
) -> SurfaceCapabilities2EXTBuilder<'a> {
self.inner.supported_usage_flags = supported_usage_flags;
self
}
pub fn supported_surface_counters(
mut self,
supported_surface_counters: SurfaceCounterFlagsEXT,
) -> SurfaceCapabilities2EXTBuilder<'a> {
self.inner.supported_surface_counters = supported_surface_counters;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsSurfaceCapabilities2EXT>(
mut self,
next: &'a mut T,
) -> SurfaceCapabilities2EXTBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> SurfaceCapabilities2EXT {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDisplayPowerInfoEXT.html>"]
pub struct DisplayPowerInfoEXT {
pub s_type: StructureType,
pub p_next: *const c_void,
pub power_state: DisplayPowerStateEXT,
}
impl ::std::default::Default for DisplayPowerInfoEXT {
fn default() -> DisplayPowerInfoEXT {
DisplayPowerInfoEXT {
s_type: StructureType::DISPLAY_POWER_INFO_EXT,
p_next: ::std::ptr::null(),
power_state: DisplayPowerStateEXT::default(),
}
}
}
impl DisplayPowerInfoEXT {
pub fn builder<'a>() -> DisplayPowerInfoEXTBuilder<'a> {
DisplayPowerInfoEXTBuilder {
inner: DisplayPowerInfoEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct DisplayPowerInfoEXTBuilder<'a> {
inner: DisplayPowerInfoEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsDisplayPowerInfoEXT {}
impl<'a> ::std::ops::Deref for DisplayPowerInfoEXTBuilder<'a> {
type Target = DisplayPowerInfoEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for DisplayPowerInfoEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> DisplayPowerInfoEXTBuilder<'a> {
pub fn power_state(
mut self,
power_state: DisplayPowerStateEXT,
) -> DisplayPowerInfoEXTBuilder<'a> {
self.inner.power_state = power_state;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsDisplayPowerInfoEXT>(
mut self,
next: &'a mut T,
) -> DisplayPowerInfoEXTBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> DisplayPowerInfoEXT {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDeviceEventInfoEXT.html>"]
pub struct DeviceEventInfoEXT {
pub s_type: StructureType,
pub p_next: *const c_void,
pub device_event: DeviceEventTypeEXT,
}
impl ::std::default::Default for DeviceEventInfoEXT {
fn default() -> DeviceEventInfoEXT {
DeviceEventInfoEXT {
s_type: StructureType::DEVICE_EVENT_INFO_EXT,
p_next: ::std::ptr::null(),
device_event: DeviceEventTypeEXT::default(),
}
}
}
impl DeviceEventInfoEXT {
pub fn builder<'a>() -> DeviceEventInfoEXTBuilder<'a> {
DeviceEventInfoEXTBuilder {
inner: DeviceEventInfoEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct DeviceEventInfoEXTBuilder<'a> {
inner: DeviceEventInfoEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsDeviceEventInfoEXT {}
impl<'a> ::std::ops::Deref for DeviceEventInfoEXTBuilder<'a> {
type Target = DeviceEventInfoEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for DeviceEventInfoEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> DeviceEventInfoEXTBuilder<'a> {
pub fn device_event(
mut self,
device_event: DeviceEventTypeEXT,
) -> DeviceEventInfoEXTBuilder<'a> {
self.inner.device_event = device_event;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsDeviceEventInfoEXT>(
mut self,
next: &'a mut T,
) -> DeviceEventInfoEXTBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> DeviceEventInfoEXT {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDisplayEventInfoEXT.html>"]
pub struct DisplayEventInfoEXT {
pub s_type: StructureType,
pub p_next: *const c_void,
pub display_event: DisplayEventTypeEXT,
}
impl ::std::default::Default for DisplayEventInfoEXT {
fn default() -> DisplayEventInfoEXT {
DisplayEventInfoEXT {
s_type: StructureType::DISPLAY_EVENT_INFO_EXT,
p_next: ::std::ptr::null(),
display_event: DisplayEventTypeEXT::default(),
}
}
}
impl DisplayEventInfoEXT {
pub fn builder<'a>() -> DisplayEventInfoEXTBuilder<'a> {
DisplayEventInfoEXTBuilder {
inner: DisplayEventInfoEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct DisplayEventInfoEXTBuilder<'a> {
inner: DisplayEventInfoEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsDisplayEventInfoEXT {}
impl<'a> ::std::ops::Deref for DisplayEventInfoEXTBuilder<'a> {
type Target = DisplayEventInfoEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for DisplayEventInfoEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> DisplayEventInfoEXTBuilder<'a> {
pub fn display_event(
mut self,
display_event: DisplayEventTypeEXT,
) -> DisplayEventInfoEXTBuilder<'a> {
self.inner.display_event = display_event;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsDisplayEventInfoEXT>(
mut self,
next: &'a mut T,
) -> DisplayEventInfoEXTBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> DisplayEventInfoEXT {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkSwapchainCounterCreateInfoEXT.html>"]
pub struct SwapchainCounterCreateInfoEXT {
pub s_type: StructureType,
pub p_next: *const c_void,
pub surface_counters: SurfaceCounterFlagsEXT,
}
impl ::std::default::Default for SwapchainCounterCreateInfoEXT {
fn default() -> SwapchainCounterCreateInfoEXT {
SwapchainCounterCreateInfoEXT {
s_type: StructureType::SWAPCHAIN_COUNTER_CREATE_INFO_EXT,
p_next: ::std::ptr::null(),
surface_counters: SurfaceCounterFlagsEXT::default(),
}
}
}
impl SwapchainCounterCreateInfoEXT {
pub fn builder<'a>() -> SwapchainCounterCreateInfoEXTBuilder<'a> {
SwapchainCounterCreateInfoEXTBuilder {
inner: SwapchainCounterCreateInfoEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct SwapchainCounterCreateInfoEXTBuilder<'a> {
inner: SwapchainCounterCreateInfoEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsSwapchainCreateInfoKHR for SwapchainCounterCreateInfoEXTBuilder<'_> {}
unsafe impl ExtendsSwapchainCreateInfoKHR for SwapchainCounterCreateInfoEXT {}
impl<'a> ::std::ops::Deref for SwapchainCounterCreateInfoEXTBuilder<'a> {
type Target = SwapchainCounterCreateInfoEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for SwapchainCounterCreateInfoEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> SwapchainCounterCreateInfoEXTBuilder<'a> {
pub fn surface_counters(
mut self,
surface_counters: SurfaceCounterFlagsEXT,
) -> SwapchainCounterCreateInfoEXTBuilder<'a> {
self.inner.surface_counters = surface_counters;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> SwapchainCounterCreateInfoEXT {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDeviceGroupProperties.html>"]
pub struct PhysicalDeviceGroupProperties {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub physical_device_count: u32,
pub physical_devices: [PhysicalDevice; MAX_DEVICE_GROUP_SIZE],
pub subset_allocation: Bool32,
}
impl ::std::default::Default for PhysicalDeviceGroupProperties {
fn default() -> PhysicalDeviceGroupProperties {
PhysicalDeviceGroupProperties {
s_type: StructureType::PHYSICAL_DEVICE_GROUP_PROPERTIES,
p_next: ::std::ptr::null_mut(),
physical_device_count: u32::default(),
physical_devices: unsafe { ::std::mem::zeroed() },
subset_allocation: Bool32::default(),
}
}
}
impl PhysicalDeviceGroupProperties {
pub fn builder<'a>() -> PhysicalDeviceGroupPropertiesBuilder<'a> {
PhysicalDeviceGroupPropertiesBuilder {
inner: PhysicalDeviceGroupProperties::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDeviceGroupPropertiesBuilder<'a> {
inner: PhysicalDeviceGroupProperties,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsPhysicalDeviceGroupProperties {}
impl<'a> ::std::ops::Deref for PhysicalDeviceGroupPropertiesBuilder<'a> {
type Target = PhysicalDeviceGroupProperties;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceGroupPropertiesBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDeviceGroupPropertiesBuilder<'a> {
pub fn physical_device_count(
mut self,
physical_device_count: u32,
) -> PhysicalDeviceGroupPropertiesBuilder<'a> {
self.inner.physical_device_count = physical_device_count;
self
}
pub fn physical_devices(
mut self,
physical_devices: [PhysicalDevice; MAX_DEVICE_GROUP_SIZE],
) -> PhysicalDeviceGroupPropertiesBuilder<'a> {
self.inner.physical_devices = physical_devices;
self
}
pub fn subset_allocation(
mut self,
subset_allocation: bool,
) -> PhysicalDeviceGroupPropertiesBuilder<'a> {
self.inner.subset_allocation = subset_allocation.into();
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsPhysicalDeviceGroupProperties>(
mut self,
next: &'a mut T,
) -> PhysicalDeviceGroupPropertiesBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDeviceGroupProperties {
self.inner
}
}
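// Illustrative sketch, not part of the generated bindings:
// `physical_devices` is a fixed-size array of which only the first
// `physical_device_count` entries are filled in by the implementation, so the
// valid members are typically viewed as a slice.
#[allow(dead_code)]
fn device_group_members(props: &PhysicalDeviceGroupProperties) -> &[PhysicalDevice] {
    &props.physical_devices[..props.physical_device_count as usize]
}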
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkMemoryAllocateFlagsInfo.html>"]
pub struct MemoryAllocateFlagsInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub flags: MemoryAllocateFlags,
pub device_mask: u32,
}
impl ::std::default::Default for MemoryAllocateFlagsInfo {
fn default() -> MemoryAllocateFlagsInfo {
MemoryAllocateFlagsInfo {
s_type: StructureType::MEMORY_ALLOCATE_FLAGS_INFO,
p_next: ::std::ptr::null(),
flags: MemoryAllocateFlags::default(),
device_mask: u32::default(),
}
}
}
impl MemoryAllocateFlagsInfo {
pub fn builder<'a>() -> MemoryAllocateFlagsInfoBuilder<'a> {
MemoryAllocateFlagsInfoBuilder {
inner: MemoryAllocateFlagsInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct MemoryAllocateFlagsInfoBuilder<'a> {
inner: MemoryAllocateFlagsInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsMemoryAllocateInfo for MemoryAllocateFlagsInfoBuilder<'_> {}
unsafe impl ExtendsMemoryAllocateInfo for MemoryAllocateFlagsInfo {}
impl<'a> ::std::ops::Deref for MemoryAllocateFlagsInfoBuilder<'a> {
type Target = MemoryAllocateFlagsInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for MemoryAllocateFlagsInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> MemoryAllocateFlagsInfoBuilder<'a> {
pub fn flags(mut self, flags: MemoryAllocateFlags) -> MemoryAllocateFlagsInfoBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn device_mask(mut self, device_mask: u32) -> MemoryAllocateFlagsInfoBuilder<'a> {
self.inner.device_mask = device_mask;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> MemoryAllocateFlagsInfo {
self.inner
}
}
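// Illustrative sketch, not part of the generated bindings: `device_mask`
// addresses devices within a device group by bit index, so a mask can be
// assembled from the device indices reported in
// `PhysicalDeviceGroupProperties`.
#[allow(dead_code)]
fn memory_allocate_flags_for_devices(
    flags: MemoryAllocateFlags,
    device_indices: &[u32],
) -> MemoryAllocateFlagsInfo {
    // Set one bit per participating device index.
    let device_mask = device_indices.iter().fold(0u32, |mask, &i| mask | (1 << i));
    MemoryAllocateFlagsInfo::builder()
        .flags(flags)
        .device_mask(device_mask)
        .build()
}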
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkBindBufferMemoryInfo.html>"]
pub struct BindBufferMemoryInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub buffer: Buffer,
pub memory: DeviceMemory,
pub memory_offset: DeviceSize,
}
impl ::std::default::Default for BindBufferMemoryInfo {
fn default() -> BindBufferMemoryInfo {
BindBufferMemoryInfo {
s_type: StructureType::BIND_BUFFER_MEMORY_INFO,
p_next: ::std::ptr::null(),
buffer: Buffer::default(),
memory: DeviceMemory::default(),
memory_offset: DeviceSize::default(),
}
}
}
impl BindBufferMemoryInfo {
pub fn builder<'a>() -> BindBufferMemoryInfoBuilder<'a> {
BindBufferMemoryInfoBuilder {
inner: BindBufferMemoryInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct BindBufferMemoryInfoBuilder<'a> {
inner: BindBufferMemoryInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsBindBufferMemoryInfo {}
impl<'a> ::std::ops::Deref for BindBufferMemoryInfoBuilder<'a> {
type Target = BindBufferMemoryInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for BindBufferMemoryInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> BindBufferMemoryInfoBuilder<'a> {
pub fn buffer(mut self, buffer: Buffer) -> BindBufferMemoryInfoBuilder<'a> {
self.inner.buffer = buffer;
self
}
pub fn memory(mut self, memory: DeviceMemory) -> BindBufferMemoryInfoBuilder<'a> {
self.inner.memory = memory;
self
}
pub fn memory_offset(mut self, memory_offset: DeviceSize) -> BindBufferMemoryInfoBuilder<'a> {
self.inner.memory_offset = memory_offset;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsBindBufferMemoryInfo>(
mut self,
next: &'a mut T,
) -> BindBufferMemoryInfoBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> BindBufferMemoryInfo {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkBindBufferMemoryDeviceGroupInfo.html>"]
pub struct BindBufferMemoryDeviceGroupInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub device_index_count: u32,
pub p_device_indices: *const u32,
}
impl ::std::default::Default for BindBufferMemoryDeviceGroupInfo {
fn default() -> BindBufferMemoryDeviceGroupInfo {
BindBufferMemoryDeviceGroupInfo {
s_type: StructureType::BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO,
p_next: ::std::ptr::null(),
device_index_count: u32::default(),
p_device_indices: ::std::ptr::null(),
}
}
}
impl BindBufferMemoryDeviceGroupInfo {
pub fn builder<'a>() -> BindBufferMemoryDeviceGroupInfoBuilder<'a> {
BindBufferMemoryDeviceGroupInfoBuilder {
inner: BindBufferMemoryDeviceGroupInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct BindBufferMemoryDeviceGroupInfoBuilder<'a> {
inner: BindBufferMemoryDeviceGroupInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsBindBufferMemoryInfo for BindBufferMemoryDeviceGroupInfoBuilder<'_> {}
unsafe impl ExtendsBindBufferMemoryInfo for BindBufferMemoryDeviceGroupInfo {}
impl<'a> ::std::ops::Deref for BindBufferMemoryDeviceGroupInfoBuilder<'a> {
type Target = BindBufferMemoryDeviceGroupInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for BindBufferMemoryDeviceGroupInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> BindBufferMemoryDeviceGroupInfoBuilder<'a> {
pub fn device_indices(
mut self,
device_indices: &'a [u32],
) -> BindBufferMemoryDeviceGroupInfoBuilder<'a> {
self.inner.device_index_count = device_indices.len() as _;
self.inner.p_device_indices = device_indices.as_ptr();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> BindBufferMemoryDeviceGroupInfo {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkBindImageMemoryInfo.html>"]
pub struct BindImageMemoryInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub image: Image,
pub memory: DeviceMemory,
pub memory_offset: DeviceSize,
}
impl ::std::default::Default for BindImageMemoryInfo {
fn default() -> BindImageMemoryInfo {
BindImageMemoryInfo {
s_type: StructureType::BIND_IMAGE_MEMORY_INFO,
p_next: ::std::ptr::null(),
image: Image::default(),
memory: DeviceMemory::default(),
memory_offset: DeviceSize::default(),
}
}
}
impl BindImageMemoryInfo {
pub fn builder<'a>() -> BindImageMemoryInfoBuilder<'a> {
BindImageMemoryInfoBuilder {
inner: BindImageMemoryInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct BindImageMemoryInfoBuilder<'a> {
inner: BindImageMemoryInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsBindImageMemoryInfo {}
impl<'a> ::std::ops::Deref for BindImageMemoryInfoBuilder<'a> {
type Target = BindImageMemoryInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for BindImageMemoryInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> BindImageMemoryInfoBuilder<'a> {
pub fn image(mut self, image: Image) -> BindImageMemoryInfoBuilder<'a> {
self.inner.image = image;
self
}
pub fn memory(mut self, memory: DeviceMemory) -> BindImageMemoryInfoBuilder<'a> {
self.inner.memory = memory;
self
}
pub fn memory_offset(mut self, memory_offset: DeviceSize) -> BindImageMemoryInfoBuilder<'a> {
self.inner.memory_offset = memory_offset;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsBindImageMemoryInfo>(
mut self,
next: &'a mut T,
) -> BindImageMemoryInfoBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> BindImageMemoryInfo {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkBindImageMemoryDeviceGroupInfo.html>"]
pub struct BindImageMemoryDeviceGroupInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub device_index_count: u32,
pub p_device_indices: *const u32,
pub split_instance_bind_region_count: u32,
pub p_split_instance_bind_regions: *const Rect2D,
}
impl ::std::default::Default for BindImageMemoryDeviceGroupInfo {
fn default() -> BindImageMemoryDeviceGroupInfo {
BindImageMemoryDeviceGroupInfo {
s_type: StructureType::BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO,
p_next: ::std::ptr::null(),
device_index_count: u32::default(),
p_device_indices: ::std::ptr::null(),
split_instance_bind_region_count: u32::default(),
p_split_instance_bind_regions: ::std::ptr::null(),
}
}
}
impl BindImageMemoryDeviceGroupInfo {
pub fn builder<'a>() -> BindImageMemoryDeviceGroupInfoBuilder<'a> {
BindImageMemoryDeviceGroupInfoBuilder {
inner: BindImageMemoryDeviceGroupInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct BindImageMemoryDeviceGroupInfoBuilder<'a> {
inner: BindImageMemoryDeviceGroupInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsBindImageMemoryInfo for BindImageMemoryDeviceGroupInfoBuilder<'_> {}
unsafe impl ExtendsBindImageMemoryInfo for BindImageMemoryDeviceGroupInfo {}
impl<'a> ::std::ops::Deref for BindImageMemoryDeviceGroupInfoBuilder<'a> {
type Target = BindImageMemoryDeviceGroupInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for BindImageMemoryDeviceGroupInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> BindImageMemoryDeviceGroupInfoBuilder<'a> {
pub fn device_indices(
mut self,
device_indices: &'a [u32],
) -> BindImageMemoryDeviceGroupInfoBuilder<'a> {
self.inner.device_index_count = device_indices.len() as _;
self.inner.p_device_indices = device_indices.as_ptr();
self
}
pub fn split_instance_bind_regions(
mut self,
split_instance_bind_regions: &'a [Rect2D],
) -> BindImageMemoryDeviceGroupInfoBuilder<'a> {
self.inner.split_instance_bind_region_count = split_instance_bind_regions.len() as _;
self.inner.p_split_instance_bind_regions = split_instance_bind_regions.as_ptr();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> BindImageMemoryDeviceGroupInfo {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDeviceGroupRenderPassBeginInfo.html>"]
pub struct DeviceGroupRenderPassBeginInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub device_mask: u32,
pub device_render_area_count: u32,
pub p_device_render_areas: *const Rect2D,
}
impl ::std::default::Default for DeviceGroupRenderPassBeginInfo {
fn default() -> DeviceGroupRenderPassBeginInfo {
DeviceGroupRenderPassBeginInfo {
s_type: StructureType::DEVICE_GROUP_RENDER_PASS_BEGIN_INFO,
p_next: ::std::ptr::null(),
device_mask: u32::default(),
device_render_area_count: u32::default(),
p_device_render_areas: ::std::ptr::null(),
}
}
}
impl DeviceGroupRenderPassBeginInfo {
pub fn builder<'a>() -> DeviceGroupRenderPassBeginInfoBuilder<'a> {
DeviceGroupRenderPassBeginInfoBuilder {
inner: DeviceGroupRenderPassBeginInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct DeviceGroupRenderPassBeginInfoBuilder<'a> {
inner: DeviceGroupRenderPassBeginInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsRenderPassBeginInfo for DeviceGroupRenderPassBeginInfoBuilder<'_> {}
unsafe impl ExtendsRenderPassBeginInfo for DeviceGroupRenderPassBeginInfo {}
impl<'a> ::std::ops::Deref for DeviceGroupRenderPassBeginInfoBuilder<'a> {
type Target = DeviceGroupRenderPassBeginInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for DeviceGroupRenderPassBeginInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> DeviceGroupRenderPassBeginInfoBuilder<'a> {
pub fn device_mask(mut self, device_mask: u32) -> DeviceGroupRenderPassBeginInfoBuilder<'a> {
self.inner.device_mask = device_mask;
self
}
pub fn device_render_areas(
mut self,
device_render_areas: &'a [Rect2D],
) -> DeviceGroupRenderPassBeginInfoBuilder<'a> {
self.inner.device_render_area_count = device_render_areas.len() as _;
self.inner.p_device_render_areas = device_render_areas.as_ptr();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> DeviceGroupRenderPassBeginInfo {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDeviceGroupCommandBufferBeginInfo.html>"]
pub struct DeviceGroupCommandBufferBeginInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub device_mask: u32,
}
impl ::std::default::Default for DeviceGroupCommandBufferBeginInfo {
fn default() -> DeviceGroupCommandBufferBeginInfo {
DeviceGroupCommandBufferBeginInfo {
s_type: StructureType::DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO,
p_next: ::std::ptr::null(),
device_mask: u32::default(),
}
}
}
impl DeviceGroupCommandBufferBeginInfo {
pub fn builder<'a>() -> DeviceGroupCommandBufferBeginInfoBuilder<'a> {
DeviceGroupCommandBufferBeginInfoBuilder {
inner: DeviceGroupCommandBufferBeginInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct DeviceGroupCommandBufferBeginInfoBuilder<'a> {
inner: DeviceGroupCommandBufferBeginInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsCommandBufferBeginInfo for DeviceGroupCommandBufferBeginInfoBuilder<'_> {}
unsafe impl ExtendsCommandBufferBeginInfo for DeviceGroupCommandBufferBeginInfo {}
impl<'a> ::std::ops::Deref for DeviceGroupCommandBufferBeginInfoBuilder<'a> {
type Target = DeviceGroupCommandBufferBeginInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for DeviceGroupCommandBufferBeginInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> DeviceGroupCommandBufferBeginInfoBuilder<'a> {
pub fn device_mask(mut self, device_mask: u32) -> DeviceGroupCommandBufferBeginInfoBuilder<'a> {
self.inner.device_mask = device_mask;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> DeviceGroupCommandBufferBeginInfo {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDeviceGroupSubmitInfo.html>"]
pub struct DeviceGroupSubmitInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub wait_semaphore_count: u32,
pub p_wait_semaphore_device_indices: *const u32,
pub command_buffer_count: u32,
pub p_command_buffer_device_masks: *const u32,
pub signal_semaphore_count: u32,
pub p_signal_semaphore_device_indices: *const u32,
}
impl ::std::default::Default for DeviceGroupSubmitInfo {
fn default() -> DeviceGroupSubmitInfo {
DeviceGroupSubmitInfo {
s_type: StructureType::DEVICE_GROUP_SUBMIT_INFO,
p_next: ::std::ptr::null(),
wait_semaphore_count: u32::default(),
p_wait_semaphore_device_indices: ::std::ptr::null(),
command_buffer_count: u32::default(),
p_command_buffer_device_masks: ::std::ptr::null(),
signal_semaphore_count: u32::default(),
p_signal_semaphore_device_indices: ::std::ptr::null(),
}
}
}
impl DeviceGroupSubmitInfo {
pub fn builder<'a>() -> DeviceGroupSubmitInfoBuilder<'a> {
DeviceGroupSubmitInfoBuilder {
inner: DeviceGroupSubmitInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct DeviceGroupSubmitInfoBuilder<'a> {
inner: DeviceGroupSubmitInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsSubmitInfo for DeviceGroupSubmitInfoBuilder<'_> {}
unsafe impl ExtendsSubmitInfo for DeviceGroupSubmitInfo {}
impl<'a> ::std::ops::Deref for DeviceGroupSubmitInfoBuilder<'a> {
type Target = DeviceGroupSubmitInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for DeviceGroupSubmitInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> DeviceGroupSubmitInfoBuilder<'a> {
pub fn wait_semaphore_device_indices(
mut self,
wait_semaphore_device_indices: &'a [u32],
) -> DeviceGroupSubmitInfoBuilder<'a> {
self.inner.wait_semaphore_count = wait_semaphore_device_indices.len() as _;
self.inner.p_wait_semaphore_device_indices = wait_semaphore_device_indices.as_ptr();
self
}
pub fn command_buffer_device_masks(
mut self,
command_buffer_device_masks: &'a [u32],
) -> DeviceGroupSubmitInfoBuilder<'a> {
self.inner.command_buffer_count = command_buffer_device_masks.len() as _;
self.inner.p_command_buffer_device_masks = command_buffer_device_masks.as_ptr();
self
}
pub fn signal_semaphore_device_indices(
mut self,
signal_semaphore_device_indices: &'a [u32],
) -> DeviceGroupSubmitInfoBuilder<'a> {
self.inner.signal_semaphore_count = signal_semaphore_device_indices.len() as _;
self.inner.p_signal_semaphore_device_indices = signal_semaphore_device_indices.as_ptr();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> DeviceGroupSubmitInfo {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDeviceGroupBindSparseInfo.html>"]
pub struct DeviceGroupBindSparseInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub resource_device_index: u32,
pub memory_device_index: u32,
}
impl ::std::default::Default for DeviceGroupBindSparseInfo {
fn default() -> DeviceGroupBindSparseInfo {
DeviceGroupBindSparseInfo {
s_type: StructureType::DEVICE_GROUP_BIND_SPARSE_INFO,
p_next: ::std::ptr::null(),
resource_device_index: u32::default(),
memory_device_index: u32::default(),
}
}
}
impl DeviceGroupBindSparseInfo {
pub fn builder<'a>() -> DeviceGroupBindSparseInfoBuilder<'a> {
DeviceGroupBindSparseInfoBuilder {
inner: DeviceGroupBindSparseInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct DeviceGroupBindSparseInfoBuilder<'a> {
inner: DeviceGroupBindSparseInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsBindSparseInfo for DeviceGroupBindSparseInfoBuilder<'_> {}
unsafe impl ExtendsBindSparseInfo for DeviceGroupBindSparseInfo {}
impl<'a> ::std::ops::Deref for DeviceGroupBindSparseInfoBuilder<'a> {
type Target = DeviceGroupBindSparseInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for DeviceGroupBindSparseInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> DeviceGroupBindSparseInfoBuilder<'a> {
pub fn resource_device_index(
mut self,
resource_device_index: u32,
) -> DeviceGroupBindSparseInfoBuilder<'a> {
self.inner.resource_device_index = resource_device_index;
self
}
pub fn memory_device_index(
mut self,
memory_device_index: u32,
) -> DeviceGroupBindSparseInfoBuilder<'a> {
self.inner.memory_device_index = memory_device_index;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> DeviceGroupBindSparseInfo {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDeviceGroupPresentCapabilitiesKHR.html>"]
pub struct DeviceGroupPresentCapabilitiesKHR {
pub s_type: StructureType,
pub p_next: *const c_void,
pub present_mask: [u32; MAX_DEVICE_GROUP_SIZE],
pub modes: DeviceGroupPresentModeFlagsKHR,
}
impl ::std::default::Default for DeviceGroupPresentCapabilitiesKHR {
fn default() -> DeviceGroupPresentCapabilitiesKHR {
DeviceGroupPresentCapabilitiesKHR {
s_type: StructureType::DEVICE_GROUP_PRESENT_CAPABILITIES_KHR,
p_next: ::std::ptr::null(),
present_mask: unsafe { ::std::mem::zeroed() },
modes: DeviceGroupPresentModeFlagsKHR::default(),
}
}
}
impl DeviceGroupPresentCapabilitiesKHR {
pub fn builder<'a>() -> DeviceGroupPresentCapabilitiesKHRBuilder<'a> {
DeviceGroupPresentCapabilitiesKHRBuilder {
inner: DeviceGroupPresentCapabilitiesKHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct DeviceGroupPresentCapabilitiesKHRBuilder<'a> {
inner: DeviceGroupPresentCapabilitiesKHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsDeviceGroupPresentCapabilitiesKHR {}
impl<'a> ::std::ops::Deref for DeviceGroupPresentCapabilitiesKHRBuilder<'a> {
type Target = DeviceGroupPresentCapabilitiesKHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for DeviceGroupPresentCapabilitiesKHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> DeviceGroupPresentCapabilitiesKHRBuilder<'a> {
pub fn present_mask(
mut self,
present_mask: [u32; MAX_DEVICE_GROUP_SIZE],
) -> DeviceGroupPresentCapabilitiesKHRBuilder<'a> {
self.inner.present_mask = present_mask;
self
}
pub fn modes(
mut self,
modes: DeviceGroupPresentModeFlagsKHR,
) -> DeviceGroupPresentCapabilitiesKHRBuilder<'a> {
self.inner.modes = modes;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsDeviceGroupPresentCapabilitiesKHR>(
mut self,
next: &'a mut T,
) -> DeviceGroupPresentCapabilitiesKHRBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> DeviceGroupPresentCapabilitiesKHR {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkImageSwapchainCreateInfoKHR.html>"]
pub struct ImageSwapchainCreateInfoKHR {
pub s_type: StructureType,
pub p_next: *const c_void,
pub swapchain: SwapchainKHR,
}
impl ::std::default::Default for ImageSwapchainCreateInfoKHR {
fn default() -> ImageSwapchainCreateInfoKHR {
ImageSwapchainCreateInfoKHR {
s_type: StructureType::IMAGE_SWAPCHAIN_CREATE_INFO_KHR,
p_next: ::std::ptr::null(),
swapchain: SwapchainKHR::default(),
}
}
}
impl ImageSwapchainCreateInfoKHR {
pub fn builder<'a>() -> ImageSwapchainCreateInfoKHRBuilder<'a> {
ImageSwapchainCreateInfoKHRBuilder {
inner: ImageSwapchainCreateInfoKHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ImageSwapchainCreateInfoKHRBuilder<'a> {
inner: ImageSwapchainCreateInfoKHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsImageCreateInfo for ImageSwapchainCreateInfoKHRBuilder<'_> {}
unsafe impl ExtendsImageCreateInfo for ImageSwapchainCreateInfoKHR {}
impl<'a> ::std::ops::Deref for ImageSwapchainCreateInfoKHRBuilder<'a> {
type Target = ImageSwapchainCreateInfoKHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ImageSwapchainCreateInfoKHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ImageSwapchainCreateInfoKHRBuilder<'a> {
pub fn swapchain(mut self, swapchain: SwapchainKHR) -> ImageSwapchainCreateInfoKHRBuilder<'a> {
self.inner.swapchain = swapchain;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ImageSwapchainCreateInfoKHR {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkBindImageMemorySwapchainInfoKHR.html>"]
pub struct BindImageMemorySwapchainInfoKHR {
pub s_type: StructureType,
pub p_next: *const c_void,
pub swapchain: SwapchainKHR,
pub image_index: u32,
}
impl ::std::default::Default for BindImageMemorySwapchainInfoKHR {
fn default() -> BindImageMemorySwapchainInfoKHR {
BindImageMemorySwapchainInfoKHR {
s_type: StructureType::BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR,
p_next: ::std::ptr::null(),
swapchain: SwapchainKHR::default(),
image_index: u32::default(),
}
}
}
impl BindImageMemorySwapchainInfoKHR {
pub fn builder<'a>() -> BindImageMemorySwapchainInfoKHRBuilder<'a> {
BindImageMemorySwapchainInfoKHRBuilder {
inner: BindImageMemorySwapchainInfoKHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct BindImageMemorySwapchainInfoKHRBuilder<'a> {
inner: BindImageMemorySwapchainInfoKHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsBindImageMemoryInfo for BindImageMemorySwapchainInfoKHRBuilder<'_> {}
unsafe impl ExtendsBindImageMemoryInfo for BindImageMemorySwapchainInfoKHR {}
impl<'a> ::std::ops::Deref for BindImageMemorySwapchainInfoKHRBuilder<'a> {
type Target = BindImageMemorySwapchainInfoKHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for BindImageMemorySwapchainInfoKHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> BindImageMemorySwapchainInfoKHRBuilder<'a> {
pub fn swapchain(
mut self,
swapchain: SwapchainKHR,
) -> BindImageMemorySwapchainInfoKHRBuilder<'a> {
self.inner.swapchain = swapchain;
self
}
pub fn image_index(mut self, image_index: u32) -> BindImageMemorySwapchainInfoKHRBuilder<'a> {
self.inner.image_index = image_index;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> BindImageMemorySwapchainInfoKHR {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkAcquireNextImageInfoKHR.html>"]
pub struct AcquireNextImageInfoKHR {
pub s_type: StructureType,
pub p_next: *const c_void,
pub swapchain: SwapchainKHR,
pub timeout: u64,
pub semaphore: Semaphore,
pub fence: Fence,
pub device_mask: u32,
}
impl ::std::default::Default for AcquireNextImageInfoKHR {
fn default() -> AcquireNextImageInfoKHR {
AcquireNextImageInfoKHR {
s_type: StructureType::ACQUIRE_NEXT_IMAGE_INFO_KHR,
p_next: ::std::ptr::null(),
swapchain: SwapchainKHR::default(),
timeout: u64::default(),
semaphore: Semaphore::default(),
fence: Fence::default(),
device_mask: u32::default(),
}
}
}
impl AcquireNextImageInfoKHR {
pub fn builder<'a>() -> AcquireNextImageInfoKHRBuilder<'a> {
AcquireNextImageInfoKHRBuilder {
inner: AcquireNextImageInfoKHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct AcquireNextImageInfoKHRBuilder<'a> {
inner: AcquireNextImageInfoKHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsAcquireNextImageInfoKHR {}
impl<'a> ::std::ops::Deref for AcquireNextImageInfoKHRBuilder<'a> {
type Target = AcquireNextImageInfoKHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for AcquireNextImageInfoKHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> AcquireNextImageInfoKHRBuilder<'a> {
pub fn swapchain(mut self, swapchain: SwapchainKHR) -> AcquireNextImageInfoKHRBuilder<'a> {
self.inner.swapchain = swapchain;
self
}
pub fn timeout(mut self, timeout: u64) -> AcquireNextImageInfoKHRBuilder<'a> {
self.inner.timeout = timeout;
self
}
pub fn semaphore(mut self, semaphore: Semaphore) -> AcquireNextImageInfoKHRBuilder<'a> {
self.inner.semaphore = semaphore;
self
}
pub fn fence(mut self, fence: Fence) -> AcquireNextImageInfoKHRBuilder<'a> {
self.inner.fence = fence;
self
}
pub fn device_mask(mut self, device_mask: u32) -> AcquireNextImageInfoKHRBuilder<'a> {
self.inner.device_mask = device_mask;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsAcquireNextImageInfoKHR>(
mut self,
next: &'a mut T,
) -> AcquireNextImageInfoKHRBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> AcquireNextImageInfoKHR {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDeviceGroupPresentInfoKHR.html>"]
pub struct DeviceGroupPresentInfoKHR {
pub s_type: StructureType,
pub p_next: *const c_void,
pub swapchain_count: u32,
pub p_device_masks: *const u32,
pub mode: DeviceGroupPresentModeFlagsKHR,
}
impl ::std::default::Default for DeviceGroupPresentInfoKHR {
fn default() -> DeviceGroupPresentInfoKHR {
DeviceGroupPresentInfoKHR {
s_type: StructureType::DEVICE_GROUP_PRESENT_INFO_KHR,
p_next: ::std::ptr::null(),
swapchain_count: u32::default(),
p_device_masks: ::std::ptr::null(),
mode: DeviceGroupPresentModeFlagsKHR::default(),
}
}
}
impl DeviceGroupPresentInfoKHR {
pub fn builder<'a>() -> DeviceGroupPresentInfoKHRBuilder<'a> {
DeviceGroupPresentInfoKHRBuilder {
inner: DeviceGroupPresentInfoKHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct DeviceGroupPresentInfoKHRBuilder<'a> {
inner: DeviceGroupPresentInfoKHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsPresentInfoKHR for DeviceGroupPresentInfoKHRBuilder<'_> {}
unsafe impl ExtendsPresentInfoKHR for DeviceGroupPresentInfoKHR {}
impl<'a> ::std::ops::Deref for DeviceGroupPresentInfoKHRBuilder<'a> {
type Target = DeviceGroupPresentInfoKHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for DeviceGroupPresentInfoKHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> DeviceGroupPresentInfoKHRBuilder<'a> {
pub fn device_masks(mut self, device_masks: &'a [u32]) -> DeviceGroupPresentInfoKHRBuilder<'a> {
self.inner.swapchain_count = device_masks.len() as _;
self.inner.p_device_masks = device_masks.as_ptr();
self
}
pub fn mode(
mut self,
mode: DeviceGroupPresentModeFlagsKHR,
) -> DeviceGroupPresentInfoKHRBuilder<'a> {
self.inner.mode = mode;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> DeviceGroupPresentInfoKHR {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDeviceGroupDeviceCreateInfo.html>"]
pub struct DeviceGroupDeviceCreateInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub physical_device_count: u32,
pub p_physical_devices: *const PhysicalDevice,
}
impl ::std::default::Default for DeviceGroupDeviceCreateInfo {
fn default() -> DeviceGroupDeviceCreateInfo {
DeviceGroupDeviceCreateInfo {
s_type: StructureType::DEVICE_GROUP_DEVICE_CREATE_INFO,
p_next: ::std::ptr::null(),
physical_device_count: u32::default(),
p_physical_devices: ::std::ptr::null(),
}
}
}
impl DeviceGroupDeviceCreateInfo {
pub fn builder<'a>() -> DeviceGroupDeviceCreateInfoBuilder<'a> {
DeviceGroupDeviceCreateInfoBuilder {
inner: DeviceGroupDeviceCreateInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct DeviceGroupDeviceCreateInfoBuilder<'a> {
inner: DeviceGroupDeviceCreateInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsDeviceCreateInfo for DeviceGroupDeviceCreateInfoBuilder<'_> {}
unsafe impl ExtendsDeviceCreateInfo for DeviceGroupDeviceCreateInfo {}
impl<'a> ::std::ops::Deref for DeviceGroupDeviceCreateInfoBuilder<'a> {
type Target = DeviceGroupDeviceCreateInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for DeviceGroupDeviceCreateInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> DeviceGroupDeviceCreateInfoBuilder<'a> {
pub fn physical_devices(
mut self,
physical_devices: &'a [PhysicalDevice],
) -> DeviceGroupDeviceCreateInfoBuilder<'a> {
self.inner.physical_device_count = physical_devices.len() as _;
self.inner.p_physical_devices = physical_devices.as_ptr();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> DeviceGroupDeviceCreateInfo {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDeviceGroupSwapchainCreateInfoKHR.html>"]
pub struct DeviceGroupSwapchainCreateInfoKHR {
pub s_type: StructureType,
pub p_next: *const c_void,
pub modes: DeviceGroupPresentModeFlagsKHR,
}
impl ::std::default::Default for DeviceGroupSwapchainCreateInfoKHR {
fn default() -> DeviceGroupSwapchainCreateInfoKHR {
DeviceGroupSwapchainCreateInfoKHR {
s_type: StructureType::DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR,
p_next: ::std::ptr::null(),
modes: DeviceGroupPresentModeFlagsKHR::default(),
}
}
}
impl DeviceGroupSwapchainCreateInfoKHR {
pub fn builder<'a>() -> DeviceGroupSwapchainCreateInfoKHRBuilder<'a> {
DeviceGroupSwapchainCreateInfoKHRBuilder {
inner: DeviceGroupSwapchainCreateInfoKHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct DeviceGroupSwapchainCreateInfoKHRBuilder<'a> {
inner: DeviceGroupSwapchainCreateInfoKHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsSwapchainCreateInfoKHR for DeviceGroupSwapchainCreateInfoKHRBuilder<'_> {}
unsafe impl ExtendsSwapchainCreateInfoKHR for DeviceGroupSwapchainCreateInfoKHR {}
impl<'a> ::std::ops::Deref for DeviceGroupSwapchainCreateInfoKHRBuilder<'a> {
type Target = DeviceGroupSwapchainCreateInfoKHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for DeviceGroupSwapchainCreateInfoKHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> DeviceGroupSwapchainCreateInfoKHRBuilder<'a> {
pub fn modes(
mut self,
modes: DeviceGroupPresentModeFlagsKHR,
) -> DeviceGroupSwapchainCreateInfoKHRBuilder<'a> {
self.inner.modes = modes;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> DeviceGroupSwapchainCreateInfoKHR {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDescriptorUpdateTemplateEntry.html>"]
pub struct DescriptorUpdateTemplateEntry {
pub dst_binding: u32,
pub dst_array_element: u32,
pub descriptor_count: u32,
pub descriptor_type: DescriptorType,
pub offset: usize,
pub stride: usize,
}
impl DescriptorUpdateTemplateEntry {
pub fn builder<'a>() -> DescriptorUpdateTemplateEntryBuilder<'a> {
DescriptorUpdateTemplateEntryBuilder {
inner: DescriptorUpdateTemplateEntry::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct DescriptorUpdateTemplateEntryBuilder<'a> {
inner: DescriptorUpdateTemplateEntry,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for DescriptorUpdateTemplateEntryBuilder<'a> {
type Target = DescriptorUpdateTemplateEntry;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for DescriptorUpdateTemplateEntryBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> DescriptorUpdateTemplateEntryBuilder<'a> {
pub fn dst_binding(mut self, dst_binding: u32) -> DescriptorUpdateTemplateEntryBuilder<'a> {
self.inner.dst_binding = dst_binding;
self
}
pub fn dst_array_element(
mut self,
dst_array_element: u32,
) -> DescriptorUpdateTemplateEntryBuilder<'a> {
self.inner.dst_array_element = dst_array_element;
self
}
pub fn descriptor_count(
mut self,
descriptor_count: u32,
) -> DescriptorUpdateTemplateEntryBuilder<'a> {
self.inner.descriptor_count = descriptor_count;
self
}
pub fn descriptor_type(
mut self,
descriptor_type: DescriptorType,
) -> DescriptorUpdateTemplateEntryBuilder<'a> {
self.inner.descriptor_type = descriptor_type;
self
}
pub fn offset(mut self, offset: usize) -> DescriptorUpdateTemplateEntryBuilder<'a> {
self.inner.offset = offset;
self
}
pub fn stride(mut self, stride: usize) -> DescriptorUpdateTemplateEntryBuilder<'a> {
self.inner.stride = stride;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> DescriptorUpdateTemplateEntry {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDescriptorUpdateTemplateCreateInfo.html>"]
pub struct DescriptorUpdateTemplateCreateInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub flags: DescriptorUpdateTemplateCreateFlags,
pub descriptor_update_entry_count: u32,
pub p_descriptor_update_entries: *const DescriptorUpdateTemplateEntry,
pub template_type: DescriptorUpdateTemplateType,
pub descriptor_set_layout: DescriptorSetLayout,
pub pipeline_bind_point: PipelineBindPoint,
pub pipeline_layout: PipelineLayout,
pub set: u32,
}
impl ::std::default::Default for DescriptorUpdateTemplateCreateInfo {
fn default() -> DescriptorUpdateTemplateCreateInfo {
DescriptorUpdateTemplateCreateInfo {
s_type: StructureType::DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO,
p_next: ::std::ptr::null(),
flags: DescriptorUpdateTemplateCreateFlags::default(),
descriptor_update_entry_count: u32::default(),
p_descriptor_update_entries: ::std::ptr::null(),
template_type: DescriptorUpdateTemplateType::default(),
descriptor_set_layout: DescriptorSetLayout::default(),
pipeline_bind_point: PipelineBindPoint::default(),
pipeline_layout: PipelineLayout::default(),
set: u32::default(),
}
}
}
impl DescriptorUpdateTemplateCreateInfo {
pub fn builder<'a>() -> DescriptorUpdateTemplateCreateInfoBuilder<'a> {
DescriptorUpdateTemplateCreateInfoBuilder {
inner: DescriptorUpdateTemplateCreateInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct DescriptorUpdateTemplateCreateInfoBuilder<'a> {
inner: DescriptorUpdateTemplateCreateInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsDescriptorUpdateTemplateCreateInfo {}
impl<'a> ::std::ops::Deref for DescriptorUpdateTemplateCreateInfoBuilder<'a> {
type Target = DescriptorUpdateTemplateCreateInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for DescriptorUpdateTemplateCreateInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> DescriptorUpdateTemplateCreateInfoBuilder<'a> {
pub fn flags(
mut self,
flags: DescriptorUpdateTemplateCreateFlags,
) -> DescriptorUpdateTemplateCreateInfoBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn descriptor_update_entries(
mut self,
descriptor_update_entries: &'a [DescriptorUpdateTemplateEntry],
) -> DescriptorUpdateTemplateCreateInfoBuilder<'a> {
self.inner.descriptor_update_entry_count = descriptor_update_entries.len() as _;
self.inner.p_descriptor_update_entries = descriptor_update_entries.as_ptr();
self
}
pub fn template_type(
mut self,
template_type: DescriptorUpdateTemplateType,
) -> DescriptorUpdateTemplateCreateInfoBuilder<'a> {
self.inner.template_type = template_type;
self
}
pub fn descriptor_set_layout(
mut self,
descriptor_set_layout: DescriptorSetLayout,
) -> DescriptorUpdateTemplateCreateInfoBuilder<'a> {
self.inner.descriptor_set_layout = descriptor_set_layout;
self
}
pub fn pipeline_bind_point(
mut self,
pipeline_bind_point: PipelineBindPoint,
) -> DescriptorUpdateTemplateCreateInfoBuilder<'a> {
self.inner.pipeline_bind_point = pipeline_bind_point;
self
}
pub fn pipeline_layout(
mut self,
pipeline_layout: PipelineLayout,
) -> DescriptorUpdateTemplateCreateInfoBuilder<'a> {
self.inner.pipeline_layout = pipeline_layout;
self
}
pub fn set(mut self, set: u32) -> DescriptorUpdateTemplateCreateInfoBuilder<'a> {
self.inner.set = set;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsDescriptorUpdateTemplateCreateInfo>(
mut self,
next: &'a mut T,
) -> DescriptorUpdateTemplateCreateInfoBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> DescriptorUpdateTemplateCreateInfo {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkXYColorEXT.html>"]
pub struct XYColorEXT {
pub x: f32,
pub y: f32,
}
impl XYColorEXT {
pub fn builder<'a>() -> XYColorEXTBuilder<'a> {
XYColorEXTBuilder {
inner: XYColorEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct XYColorEXTBuilder<'a> {
inner: XYColorEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for XYColorEXTBuilder<'a> {
type Target = XYColorEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for XYColorEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> XYColorEXTBuilder<'a> {
pub fn x(mut self, x: f32) -> XYColorEXTBuilder<'a> {
self.inner.x = x;
self
}
pub fn y(mut self, y: f32) -> XYColorEXTBuilder<'a> {
self.inner.y = y;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> XYColorEXT {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkHdrMetadataEXT.html>"]
pub struct HdrMetadataEXT {
pub s_type: StructureType,
pub p_next: *const c_void,
pub display_primary_red: XYColorEXT,
pub display_primary_green: XYColorEXT,
pub display_primary_blue: XYColorEXT,
pub white_point: XYColorEXT,
pub max_luminance: f32,
pub min_luminance: f32,
pub max_content_light_level: f32,
pub max_frame_average_light_level: f32,
}
impl ::std::default::Default for HdrMetadataEXT {
fn default() -> HdrMetadataEXT {
HdrMetadataEXT {
s_type: StructureType::HDR_METADATA_EXT,
p_next: ::std::ptr::null(),
display_primary_red: XYColorEXT::default(),
display_primary_green: XYColorEXT::default(),
display_primary_blue: XYColorEXT::default(),
white_point: XYColorEXT::default(),
max_luminance: f32::default(),
min_luminance: f32::default(),
max_content_light_level: f32::default(),
max_frame_average_light_level: f32::default(),
}
}
}
impl HdrMetadataEXT {
pub fn builder<'a>() -> HdrMetadataEXTBuilder<'a> {
HdrMetadataEXTBuilder {
inner: HdrMetadataEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct HdrMetadataEXTBuilder<'a> {
inner: HdrMetadataEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsHdrMetadataEXT {}
impl<'a> ::std::ops::Deref for HdrMetadataEXTBuilder<'a> {
type Target = HdrMetadataEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for HdrMetadataEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> HdrMetadataEXTBuilder<'a> {
pub fn display_primary_red(
mut self,
display_primary_red: XYColorEXT,
) -> HdrMetadataEXTBuilder<'a> {
self.inner.display_primary_red = display_primary_red;
self
}
pub fn display_primary_green(
mut self,
display_primary_green: XYColorEXT,
) -> HdrMetadataEXTBuilder<'a> {
self.inner.display_primary_green = display_primary_green;
self
}
pub fn display_primary_blue(
mut self,
display_primary_blue: XYColorEXT,
) -> HdrMetadataEXTBuilder<'a> {
self.inner.display_primary_blue = display_primary_blue;
self
}
pub fn white_point(mut self, white_point: XYColorEXT) -> HdrMetadataEXTBuilder<'a> {
self.inner.white_point = white_point;
self
}
pub fn max_luminance(mut self, max_luminance: f32) -> HdrMetadataEXTBuilder<'a> {
self.inner.max_luminance = max_luminance;
self
}
pub fn min_luminance(mut self, min_luminance: f32) -> HdrMetadataEXTBuilder<'a> {
self.inner.min_luminance = min_luminance;
self
}
pub fn max_content_light_level(
mut self,
max_content_light_level: f32,
) -> HdrMetadataEXTBuilder<'a> {
self.inner.max_content_light_level = max_content_light_level;
self
}
pub fn max_frame_average_light_level(
mut self,
max_frame_average_light_level: f32,
) -> HdrMetadataEXTBuilder<'a> {
self.inner.max_frame_average_light_level = max_frame_average_light_level;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsHdrMetadataEXT>(
mut self,
next: &'a mut T,
) -> HdrMetadataEXTBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> HdrMetadataEXT {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkRefreshCycleDurationGOOGLE.html>"]
pub struct RefreshCycleDurationGOOGLE {
pub refresh_duration: u64,
}
impl RefreshCycleDurationGOOGLE {
pub fn builder<'a>() -> RefreshCycleDurationGOOGLEBuilder<'a> {
RefreshCycleDurationGOOGLEBuilder {
inner: RefreshCycleDurationGOOGLE::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct RefreshCycleDurationGOOGLEBuilder<'a> {
inner: RefreshCycleDurationGOOGLE,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for RefreshCycleDurationGOOGLEBuilder<'a> {
type Target = RefreshCycleDurationGOOGLE;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for RefreshCycleDurationGOOGLEBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> RefreshCycleDurationGOOGLEBuilder<'a> {
pub fn refresh_duration(
mut self,
refresh_duration: u64,
) -> RefreshCycleDurationGOOGLEBuilder<'a> {
self.inner.refresh_duration = refresh_duration;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> RefreshCycleDurationGOOGLE {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPastPresentationTimingGOOGLE.html>"]
pub struct PastPresentationTimingGOOGLE {
pub present_id: u32,
pub desired_present_time: u64,
pub actual_present_time: u64,
pub earliest_present_time: u64,
pub present_margin: u64,
}
impl PastPresentationTimingGOOGLE {
pub fn builder<'a>() -> PastPresentationTimingGOOGLEBuilder<'a> {
PastPresentationTimingGOOGLEBuilder {
inner: PastPresentationTimingGOOGLE::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PastPresentationTimingGOOGLEBuilder<'a> {
inner: PastPresentationTimingGOOGLE,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for PastPresentationTimingGOOGLEBuilder<'a> {
type Target = PastPresentationTimingGOOGLE;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PastPresentationTimingGOOGLEBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PastPresentationTimingGOOGLEBuilder<'a> {
pub fn present_id(mut self, present_id: u32) -> PastPresentationTimingGOOGLEBuilder<'a> {
self.inner.present_id = present_id;
self
}
pub fn desired_present_time(
mut self,
desired_present_time: u64,
) -> PastPresentationTimingGOOGLEBuilder<'a> {
self.inner.desired_present_time = desired_present_time;
self
}
pub fn actual_present_time(
mut self,
actual_present_time: u64,
) -> PastPresentationTimingGOOGLEBuilder<'a> {
self.inner.actual_present_time = actual_present_time;
self
}
pub fn earliest_present_time(
mut self,
earliest_present_time: u64,
) -> PastPresentationTimingGOOGLEBuilder<'a> {
self.inner.earliest_present_time = earliest_present_time;
self
}
pub fn present_margin(
mut self,
present_margin: u64,
) -> PastPresentationTimingGOOGLEBuilder<'a> {
self.inner.present_margin = present_margin;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PastPresentationTimingGOOGLE {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPresentTimesInfoGOOGLE.html>"]
pub struct PresentTimesInfoGOOGLE {
pub s_type: StructureType,
pub p_next: *const c_void,
pub swapchain_count: u32,
pub p_times: *const PresentTimeGOOGLE,
}
impl ::std::default::Default for PresentTimesInfoGOOGLE {
fn default() -> PresentTimesInfoGOOGLE {
PresentTimesInfoGOOGLE {
s_type: StructureType::PRESENT_TIMES_INFO_GOOGLE,
p_next: ::std::ptr::null(),
swapchain_count: u32::default(),
p_times: ::std::ptr::null(),
}
}
}
impl PresentTimesInfoGOOGLE {
pub fn builder<'a>() -> PresentTimesInfoGOOGLEBuilder<'a> {
PresentTimesInfoGOOGLEBuilder {
inner: PresentTimesInfoGOOGLE::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PresentTimesInfoGOOGLEBuilder<'a> {
inner: PresentTimesInfoGOOGLE,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsPresentInfoKHR for PresentTimesInfoGOOGLEBuilder<'_> {}
unsafe impl ExtendsPresentInfoKHR for PresentTimesInfoGOOGLE {}
impl<'a> ::std::ops::Deref for PresentTimesInfoGOOGLEBuilder<'a> {
type Target = PresentTimesInfoGOOGLE;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PresentTimesInfoGOOGLEBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PresentTimesInfoGOOGLEBuilder<'a> {
pub fn times(mut self, times: &'a [PresentTimeGOOGLE]) -> PresentTimesInfoGOOGLEBuilder<'a> {
self.inner.swapchain_count = times.len() as _;
self.inner.p_times = times.as_ptr();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PresentTimesInfoGOOGLE {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPresentTimeGOOGLE.html>"]
pub struct PresentTimeGOOGLE {
pub present_id: u32,
pub desired_present_time: u64,
}
impl PresentTimeGOOGLE {
pub fn builder<'a>() -> PresentTimeGOOGLEBuilder<'a> {
PresentTimeGOOGLEBuilder {
inner: PresentTimeGOOGLE::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PresentTimeGOOGLEBuilder<'a> {
inner: PresentTimeGOOGLE,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for PresentTimeGOOGLEBuilder<'a> {
type Target = PresentTimeGOOGLE;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PresentTimeGOOGLEBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PresentTimeGOOGLEBuilder<'a> {
pub fn present_id(mut self, present_id: u32) -> PresentTimeGOOGLEBuilder<'a> {
self.inner.present_id = present_id;
self
}
pub fn desired_present_time(
mut self,
desired_present_time: u64,
) -> PresentTimeGOOGLEBuilder<'a> {
self.inner.desired_present_time = desired_present_time;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PresentTimeGOOGLE {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkIOSSurfaceCreateInfoMVK.html>"]
pub struct IOSSurfaceCreateInfoMVK {
pub s_type: StructureType,
pub p_next: *const c_void,
pub flags: IOSSurfaceCreateFlagsMVK,
pub p_view: *const c_void,
}
impl ::std::default::Default for IOSSurfaceCreateInfoMVK {
fn default() -> IOSSurfaceCreateInfoMVK {
IOSSurfaceCreateInfoMVK {
s_type: StructureType::IOS_SURFACE_CREATE_INFO_M,
p_next: ::std::ptr::null(),
flags: IOSSurfaceCreateFlagsMVK::default(),
p_view: ::std::ptr::null(),
}
}
}
impl IOSSurfaceCreateInfoMVK {
pub fn builder<'a>() -> IOSSurfaceCreateInfoMVKBuilder<'a> {
IOSSurfaceCreateInfoMVKBuilder {
inner: IOSSurfaceCreateInfoMVK::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct IOSSurfaceCreateInfoMVKBuilder<'a> {
inner: IOSSurfaceCreateInfoMVK,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsIOSSurfaceCreateInfoMVK {}
impl<'a> ::std::ops::Deref for IOSSurfaceCreateInfoMVKBuilder<'a> {
type Target = IOSSurfaceCreateInfoMVK;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for IOSSurfaceCreateInfoMVKBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> IOSSurfaceCreateInfoMVKBuilder<'a> {
pub fn flags(mut self, flags: IOSSurfaceCreateFlagsMVK) -> IOSSurfaceCreateInfoMVKBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn view(mut self, view: &'a c_void) -> IOSSurfaceCreateInfoMVKBuilder<'a> {
self.inner.p_view = view;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsIOSSurfaceCreateInfoMVK>(
mut self,
next: &'a mut T,
) -> IOSSurfaceCreateInfoMVKBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> IOSSurfaceCreateInfoMVK {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkMacOSSurfaceCreateInfoMVK.html>"]
pub struct MacOSSurfaceCreateInfoMVK {
pub s_type: StructureType,
pub p_next: *const c_void,
pub flags: MacOSSurfaceCreateFlagsMVK,
pub p_view: *const c_void,
}
impl ::std::default::Default for MacOSSurfaceCreateInfoMVK {
fn default() -> MacOSSurfaceCreateInfoMVK {
MacOSSurfaceCreateInfoMVK {
s_type: StructureType::MACOS_SURFACE_CREATE_INFO_M,
p_next: ::std::ptr::null(),
flags: MacOSSurfaceCreateFlagsMVK::default(),
p_view: ::std::ptr::null(),
}
}
}
impl MacOSSurfaceCreateInfoMVK {
pub fn builder<'a>() -> MacOSSurfaceCreateInfoMVKBuilder<'a> {
MacOSSurfaceCreateInfoMVKBuilder {
inner: MacOSSurfaceCreateInfoMVK::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct MacOSSurfaceCreateInfoMVKBuilder<'a> {
inner: MacOSSurfaceCreateInfoMVK,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsMacOSSurfaceCreateInfoMVK {}
impl<'a> ::std::ops::Deref for MacOSSurfaceCreateInfoMVKBuilder<'a> {
type Target = MacOSSurfaceCreateInfoMVK;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for MacOSSurfaceCreateInfoMVKBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> MacOSSurfaceCreateInfoMVKBuilder<'a> {
pub fn flags(
mut self,
flags: MacOSSurfaceCreateFlagsMVK,
) -> MacOSSurfaceCreateInfoMVKBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn view(mut self, view: &'a c_void) -> MacOSSurfaceCreateInfoMVKBuilder<'a> {
self.inner.p_view = view;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsMacOSSurfaceCreateInfoMVK>(
mut self,
next: &'a mut T,
) -> MacOSSurfaceCreateInfoMVKBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> MacOSSurfaceCreateInfoMVK {
self.inner
}
}
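// A minimal usage sketch, not part of the generated bindings (the helper name is
// illustrative only): `p_view` is an opaque pointer to an NSView backed by a
// CAMetalLayer, taken here as a `&c_void` borrow so that the builder's `'a` lifetime
// covers the view for as long as the create info is in use.
#[allow(dead_code)]
fn macos_surface_create_info_sketch<'a>(view: &'a c_void) -> MacOSSurfaceCreateInfoMVKBuilder<'a> {
    MacOSSurfaceCreateInfoMVK::builder().view(view)
}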
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkViewportWScalingNV.html>"]
pub struct ViewportWScalingNV {
pub xcoeff: f32,
pub ycoeff: f32,
}
impl ViewportWScalingNV {
pub fn builder<'a>() -> ViewportWScalingNVBuilder<'a> {
ViewportWScalingNVBuilder {
inner: ViewportWScalingNV::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ViewportWScalingNVBuilder<'a> {
inner: ViewportWScalingNV,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for ViewportWScalingNVBuilder<'a> {
type Target = ViewportWScalingNV;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ViewportWScalingNVBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ViewportWScalingNVBuilder<'a> {
pub fn xcoeff(mut self, xcoeff: f32) -> ViewportWScalingNVBuilder<'a> {
self.inner.xcoeff = xcoeff;
self
}
pub fn ycoeff(mut self, ycoeff: f32) -> ViewportWScalingNVBuilder<'a> {
self.inner.ycoeff = ycoeff;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ViewportWScalingNV {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPipelineViewportWScalingStateCreateInfoNV.html>"]
pub struct PipelineViewportWScalingStateCreateInfoNV {
pub s_type: StructureType,
pub p_next: *const c_void,
pub viewport_w_scaling_enable: Bool32,
pub viewport_count: u32,
pub p_viewport_w_scalings: *const ViewportWScalingNV,
}
impl ::std::default::Default for PipelineViewportWScalingStateCreateInfoNV {
fn default() -> PipelineViewportWScalingStateCreateInfoNV {
PipelineViewportWScalingStateCreateInfoNV {
s_type: StructureType::PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV,
p_next: ::std::ptr::null(),
viewport_w_scaling_enable: Bool32::default(),
viewport_count: u32::default(),
p_viewport_w_scalings: ::std::ptr::null(),
}
}
}
impl PipelineViewportWScalingStateCreateInfoNV {
pub fn builder<'a>() -> PipelineViewportWScalingStateCreateInfoNVBuilder<'a> {
PipelineViewportWScalingStateCreateInfoNVBuilder {
inner: PipelineViewportWScalingStateCreateInfoNV::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PipelineViewportWScalingStateCreateInfoNVBuilder<'a> {
inner: PipelineViewportWScalingStateCreateInfoNV,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsPipelineViewportStateCreateInfo
for PipelineViewportWScalingStateCreateInfoNVBuilder<'_>
{
}
unsafe impl ExtendsPipelineViewportStateCreateInfo for PipelineViewportWScalingStateCreateInfoNV {}
impl<'a> ::std::ops::Deref for PipelineViewportWScalingStateCreateInfoNVBuilder<'a> {
type Target = PipelineViewportWScalingStateCreateInfoNV;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PipelineViewportWScalingStateCreateInfoNVBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PipelineViewportWScalingStateCreateInfoNVBuilder<'a> {
pub fn viewport_w_scaling_enable(
mut self,
viewport_w_scaling_enable: bool,
) -> PipelineViewportWScalingStateCreateInfoNVBuilder<'a> {
self.inner.viewport_w_scaling_enable = viewport_w_scaling_enable.into();
self
}
pub fn viewport_w_scalings(
mut self,
viewport_w_scalings: &'a [ViewportWScalingNV],
) -> PipelineViewportWScalingStateCreateInfoNVBuilder<'a> {
self.inner.viewport_count = viewport_w_scalings.len() as _;
self.inner.p_viewport_w_scalings = viewport_w_scalings.as_ptr();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PipelineViewportWScalingStateCreateInfoNV {
self.inner
}
}
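// A minimal usage sketch, not part of the generated bindings (the helper name is
// illustrative only): `viewport_w_scaling_enable` converts the `bool` into the FFI
// `Bool32`, and `viewport_w_scalings` records both the element count and the pointer.
// Because the builder implements `ExtendsPipelineViewportStateCreateInfo`, the result
// is meant to be pushed onto a `PipelineViewportStateCreateInfo` pointer chain.
#[allow(dead_code)]
fn viewport_w_scaling_sketch<'a>(
    scalings: &'a [ViewportWScalingNV],
) -> PipelineViewportWScalingStateCreateInfoNVBuilder<'a> {
    PipelineViewportWScalingStateCreateInfoNV::builder()
        .viewport_w_scaling_enable(true)
        .viewport_w_scalings(scalings)
}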
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkViewportSwizzleNV.html>"]
pub struct ViewportSwizzleNV {
pub x: ViewportCoordinateSwizzleNV,
pub y: ViewportCoordinateSwizzleNV,
pub z: ViewportCoordinateSwizzleNV,
pub w: ViewportCoordinateSwizzleNV,
}
impl ViewportSwizzleNV {
pub fn builder<'a>() -> ViewportSwizzleNVBuilder<'a> {
ViewportSwizzleNVBuilder {
inner: ViewportSwizzleNV::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ViewportSwizzleNVBuilder<'a> {
inner: ViewportSwizzleNV,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for ViewportSwizzleNVBuilder<'a> {
type Target = ViewportSwizzleNV;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ViewportSwizzleNVBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ViewportSwizzleNVBuilder<'a> {
pub fn x(mut self, x: ViewportCoordinateSwizzleNV) -> ViewportSwizzleNVBuilder<'a> {
self.inner.x = x;
self
}
pub fn y(mut self, y: ViewportCoordinateSwizzleNV) -> ViewportSwizzleNVBuilder<'a> {
self.inner.y = y;
self
}
pub fn z(mut self, z: ViewportCoordinateSwizzleNV) -> ViewportSwizzleNVBuilder<'a> {
self.inner.z = z;
self
}
pub fn w(mut self, w: ViewportCoordinateSwizzleNV) -> ViewportSwizzleNVBuilder<'a> {
self.inner.w = w;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ViewportSwizzleNV {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPipelineViewportSwizzleStateCreateInfoNV.html>"]
pub struct PipelineViewportSwizzleStateCreateInfoNV {
pub s_type: StructureType,
pub p_next: *const c_void,
pub flags: PipelineViewportSwizzleStateCreateFlagsNV,
pub viewport_count: u32,
pub p_viewport_swizzles: *const ViewportSwizzleNV,
}
impl ::std::default::Default for PipelineViewportSwizzleStateCreateInfoNV {
fn default() -> PipelineViewportSwizzleStateCreateInfoNV {
PipelineViewportSwizzleStateCreateInfoNV {
s_type: StructureType::PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV,
p_next: ::std::ptr::null(),
flags: PipelineViewportSwizzleStateCreateFlagsNV::default(),
viewport_count: u32::default(),
p_viewport_swizzles: ::std::ptr::null(),
}
}
}
impl PipelineViewportSwizzleStateCreateInfoNV {
pub fn builder<'a>() -> PipelineViewportSwizzleStateCreateInfoNVBuilder<'a> {
PipelineViewportSwizzleStateCreateInfoNVBuilder {
inner: PipelineViewportSwizzleStateCreateInfoNV::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PipelineViewportSwizzleStateCreateInfoNVBuilder<'a> {
inner: PipelineViewportSwizzleStateCreateInfoNV,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsPipelineViewportStateCreateInfo
for PipelineViewportSwizzleStateCreateInfoNVBuilder<'_>
{
}
unsafe impl ExtendsPipelineViewportStateCreateInfo for PipelineViewportSwizzleStateCreateInfoNV {}
impl<'a> ::std::ops::Deref for PipelineViewportSwizzleStateCreateInfoNVBuilder<'a> {
type Target = PipelineViewportSwizzleStateCreateInfoNV;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PipelineViewportSwizzleStateCreateInfoNVBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PipelineViewportSwizzleStateCreateInfoNVBuilder<'a> {
pub fn flags(
mut self,
flags: PipelineViewportSwizzleStateCreateFlagsNV,
) -> PipelineViewportSwizzleStateCreateInfoNVBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn viewport_swizzles(
mut self,
viewport_swizzles: &'a [ViewportSwizzleNV],
) -> PipelineViewportSwizzleStateCreateInfoNVBuilder<'a> {
self.inner.viewport_count = viewport_swizzles.len() as _;
self.inner.p_viewport_swizzles = viewport_swizzles.as_ptr();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PipelineViewportSwizzleStateCreateInfoNV {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDeviceDiscardRectanglePropertiesEXT.html>"]
pub struct PhysicalDeviceDiscardRectanglePropertiesEXT {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub max_discard_rectangles: u32,
}
impl ::std::default::Default for PhysicalDeviceDiscardRectanglePropertiesEXT {
fn default() -> PhysicalDeviceDiscardRectanglePropertiesEXT {
PhysicalDeviceDiscardRectanglePropertiesEXT {
s_type: StructureType::PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT,
p_next: ::std::ptr::null_mut(),
max_discard_rectangles: u32::default(),
}
}
}
impl PhysicalDeviceDiscardRectanglePropertiesEXT {
pub fn builder<'a>() -> PhysicalDeviceDiscardRectanglePropertiesEXTBuilder<'a> {
PhysicalDeviceDiscardRectanglePropertiesEXTBuilder {
inner: PhysicalDeviceDiscardRectanglePropertiesEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDeviceDiscardRectanglePropertiesEXTBuilder<'a> {
inner: PhysicalDeviceDiscardRectanglePropertiesEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsPhysicalDeviceProperties2
for PhysicalDeviceDiscardRectanglePropertiesEXTBuilder<'_>
{
}
unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceDiscardRectanglePropertiesEXT {}
impl<'a> ::std::ops::Deref for PhysicalDeviceDiscardRectanglePropertiesEXTBuilder<'a> {
type Target = PhysicalDeviceDiscardRectanglePropertiesEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceDiscardRectanglePropertiesEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDeviceDiscardRectanglePropertiesEXTBuilder<'a> {
pub fn max_discard_rectangles(
mut self,
max_discard_rectangles: u32,
) -> PhysicalDeviceDiscardRectanglePropertiesEXTBuilder<'a> {
self.inner.max_discard_rectangles = max_discard_rectangles;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDeviceDiscardRectanglePropertiesEXT {
self.inner
}
}
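// A minimal usage sketch, not part of the generated bindings (the helper name is
// illustrative only): this is an output-only properties struct. `default()` sets
// `s_type` and a null mutable `p_next`, and the `ExtendsPhysicalDeviceProperties2`
// impls above mark it as chainable into a `PhysicalDeviceProperties2` query; after the
// query has run, the limit is read straight from the field.
#[allow(dead_code)]
fn max_discard_rectangles_sketch(props: &PhysicalDeviceDiscardRectanglePropertiesEXT) -> u32 {
    props.max_discard_rectangles
}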
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPipelineDiscardRectangleStateCreateInfoEXT.html>"]
pub struct PipelineDiscardRectangleStateCreateInfoEXT {
pub s_type: StructureType,
pub p_next: *const c_void,
pub flags: PipelineDiscardRectangleStateCreateFlagsEXT,
pub discard_rectangle_mode: DiscardRectangleModeEXT,
pub discard_rectangle_count: u32,
pub p_discard_rectangles: *const Rect2D,
}
impl ::std::default::Default for PipelineDiscardRectangleStateCreateInfoEXT {
fn default() -> PipelineDiscardRectangleStateCreateInfoEXT {
PipelineDiscardRectangleStateCreateInfoEXT {
s_type: StructureType::PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT,
p_next: ::std::ptr::null(),
flags: PipelineDiscardRectangleStateCreateFlagsEXT::default(),
discard_rectangle_mode: DiscardRectangleModeEXT::default(),
discard_rectangle_count: u32::default(),
p_discard_rectangles: ::std::ptr::null(),
}
}
}
impl PipelineDiscardRectangleStateCreateInfoEXT {
pub fn builder<'a>() -> PipelineDiscardRectangleStateCreateInfoEXTBuilder<'a> {
PipelineDiscardRectangleStateCreateInfoEXTBuilder {
inner: PipelineDiscardRectangleStateCreateInfoEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PipelineDiscardRectangleStateCreateInfoEXTBuilder<'a> {
inner: PipelineDiscardRectangleStateCreateInfoEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsGraphicsPipelineCreateInfo
for PipelineDiscardRectangleStateCreateInfoEXTBuilder<'_>
{
}
unsafe impl ExtendsGraphicsPipelineCreateInfo for PipelineDiscardRectangleStateCreateInfoEXT {}
impl<'a> ::std::ops::Deref for PipelineDiscardRectangleStateCreateInfoEXTBuilder<'a> {
type Target = PipelineDiscardRectangleStateCreateInfoEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PipelineDiscardRectangleStateCreateInfoEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PipelineDiscardRectangleStateCreateInfoEXTBuilder<'a> {
pub fn flags(
mut self,
flags: PipelineDiscardRectangleStateCreateFlagsEXT,
) -> PipelineDiscardRectangleStateCreateInfoEXTBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn discard_rectangle_mode(
mut self,
discard_rectangle_mode: DiscardRectangleModeEXT,
) -> PipelineDiscardRectangleStateCreateInfoEXTBuilder<'a> {
self.inner.discard_rectangle_mode = discard_rectangle_mode;
self
}
pub fn discard_rectangles(
mut self,
discard_rectangles: &'a [Rect2D],
) -> PipelineDiscardRectangleStateCreateInfoEXTBuilder<'a> {
self.inner.discard_rectangle_count = discard_rectangles.len() as _;
self.inner.p_discard_rectangles = discard_rectangles.as_ptr();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PipelineDiscardRectangleStateCreateInfoEXT {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX.html>"]
pub struct PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub per_view_position_all_components: Bool32,
}
impl ::std::default::Default for PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX {
fn default() -> PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX {
PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX {
s_type: StructureType::PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX,
p_next: ::std::ptr::null_mut(),
per_view_position_all_components: Bool32::default(),
}
}
}
impl PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX {
pub fn builder<'a>() -> PhysicalDeviceMultiviewPerViewAttributesPropertiesNVXBuilder<'a> {
PhysicalDeviceMultiviewPerViewAttributesPropertiesNVXBuilder {
inner: PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDeviceMultiviewPerViewAttributesPropertiesNVXBuilder<'a> {
inner: PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsPhysicalDeviceProperties2
for PhysicalDeviceMultiviewPerViewAttributesPropertiesNVXBuilder<'_>
{
}
unsafe impl ExtendsPhysicalDeviceProperties2
for PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX
{
}
impl<'a> ::std::ops::Deref for PhysicalDeviceMultiviewPerViewAttributesPropertiesNVXBuilder<'a> {
type Target = PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceMultiviewPerViewAttributesPropertiesNVXBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDeviceMultiviewPerViewAttributesPropertiesNVXBuilder<'a> {
pub fn per_view_position_all_components(
mut self,
per_view_position_all_components: bool,
) -> PhysicalDeviceMultiviewPerViewAttributesPropertiesNVXBuilder<'a> {
self.inner.per_view_position_all_components = per_view_position_all_components.into();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkInputAttachmentAspectReference.html>"]
pub struct InputAttachmentAspectReference {
pub subpass: u32,
pub input_attachment_index: u32,
pub aspect_mask: ImageAspectFlags,
}
impl InputAttachmentAspectReference {
pub fn builder<'a>() -> InputAttachmentAspectReferenceBuilder<'a> {
InputAttachmentAspectReferenceBuilder {
inner: InputAttachmentAspectReference::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct InputAttachmentAspectReferenceBuilder<'a> {
inner: InputAttachmentAspectReference,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for InputAttachmentAspectReferenceBuilder<'a> {
type Target = InputAttachmentAspectReference;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for InputAttachmentAspectReferenceBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> InputAttachmentAspectReferenceBuilder<'a> {
pub fn subpass(mut self, subpass: u32) -> InputAttachmentAspectReferenceBuilder<'a> {
self.inner.subpass = subpass;
self
}
pub fn input_attachment_index(
mut self,
input_attachment_index: u32,
) -> InputAttachmentAspectReferenceBuilder<'a> {
self.inner.input_attachment_index = input_attachment_index;
self
}
pub fn aspect_mask(
mut self,
aspect_mask: ImageAspectFlags,
) -> InputAttachmentAspectReferenceBuilder<'a> {
self.inner.aspect_mask = aspect_mask;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> InputAttachmentAspectReference {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkRenderPassInputAttachmentAspectCreateInfo.html>"]
pub struct RenderPassInputAttachmentAspectCreateInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub aspect_reference_count: u32,
pub p_aspect_references: *const InputAttachmentAspectReference,
}
impl ::std::default::Default for RenderPassInputAttachmentAspectCreateInfo {
fn default() -> RenderPassInputAttachmentAspectCreateInfo {
RenderPassInputAttachmentAspectCreateInfo {
s_type: StructureType::RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO,
p_next: ::std::ptr::null(),
aspect_reference_count: u32::default(),
p_aspect_references: ::std::ptr::null(),
}
}
}
impl RenderPassInputAttachmentAspectCreateInfo {
pub fn builder<'a>() -> RenderPassInputAttachmentAspectCreateInfoBuilder<'a> {
RenderPassInputAttachmentAspectCreateInfoBuilder {
inner: RenderPassInputAttachmentAspectCreateInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct RenderPassInputAttachmentAspectCreateInfoBuilder<'a> {
inner: RenderPassInputAttachmentAspectCreateInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsRenderPassCreateInfo for RenderPassInputAttachmentAspectCreateInfoBuilder<'_> {}
unsafe impl ExtendsRenderPassCreateInfo for RenderPassInputAttachmentAspectCreateInfo {}
impl<'a> ::std::ops::Deref for RenderPassInputAttachmentAspectCreateInfoBuilder<'a> {
type Target = RenderPassInputAttachmentAspectCreateInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for RenderPassInputAttachmentAspectCreateInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> RenderPassInputAttachmentAspectCreateInfoBuilder<'a> {
pub fn aspect_references(
mut self,
aspect_references: &'a [InputAttachmentAspectReference],
) -> RenderPassInputAttachmentAspectCreateInfoBuilder<'a> {
self.inner.aspect_reference_count = aspect_references.len() as _;
self.inner.p_aspect_references = aspect_references.as_ptr();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> RenderPassInputAttachmentAspectCreateInfo {
self.inner
}
}
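// A minimal usage sketch, not part of the generated bindings (the helper names are
// illustrative only, and `ImageAspectFlags::COLOR` is assumed to be the flag constant
// generated elsewhere in this file): builds a single aspect reference and wraps a slice
// of such references in a `RenderPassInputAttachmentAspectCreateInfo`, which the
// `ExtendsRenderPassCreateInfo` impls above allow onto a `RenderPassCreateInfo` chain.
#[allow(dead_code)]
fn color_aspect_reference_sketch(subpass: u32, index: u32) -> InputAttachmentAspectReference {
    InputAttachmentAspectReference::builder()
        .subpass(subpass)
        .input_attachment_index(index)
        .aspect_mask(ImageAspectFlags::COLOR)
        .build()
}
#[allow(dead_code)]
fn input_attachment_aspect_info_sketch<'a>(
    references: &'a [InputAttachmentAspectReference],
) -> RenderPassInputAttachmentAspectCreateInfoBuilder<'a> {
    RenderPassInputAttachmentAspectCreateInfo::builder().aspect_references(references)
}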
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDeviceSurfaceInfo2KHR.html>"]
pub struct PhysicalDeviceSurfaceInfo2KHR {
pub s_type: StructureType,
pub p_next: *const c_void,
pub surface: SurfaceKHR,
}
impl ::std::default::Default for PhysicalDeviceSurfaceInfo2KHR {
fn default() -> PhysicalDeviceSurfaceInfo2KHR {
PhysicalDeviceSurfaceInfo2KHR {
s_type: StructureType::PHYSICAL_DEVICE_SURFACE_INFO_2_KHR,
p_next: ::std::ptr::null(),
surface: SurfaceKHR::default(),
}
}
}
impl PhysicalDeviceSurfaceInfo2KHR {
pub fn builder<'a>() -> PhysicalDeviceSurfaceInfo2KHRBuilder<'a> {
PhysicalDeviceSurfaceInfo2KHRBuilder {
inner: PhysicalDeviceSurfaceInfo2KHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDeviceSurfaceInfo2KHRBuilder<'a> {
inner: PhysicalDeviceSurfaceInfo2KHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsPhysicalDeviceSurfaceInfo2KHR {}
impl<'a> ::std::ops::Deref for PhysicalDeviceSurfaceInfo2KHRBuilder<'a> {
type Target = PhysicalDeviceSurfaceInfo2KHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceSurfaceInfo2KHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDeviceSurfaceInfo2KHRBuilder<'a> {
pub fn surface(mut self, surface: SurfaceKHR) -> PhysicalDeviceSurfaceInfo2KHRBuilder<'a> {
self.inner.surface = surface;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsPhysicalDeviceSurfaceInfo2KHR>(
mut self,
next: &'a mut T,
) -> PhysicalDeviceSurfaceInfo2KHRBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDeviceSurfaceInfo2KHR {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkSurfaceCapabilities2KHR.html>"]
pub struct SurfaceCapabilities2KHR {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub surface_capabilities: SurfaceCapabilitiesKHR,
}
impl ::std::default::Default for SurfaceCapabilities2KHR {
fn default() -> SurfaceCapabilities2KHR {
SurfaceCapabilities2KHR {
s_type: StructureType::SURFACE_CAPABILITIES_2_KHR,
p_next: ::std::ptr::null_mut(),
surface_capabilities: SurfaceCapabilitiesKHR::default(),
}
}
}
impl SurfaceCapabilities2KHR {
pub fn builder<'a>() -> SurfaceCapabilities2KHRBuilder<'a> {
SurfaceCapabilities2KHRBuilder {
inner: SurfaceCapabilities2KHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct SurfaceCapabilities2KHRBuilder<'a> {
inner: SurfaceCapabilities2KHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsSurfaceCapabilities2KHR {}
impl<'a> ::std::ops::Deref for SurfaceCapabilities2KHRBuilder<'a> {
type Target = SurfaceCapabilities2KHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for SurfaceCapabilities2KHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> SurfaceCapabilities2KHRBuilder<'a> {
pub fn surface_capabilities(
mut self,
surface_capabilities: SurfaceCapabilitiesKHR,
) -> SurfaceCapabilities2KHRBuilder<'a> {
self.inner.surface_capabilities = surface_capabilities;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsSurfaceCapabilities2KHR>(
mut self,
next: &'a mut T,
) -> SurfaceCapabilities2KHRBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> SurfaceCapabilities2KHR {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkSurfaceFormat2KHR.html>"]
pub struct SurfaceFormat2KHR {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub surface_format: SurfaceFormatKHR,
}
impl ::std::default::Default for SurfaceFormat2KHR {
fn default() -> SurfaceFormat2KHR {
SurfaceFormat2KHR {
s_type: StructureType::SURFACE_FORMAT_2_KHR,
p_next: ::std::ptr::null_mut(),
surface_format: SurfaceFormatKHR::default(),
}
}
}
impl SurfaceFormat2KHR {
pub fn builder<'a>() -> SurfaceFormat2KHRBuilder<'a> {
SurfaceFormat2KHRBuilder {
inner: SurfaceFormat2KHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct SurfaceFormat2KHRBuilder<'a> {
inner: SurfaceFormat2KHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsSurfaceFormat2KHR {}
impl<'a> ::std::ops::Deref for SurfaceFormat2KHRBuilder<'a> {
type Target = SurfaceFormat2KHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for SurfaceFormat2KHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> SurfaceFormat2KHRBuilder<'a> {
pub fn surface_format(
mut self,
surface_format: SurfaceFormatKHR,
) -> SurfaceFormat2KHRBuilder<'a> {
self.inner.surface_format = surface_format;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsSurfaceFormat2KHR>(
mut self,
next: &'a mut T,
) -> SurfaceFormat2KHRBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> SurfaceFormat2KHR {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDisplayProperties2KHR.html>"]
pub struct DisplayProperties2KHR {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub display_properties: DisplayPropertiesKHR,
}
impl ::std::default::Default for DisplayProperties2KHR {
fn default() -> DisplayProperties2KHR {
DisplayProperties2KHR {
s_type: StructureType::DISPLAY_PROPERTIES_2_KHR,
p_next: ::std::ptr::null_mut(),
display_properties: DisplayPropertiesKHR::default(),
}
}
}
impl DisplayProperties2KHR {
pub fn builder<'a>() -> DisplayProperties2KHRBuilder<'a> {
DisplayProperties2KHRBuilder {
inner: DisplayProperties2KHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct DisplayProperties2KHRBuilder<'a> {
inner: DisplayProperties2KHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsDisplayProperties2KHR {}
impl<'a> ::std::ops::Deref for DisplayProperties2KHRBuilder<'a> {
type Target = DisplayProperties2KHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for DisplayProperties2KHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> DisplayProperties2KHRBuilder<'a> {
pub fn display_properties(
mut self,
display_properties: DisplayPropertiesKHR,
) -> DisplayProperties2KHRBuilder<'a> {
self.inner.display_properties = display_properties;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsDisplayProperties2KHR>(
mut self,
next: &'a mut T,
) -> DisplayProperties2KHRBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> DisplayProperties2KHR {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDisplayPlaneProperties2KHR.html>"]
pub struct DisplayPlaneProperties2KHR {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub display_plane_properties: DisplayPlanePropertiesKHR,
}
impl ::std::default::Default for DisplayPlaneProperties2KHR {
fn default() -> DisplayPlaneProperties2KHR {
DisplayPlaneProperties2KHR {
s_type: StructureType::DISPLAY_PLANE_PROPERTIES_2_KHR,
p_next: ::std::ptr::null_mut(),
display_plane_properties: DisplayPlanePropertiesKHR::default(),
}
}
}
impl DisplayPlaneProperties2KHR {
pub fn builder<'a>() -> DisplayPlaneProperties2KHRBuilder<'a> {
DisplayPlaneProperties2KHRBuilder {
inner: DisplayPlaneProperties2KHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct DisplayPlaneProperties2KHRBuilder<'a> {
inner: DisplayPlaneProperties2KHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsDisplayPlaneProperties2KHR {}
impl<'a> ::std::ops::Deref for DisplayPlaneProperties2KHRBuilder<'a> {
type Target = DisplayPlaneProperties2KHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for DisplayPlaneProperties2KHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> DisplayPlaneProperties2KHRBuilder<'a> {
pub fn display_plane_properties(
mut self,
display_plane_properties: DisplayPlanePropertiesKHR,
) -> DisplayPlaneProperties2KHRBuilder<'a> {
self.inner.display_plane_properties = display_plane_properties;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsDisplayPlaneProperties2KHR>(
mut self,
next: &'a mut T,
) -> DisplayPlaneProperties2KHRBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> DisplayPlaneProperties2KHR {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDisplayModeProperties2KHR.html>"]
pub struct DisplayModeProperties2KHR {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub display_mode_properties: DisplayModePropertiesKHR,
}
impl ::std::default::Default for DisplayModeProperties2KHR {
fn default() -> DisplayModeProperties2KHR {
DisplayModeProperties2KHR {
s_type: StructureType::DISPLAY_MODE_PROPERTIES_2_KHR,
p_next: ::std::ptr::null_mut(),
display_mode_properties: DisplayModePropertiesKHR::default(),
}
}
}
impl DisplayModeProperties2KHR {
pub fn builder<'a>() -> DisplayModeProperties2KHRBuilder<'a> {
DisplayModeProperties2KHRBuilder {
inner: DisplayModeProperties2KHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct DisplayModeProperties2KHRBuilder<'a> {
inner: DisplayModeProperties2KHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsDisplayModeProperties2KHR {}
impl<'a> ::std::ops::Deref for DisplayModeProperties2KHRBuilder<'a> {
type Target = DisplayModeProperties2KHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for DisplayModeProperties2KHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> DisplayModeProperties2KHRBuilder<'a> {
pub fn display_mode_properties(
mut self,
display_mode_properties: DisplayModePropertiesKHR,
) -> DisplayModeProperties2KHRBuilder<'a> {
self.inner.display_mode_properties = display_mode_properties;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsDisplayModeProperties2KHR>(
mut self,
next: &'a mut T,
) -> DisplayModeProperties2KHRBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> DisplayModeProperties2KHR {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDisplayPlaneInfo2KHR.html>"]
pub struct DisplayPlaneInfo2KHR {
pub s_type: StructureType,
pub p_next: *const c_void,
pub mode: DisplayModeKHR,
pub plane_index: u32,
}
impl ::std::default::Default for DisplayPlaneInfo2KHR {
fn default() -> DisplayPlaneInfo2KHR {
DisplayPlaneInfo2KHR {
s_type: StructureType::DISPLAY_PLANE_INFO_2_KHR,
p_next: ::std::ptr::null(),
mode: DisplayModeKHR::default(),
plane_index: u32::default(),
}
}
}
impl DisplayPlaneInfo2KHR {
pub fn builder<'a>() -> DisplayPlaneInfo2KHRBuilder<'a> {
DisplayPlaneInfo2KHRBuilder {
inner: DisplayPlaneInfo2KHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct DisplayPlaneInfo2KHRBuilder<'a> {
inner: DisplayPlaneInfo2KHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsDisplayPlaneInfo2KHR {}
impl<'a> ::std::ops::Deref for DisplayPlaneInfo2KHRBuilder<'a> {
type Target = DisplayPlaneInfo2KHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for DisplayPlaneInfo2KHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> DisplayPlaneInfo2KHRBuilder<'a> {
pub fn mode(mut self, mode: DisplayModeKHR) -> DisplayPlaneInfo2KHRBuilder<'a> {
self.inner.mode = mode;
self
}
pub fn plane_index(mut self, plane_index: u32) -> DisplayPlaneInfo2KHRBuilder<'a> {
self.inner.plane_index = plane_index;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsDisplayPlaneInfo2KHR>(
mut self,
next: &'a mut T,
) -> DisplayPlaneInfo2KHRBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> DisplayPlaneInfo2KHR {
self.inner
}
}
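// A minimal usage sketch, not part of the generated bindings (the helper name is
// illustrative only): `DisplayPlaneInfo2KHR` is the input half of a
// vkGetDisplayPlaneCapabilities2KHR-style query, pairing a display mode handle with a
// plane index; the matching output struct is `DisplayPlaneCapabilities2KHR` below.
#[allow(dead_code)]
fn display_plane_info_sketch<'a>(
    mode: DisplayModeKHR,
    plane_index: u32,
) -> DisplayPlaneInfo2KHRBuilder<'a> {
    DisplayPlaneInfo2KHR::builder().mode(mode).plane_index(plane_index)
}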
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDisplayPlaneCapabilities2KHR.html>"]
pub struct DisplayPlaneCapabilities2KHR {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub capabilities: DisplayPlaneCapabilitiesKHR,
}
impl ::std::default::Default for DisplayPlaneCapabilities2KHR {
fn default() -> DisplayPlaneCapabilities2KHR {
DisplayPlaneCapabilities2KHR {
s_type: StructureType::DISPLAY_PLANE_CAPABILITIES_2_KHR,
p_next: ::std::ptr::null_mut(),
capabilities: DisplayPlaneCapabilitiesKHR::default(),
}
}
}
impl DisplayPlaneCapabilities2KHR {
pub fn builder<'a>() -> DisplayPlaneCapabilities2KHRBuilder<'a> {
DisplayPlaneCapabilities2KHRBuilder {
inner: DisplayPlaneCapabilities2KHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct DisplayPlaneCapabilities2KHRBuilder<'a> {
inner: DisplayPlaneCapabilities2KHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsDisplayPlaneCapabilities2KHR {}
impl<'a> ::std::ops::Deref for DisplayPlaneCapabilities2KHRBuilder<'a> {
type Target = DisplayPlaneCapabilities2KHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for DisplayPlaneCapabilities2KHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> DisplayPlaneCapabilities2KHRBuilder<'a> {
pub fn capabilities(
mut self,
capabilities: DisplayPlaneCapabilitiesKHR,
) -> DisplayPlaneCapabilities2KHRBuilder<'a> {
self.inner.capabilities = capabilities;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsDisplayPlaneCapabilities2KHR>(
mut self,
next: &'a mut T,
) -> DisplayPlaneCapabilities2KHRBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> DisplayPlaneCapabilities2KHR {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkSharedPresentSurfaceCapabilitiesKHR.html>"]
pub struct SharedPresentSurfaceCapabilitiesKHR {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub shared_present_supported_usage_flags: ImageUsageFlags,
}
impl ::std::default::Default for SharedPresentSurfaceCapabilitiesKHR {
fn default() -> SharedPresentSurfaceCapabilitiesKHR {
SharedPresentSurfaceCapabilitiesKHR {
s_type: StructureType::SHARED_PRESENT_SURFACE_CAPABILITIES_KHR,
p_next: ::std::ptr::null_mut(),
shared_present_supported_usage_flags: ImageUsageFlags::default(),
}
}
}
impl SharedPresentSurfaceCapabilitiesKHR {
pub fn builder<'a>() -> SharedPresentSurfaceCapabilitiesKHRBuilder<'a> {
SharedPresentSurfaceCapabilitiesKHRBuilder {
inner: SharedPresentSurfaceCapabilitiesKHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct SharedPresentSurfaceCapabilitiesKHRBuilder<'a> {
inner: SharedPresentSurfaceCapabilitiesKHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsSurfaceCapabilities2KHR for SharedPresentSurfaceCapabilitiesKHRBuilder<'_> {}
unsafe impl ExtendsSurfaceCapabilities2KHR for SharedPresentSurfaceCapabilitiesKHR {}
impl<'a> ::std::ops::Deref for SharedPresentSurfaceCapabilitiesKHRBuilder<'a> {
type Target = SharedPresentSurfaceCapabilitiesKHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for SharedPresentSurfaceCapabilitiesKHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> SharedPresentSurfaceCapabilitiesKHRBuilder<'a> {
pub fn shared_present_supported_usage_flags(
mut self,
shared_present_supported_usage_flags: ImageUsageFlags,
) -> SharedPresentSurfaceCapabilitiesKHRBuilder<'a> {
self.inner.shared_present_supported_usage_flags = shared_present_supported_usage_flags;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> SharedPresentSurfaceCapabilitiesKHR {
self.inner
}
}
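// A minimal usage sketch, not part of the generated bindings (the helper name is
// illustrative only): `SharedPresentSurfaceCapabilitiesKHR` implements
// `ExtendsSurfaceCapabilities2KHR`, so a mutable reference to it can be prepended to a
// `SurfaceCapabilities2KHR` chain with `push_next`. The returned builder keeps the `'a`
// borrow and derefs to `&SurfaceCapabilities2KHR`, so it can be passed to the
// capabilities query without calling `build`.
#[allow(dead_code)]
fn surface_capabilities_chain_sketch<'a>(
    shared_present: &'a mut SharedPresentSurfaceCapabilitiesKHR,
) -> SurfaceCapabilities2KHRBuilder<'a> {
    // Chain after this call: SurfaceCapabilities2KHR -> SharedPresentSurfaceCapabilitiesKHR.
    SurfaceCapabilities2KHR::builder().push_next(shared_present)
}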
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDevice16BitStorageFeatures.html>"]
pub struct PhysicalDevice16BitStorageFeatures {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub storage_buffer16_bit_access: Bool32,
pub uniform_and_storage_buffer16_bit_access: Bool32,
pub storage_push_constant16: Bool32,
pub storage_input_output16: Bool32,
}
impl ::std::default::Default for PhysicalDevice16BitStorageFeatures {
fn default() -> PhysicalDevice16BitStorageFeatures {
PhysicalDevice16BitStorageFeatures {
s_type: StructureType::PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES,
p_next: ::std::ptr::null_mut(),
storage_buffer16_bit_access: Bool32::default(),
uniform_and_storage_buffer16_bit_access: Bool32::default(),
storage_push_constant16: Bool32::default(),
storage_input_output16: Bool32::default(),
}
}
}
impl PhysicalDevice16BitStorageFeatures {
pub fn builder<'a>() -> PhysicalDevice16BitStorageFeaturesBuilder<'a> {
PhysicalDevice16BitStorageFeaturesBuilder {
inner: PhysicalDevice16BitStorageFeatures::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDevice16BitStorageFeaturesBuilder<'a> {
inner: PhysicalDevice16BitStorageFeatures,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDevice16BitStorageFeaturesBuilder<'_> {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDevice16BitStorageFeatures {}
impl<'a> ::std::ops::Deref for PhysicalDevice16BitStorageFeaturesBuilder<'a> {
type Target = PhysicalDevice16BitStorageFeatures;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDevice16BitStorageFeaturesBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDevice16BitStorageFeaturesBuilder<'a> {
pub fn storage_buffer16_bit_access(
mut self,
storage_buffer16_bit_access: bool,
) -> PhysicalDevice16BitStorageFeaturesBuilder<'a> {
self.inner.storage_buffer16_bit_access = storage_buffer16_bit_access.into();
self
}
pub fn uniform_and_storage_buffer16_bit_access(
mut self,
uniform_and_storage_buffer16_bit_access: bool,
) -> PhysicalDevice16BitStorageFeaturesBuilder<'a> {
self.inner.uniform_and_storage_buffer16_bit_access =
uniform_and_storage_buffer16_bit_access.into();
self
}
pub fn storage_push_constant16(
mut self,
storage_push_constant16: bool,
) -> PhysicalDevice16BitStorageFeaturesBuilder<'a> {
self.inner.storage_push_constant16 = storage_push_constant16.into();
self
}
pub fn storage_input_output16(
mut self,
storage_input_output16: bool,
) -> PhysicalDevice16BitStorageFeaturesBuilder<'a> {
self.inner.storage_input_output16 = storage_input_output16.into();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDevice16BitStorageFeatures {
self.inner
}
}
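// A minimal usage sketch, not part of the generated bindings (the helper name is
// illustrative only): the setters convert plain `bool`s into the FFI `Bool32`
// representation, and the `ExtendsDeviceCreateInfo` impls above mark the struct as a
// valid member of a `DeviceCreateInfo` pointer chain when these features are enabled
// at device creation.
#[allow(dead_code)]
fn storage16_features_sketch<'a>() -> PhysicalDevice16BitStorageFeaturesBuilder<'a> {
    PhysicalDevice16BitStorageFeatures::builder()
        .storage_buffer16_bit_access(true)
        .uniform_and_storage_buffer16_bit_access(true)
}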
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDeviceSubgroupProperties.html>"]
pub struct PhysicalDeviceSubgroupProperties {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub subgroup_size: u32,
pub supported_stages: ShaderStageFlags,
pub supported_operations: SubgroupFeatureFlags,
pub quad_operations_in_all_stages: Bool32,
}
impl ::std::default::Default for PhysicalDeviceSubgroupProperties {
fn default() -> PhysicalDeviceSubgroupProperties {
PhysicalDeviceSubgroupProperties {
s_type: StructureType::PHYSICAL_DEVICE_SUBGROUP_PROPERTIES,
p_next: ::std::ptr::null_mut(),
subgroup_size: u32::default(),
supported_stages: ShaderStageFlags::default(),
supported_operations: SubgroupFeatureFlags::default(),
quad_operations_in_all_stages: Bool32::default(),
}
}
}
impl PhysicalDeviceSubgroupProperties {
pub fn builder<'a>() -> PhysicalDeviceSubgroupPropertiesBuilder<'a> {
PhysicalDeviceSubgroupPropertiesBuilder {
inner: PhysicalDeviceSubgroupProperties::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDeviceSubgroupPropertiesBuilder<'a> {
inner: PhysicalDeviceSubgroupProperties,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceSubgroupPropertiesBuilder<'_> {}
unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceSubgroupProperties {}
impl<'a> ::std::ops::Deref for PhysicalDeviceSubgroupPropertiesBuilder<'a> {
type Target = PhysicalDeviceSubgroupProperties;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceSubgroupPropertiesBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDeviceSubgroupPropertiesBuilder<'a> {
pub fn subgroup_size(
mut self,
subgroup_size: u32,
) -> PhysicalDeviceSubgroupPropertiesBuilder<'a> {
self.inner.subgroup_size = subgroup_size;
self
}
pub fn supported_stages(
mut self,
supported_stages: ShaderStageFlags,
) -> PhysicalDeviceSubgroupPropertiesBuilder<'a> {
self.inner.supported_stages = supported_stages;
self
}
pub fn supported_operations(
mut self,
supported_operations: SubgroupFeatureFlags,
) -> PhysicalDeviceSubgroupPropertiesBuilder<'a> {
self.inner.supported_operations = supported_operations;
self
}
pub fn quad_operations_in_all_stages(
mut self,
quad_operations_in_all_stages: bool,
) -> PhysicalDeviceSubgroupPropertiesBuilder<'a> {
self.inner.quad_operations_in_all_stages = quad_operations_in_all_stages.into();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDeviceSubgroupProperties {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkBufferMemoryRequirementsInfo2.html>"]
pub struct BufferMemoryRequirementsInfo2 {
pub s_type: StructureType,
pub p_next: *const c_void,
pub buffer: Buffer,
}
impl ::std::default::Default for BufferMemoryRequirementsInfo2 {
fn default() -> BufferMemoryRequirementsInfo2 {
BufferMemoryRequirementsInfo2 {
s_type: StructureType::BUFFER_MEMORY_REQUIREMENTS_INFO_2,
p_next: ::std::ptr::null(),
buffer: Buffer::default(),
}
}
}
impl BufferMemoryRequirementsInfo2 {
pub fn builder<'a>() -> BufferMemoryRequirementsInfo2Builder<'a> {
BufferMemoryRequirementsInfo2Builder {
inner: BufferMemoryRequirementsInfo2::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct BufferMemoryRequirementsInfo2Builder<'a> {
inner: BufferMemoryRequirementsInfo2,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsBufferMemoryRequirementsInfo2 {}
impl<'a> ::std::ops::Deref for BufferMemoryRequirementsInfo2Builder<'a> {
type Target = BufferMemoryRequirementsInfo2;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for BufferMemoryRequirementsInfo2Builder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> BufferMemoryRequirementsInfo2Builder<'a> {
pub fn buffer(mut self, buffer: Buffer) -> BufferMemoryRequirementsInfo2Builder<'a> {
self.inner.buffer = buffer;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsBufferMemoryRequirementsInfo2>(
mut self,
next: &'a mut T,
) -> BufferMemoryRequirementsInfo2Builder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> BufferMemoryRequirementsInfo2 {
self.inner
}
}
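// A minimal usage sketch, not part of the generated bindings (the helper name is
// illustrative only): wraps a `Buffer` handle as the input to a
// vkGetBufferMemoryRequirements2-style query. The returned builder derefs to
// `&BufferMemoryRequirementsInfo2`, so it can be passed to the query without `build`.
#[allow(dead_code)]
fn buffer_memory_requirements_info_sketch<'a>(
    buffer: Buffer,
) -> BufferMemoryRequirementsInfo2Builder<'a> {
    BufferMemoryRequirementsInfo2::builder().buffer(buffer)
}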
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkImageMemoryRequirementsInfo2.html>"]
pub struct ImageMemoryRequirementsInfo2 {
pub s_type: StructureType,
pub p_next: *const c_void,
pub image: Image,
}
impl ::std::default::Default for ImageMemoryRequirementsInfo2 {
fn default() -> ImageMemoryRequirementsInfo2 {
ImageMemoryRequirementsInfo2 {
s_type: StructureType::IMAGE_MEMORY_REQUIREMENTS_INFO_2,
p_next: ::std::ptr::null(),
image: Image::default(),
}
}
}
impl ImageMemoryRequirementsInfo2 {
pub fn builder<'a>() -> ImageMemoryRequirementsInfo2Builder<'a> {
ImageMemoryRequirementsInfo2Builder {
inner: ImageMemoryRequirementsInfo2::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ImageMemoryRequirementsInfo2Builder<'a> {
inner: ImageMemoryRequirementsInfo2,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsImageMemoryRequirementsInfo2 {}
impl<'a> ::std::ops::Deref for ImageMemoryRequirementsInfo2Builder<'a> {
type Target = ImageMemoryRequirementsInfo2;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ImageMemoryRequirementsInfo2Builder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ImageMemoryRequirementsInfo2Builder<'a> {
pub fn image(mut self, image: Image) -> ImageMemoryRequirementsInfo2Builder<'a> {
self.inner.image = image;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsImageMemoryRequirementsInfo2>(
mut self,
next: &'a mut T,
) -> ImageMemoryRequirementsInfo2Builder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ImageMemoryRequirementsInfo2 {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkImageSparseMemoryRequirementsInfo2.html>"]
pub struct ImageSparseMemoryRequirementsInfo2 {
pub s_type: StructureType,
pub p_next: *const c_void,
pub image: Image,
}
impl ::std::default::Default for ImageSparseMemoryRequirementsInfo2 {
fn default() -> ImageSparseMemoryRequirementsInfo2 {
ImageSparseMemoryRequirementsInfo2 {
s_type: StructureType::IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2,
p_next: ::std::ptr::null(),
image: Image::default(),
}
}
}
impl ImageSparseMemoryRequirementsInfo2 {
pub fn builder<'a>() -> ImageSparseMemoryRequirementsInfo2Builder<'a> {
ImageSparseMemoryRequirementsInfo2Builder {
inner: ImageSparseMemoryRequirementsInfo2::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ImageSparseMemoryRequirementsInfo2Builder<'a> {
inner: ImageSparseMemoryRequirementsInfo2,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsImageSparseMemoryRequirementsInfo2 {}
impl<'a> ::std::ops::Deref for ImageSparseMemoryRequirementsInfo2Builder<'a> {
type Target = ImageSparseMemoryRequirementsInfo2;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ImageSparseMemoryRequirementsInfo2Builder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ImageSparseMemoryRequirementsInfo2Builder<'a> {
pub fn image(mut self, image: Image) -> ImageSparseMemoryRequirementsInfo2Builder<'a> {
self.inner.image = image;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsImageSparseMemoryRequirementsInfo2>(
mut self,
next: &'a mut T,
) -> ImageSparseMemoryRequirementsInfo2Builder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ImageSparseMemoryRequirementsInfo2 {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkMemoryRequirements2.html>"]
pub struct MemoryRequirements2 {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub memory_requirements: MemoryRequirements,
}
impl ::std::default::Default for MemoryRequirements2 {
fn default() -> MemoryRequirements2 {
MemoryRequirements2 {
s_type: StructureType::MEMORY_REQUIREMENTS_2,
p_next: ::std::ptr::null_mut(),
memory_requirements: MemoryRequirements::default(),
}
}
}
impl MemoryRequirements2 {
pub fn builder<'a>() -> MemoryRequirements2Builder<'a> {
MemoryRequirements2Builder {
inner: MemoryRequirements2::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct MemoryRequirements2Builder<'a> {
inner: MemoryRequirements2,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsMemoryRequirements2 {}
impl<'a> ::std::ops::Deref for MemoryRequirements2Builder<'a> {
type Target = MemoryRequirements2;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for MemoryRequirements2Builder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> MemoryRequirements2Builder<'a> {
pub fn memory_requirements(
mut self,
memory_requirements: MemoryRequirements,
) -> MemoryRequirements2Builder<'a> {
self.inner.memory_requirements = memory_requirements;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsMemoryRequirements2>(
mut self,
next: &'a mut T,
) -> MemoryRequirements2Builder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> MemoryRequirements2 {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkSparseImageMemoryRequirements2.html>"]
pub struct SparseImageMemoryRequirements2 {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub memory_requirements: SparseImageMemoryRequirements,
}
impl ::std::default::Default for SparseImageMemoryRequirements2 {
fn default() -> SparseImageMemoryRequirements2 {
SparseImageMemoryRequirements2 {
s_type: StructureType::SPARSE_IMAGE_MEMORY_REQUIREMENTS_2,
p_next: ::std::ptr::null_mut(),
memory_requirements: SparseImageMemoryRequirements::default(),
}
}
}
impl SparseImageMemoryRequirements2 {
pub fn builder<'a>() -> SparseImageMemoryRequirements2Builder<'a> {
SparseImageMemoryRequirements2Builder {
inner: SparseImageMemoryRequirements2::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct SparseImageMemoryRequirements2Builder<'a> {
inner: SparseImageMemoryRequirements2,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsSparseImageMemoryRequirements2 {}
impl<'a> ::std::ops::Deref for SparseImageMemoryRequirements2Builder<'a> {
type Target = SparseImageMemoryRequirements2;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for SparseImageMemoryRequirements2Builder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> SparseImageMemoryRequirements2Builder<'a> {
pub fn memory_requirements(
mut self,
memory_requirements: SparseImageMemoryRequirements,
) -> SparseImageMemoryRequirements2Builder<'a> {
self.inner.memory_requirements = memory_requirements;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsSparseImageMemoryRequirements2>(
mut self,
next: &'a mut T,
) -> SparseImageMemoryRequirements2Builder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> SparseImageMemoryRequirements2 {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDevicePointClippingProperties.html>"]
pub struct PhysicalDevicePointClippingProperties {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub point_clipping_behavior: PointClippingBehavior,
}
impl ::std::default::Default for PhysicalDevicePointClippingProperties {
fn default() -> PhysicalDevicePointClippingProperties {
PhysicalDevicePointClippingProperties {
s_type: StructureType::PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES,
p_next: ::std::ptr::null_mut(),
point_clipping_behavior: PointClippingBehavior::default(),
}
}
}
impl PhysicalDevicePointClippingProperties {
pub fn builder<'a>() -> PhysicalDevicePointClippingPropertiesBuilder<'a> {
PhysicalDevicePointClippingPropertiesBuilder {
inner: PhysicalDevicePointClippingProperties::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDevicePointClippingPropertiesBuilder<'a> {
inner: PhysicalDevicePointClippingProperties,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDevicePointClippingPropertiesBuilder<'_> {}
unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDevicePointClippingProperties {}
impl<'a> ::std::ops::Deref for PhysicalDevicePointClippingPropertiesBuilder<'a> {
type Target = PhysicalDevicePointClippingProperties;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDevicePointClippingPropertiesBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDevicePointClippingPropertiesBuilder<'a> {
pub fn point_clipping_behavior(
mut self,
point_clipping_behavior: PointClippingBehavior,
) -> PhysicalDevicePointClippingPropertiesBuilder<'a> {
self.inner.point_clipping_behavior = point_clipping_behavior;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDevicePointClippingProperties {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkMemoryDedicatedRequirements.html>"]
pub struct MemoryDedicatedRequirements {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub prefers_dedicated_allocation: Bool32,
pub requires_dedicated_allocation: Bool32,
}
impl ::std::default::Default for MemoryDedicatedRequirements {
fn default() -> MemoryDedicatedRequirements {
MemoryDedicatedRequirements {
s_type: StructureType::MEMORY_DEDICATED_REQUIREMENTS,
p_next: ::std::ptr::null_mut(),
prefers_dedicated_allocation: Bool32::default(),
requires_dedicated_allocation: Bool32::default(),
}
}
}
impl MemoryDedicatedRequirements {
pub fn builder<'a>() -> MemoryDedicatedRequirementsBuilder<'a> {
MemoryDedicatedRequirementsBuilder {
inner: MemoryDedicatedRequirements::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct MemoryDedicatedRequirementsBuilder<'a> {
inner: MemoryDedicatedRequirements,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsMemoryRequirements2 for MemoryDedicatedRequirementsBuilder<'_> {}
unsafe impl ExtendsMemoryRequirements2 for MemoryDedicatedRequirements {}
impl<'a> ::std::ops::Deref for MemoryDedicatedRequirementsBuilder<'a> {
type Target = MemoryDedicatedRequirements;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for MemoryDedicatedRequirementsBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> MemoryDedicatedRequirementsBuilder<'a> {
pub fn prefers_dedicated_allocation(
mut self,
prefers_dedicated_allocation: bool,
) -> MemoryDedicatedRequirementsBuilder<'a> {
self.inner.prefers_dedicated_allocation = prefers_dedicated_allocation.into();
self
}
pub fn requires_dedicated_allocation(
mut self,
requires_dedicated_allocation: bool,
) -> MemoryDedicatedRequirementsBuilder<'a> {
self.inner.requires_dedicated_allocation = requires_dedicated_allocation.into();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> MemoryDedicatedRequirements {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkMemoryDedicatedAllocateInfo.html>"]
pub struct MemoryDedicatedAllocateInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub image: Image,
pub buffer: Buffer,
}
impl ::std::default::Default for MemoryDedicatedAllocateInfo {
fn default() -> MemoryDedicatedAllocateInfo {
MemoryDedicatedAllocateInfo {
s_type: StructureType::MEMORY_DEDICATED_ALLOCATE_INFO,
p_next: ::std::ptr::null(),
image: Image::default(),
buffer: Buffer::default(),
}
}
}
impl MemoryDedicatedAllocateInfo {
pub fn builder<'a>() -> MemoryDedicatedAllocateInfoBuilder<'a> {
MemoryDedicatedAllocateInfoBuilder {
inner: MemoryDedicatedAllocateInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct MemoryDedicatedAllocateInfoBuilder<'a> {
inner: MemoryDedicatedAllocateInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsMemoryAllocateInfo for MemoryDedicatedAllocateInfoBuilder<'_> {}
unsafe impl ExtendsMemoryAllocateInfo for MemoryDedicatedAllocateInfo {}
impl<'a> ::std::ops::Deref for MemoryDedicatedAllocateInfoBuilder<'a> {
type Target = MemoryDedicatedAllocateInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for MemoryDedicatedAllocateInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> MemoryDedicatedAllocateInfoBuilder<'a> {
pub fn image(mut self, image: Image) -> MemoryDedicatedAllocateInfoBuilder<'a> {
self.inner.image = image;
self
}
pub fn buffer(mut self, buffer: Buffer) -> MemoryDedicatedAllocateInfoBuilder<'a> {
self.inner.buffer = buffer;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> MemoryDedicatedAllocateInfo {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkImageViewUsageCreateInfo.html>"]
pub struct ImageViewUsageCreateInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub usage: ImageUsageFlags,
}
impl ::std::default::Default for ImageViewUsageCreateInfo {
fn default() -> ImageViewUsageCreateInfo {
ImageViewUsageCreateInfo {
s_type: StructureType::IMAGE_VIEW_USAGE_CREATE_INFO,
p_next: ::std::ptr::null(),
usage: ImageUsageFlags::default(),
}
}
}
impl ImageViewUsageCreateInfo {
pub fn builder<'a>() -> ImageViewUsageCreateInfoBuilder<'a> {
ImageViewUsageCreateInfoBuilder {
inner: ImageViewUsageCreateInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ImageViewUsageCreateInfoBuilder<'a> {
inner: ImageViewUsageCreateInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsImageViewCreateInfo for ImageViewUsageCreateInfoBuilder<'_> {}
unsafe impl ExtendsImageViewCreateInfo for ImageViewUsageCreateInfo {}
impl<'a> ::std::ops::Deref for ImageViewUsageCreateInfoBuilder<'a> {
type Target = ImageViewUsageCreateInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ImageViewUsageCreateInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ImageViewUsageCreateInfoBuilder<'a> {
pub fn usage(mut self, usage: ImageUsageFlags) -> ImageViewUsageCreateInfoBuilder<'a> {
self.inner.usage = usage;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ImageViewUsageCreateInfo {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPipelineTessellationDomainOriginStateCreateInfo.html>"]
pub struct PipelineTessellationDomainOriginStateCreateInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub domain_origin: TessellationDomainOrigin,
}
impl ::std::default::Default for PipelineTessellationDomainOriginStateCreateInfo {
fn default() -> PipelineTessellationDomainOriginStateCreateInfo {
PipelineTessellationDomainOriginStateCreateInfo {
s_type: StructureType::PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO,
p_next: ::std::ptr::null(),
domain_origin: TessellationDomainOrigin::default(),
}
}
}
impl PipelineTessellationDomainOriginStateCreateInfo {
pub fn builder<'a>() -> PipelineTessellationDomainOriginStateCreateInfoBuilder<'a> {
PipelineTessellationDomainOriginStateCreateInfoBuilder {
inner: PipelineTessellationDomainOriginStateCreateInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PipelineTessellationDomainOriginStateCreateInfoBuilder<'a> {
inner: PipelineTessellationDomainOriginStateCreateInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsPipelineTessellationStateCreateInfo
for PipelineTessellationDomainOriginStateCreateInfoBuilder<'_>
{
}
unsafe impl ExtendsPipelineTessellationStateCreateInfo
for PipelineTessellationDomainOriginStateCreateInfo
{
}
impl<'a> ::std::ops::Deref for PipelineTessellationDomainOriginStateCreateInfoBuilder<'a> {
type Target = PipelineTessellationDomainOriginStateCreateInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PipelineTessellationDomainOriginStateCreateInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PipelineTessellationDomainOriginStateCreateInfoBuilder<'a> {
pub fn domain_origin(
mut self,
domain_origin: TessellationDomainOrigin,
) -> PipelineTessellationDomainOriginStateCreateInfoBuilder<'a> {
self.inner.domain_origin = domain_origin;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PipelineTessellationDomainOriginStateCreateInfo {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkSamplerYcbcrConversionInfo.html>"]
pub struct SamplerYcbcrConversionInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub conversion: SamplerYcbcrConversion,
}
impl ::std::default::Default for SamplerYcbcrConversionInfo {
fn default() -> SamplerYcbcrConversionInfo {
SamplerYcbcrConversionInfo {
s_type: StructureType::SAMPLER_YCBCR_CONVERSION_INFO,
p_next: ::std::ptr::null(),
conversion: SamplerYcbcrConversion::default(),
}
}
}
impl SamplerYcbcrConversionInfo {
pub fn builder<'a>() -> SamplerYcbcrConversionInfoBuilder<'a> {
SamplerYcbcrConversionInfoBuilder {
inner: SamplerYcbcrConversionInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct SamplerYcbcrConversionInfoBuilder<'a> {
inner: SamplerYcbcrConversionInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsSamplerCreateInfo for SamplerYcbcrConversionInfoBuilder<'_> {}
unsafe impl ExtendsSamplerCreateInfo for SamplerYcbcrConversionInfo {}
unsafe impl ExtendsImageViewCreateInfo for SamplerYcbcrConversionInfoBuilder<'_> {}
unsafe impl ExtendsImageViewCreateInfo for SamplerYcbcrConversionInfo {}
impl<'a> ::std::ops::Deref for SamplerYcbcrConversionInfoBuilder<'a> {
type Target = SamplerYcbcrConversionInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for SamplerYcbcrConversionInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> SamplerYcbcrConversionInfoBuilder<'a> {
pub fn conversion(
mut self,
conversion: SamplerYcbcrConversion,
) -> SamplerYcbcrConversionInfoBuilder<'a> {
self.inner.conversion = conversion;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> SamplerYcbcrConversionInfo {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkSamplerYcbcrConversionCreateInfo.html>"]
pub struct SamplerYcbcrConversionCreateInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub format: Format,
pub ycbcr_model: SamplerYcbcrModelConversion,
pub ycbcr_range: SamplerYcbcrRange,
pub components: ComponentMapping,
pub x_chroma_offset: ChromaLocation,
pub y_chroma_offset: ChromaLocation,
pub chroma_filter: Filter,
pub force_explicit_reconstruction: Bool32,
}
impl ::std::default::Default for SamplerYcbcrConversionCreateInfo {
fn default() -> SamplerYcbcrConversionCreateInfo {
SamplerYcbcrConversionCreateInfo {
s_type: StructureType::SAMPLER_YCBCR_CONVERSION_CREATE_INFO,
p_next: ::std::ptr::null(),
format: Format::default(),
ycbcr_model: SamplerYcbcrModelConversion::default(),
ycbcr_range: SamplerYcbcrRange::default(),
components: ComponentMapping::default(),
x_chroma_offset: ChromaLocation::default(),
y_chroma_offset: ChromaLocation::default(),
chroma_filter: Filter::default(),
force_explicit_reconstruction: Bool32::default(),
}
}
}
impl SamplerYcbcrConversionCreateInfo {
pub fn builder<'a>() -> SamplerYcbcrConversionCreateInfoBuilder<'a> {
SamplerYcbcrConversionCreateInfoBuilder {
inner: SamplerYcbcrConversionCreateInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct SamplerYcbcrConversionCreateInfoBuilder<'a> {
inner: SamplerYcbcrConversionCreateInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsSamplerYcbcrConversionCreateInfo {}
impl<'a> ::std::ops::Deref for SamplerYcbcrConversionCreateInfoBuilder<'a> {
type Target = SamplerYcbcrConversionCreateInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for SamplerYcbcrConversionCreateInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> SamplerYcbcrConversionCreateInfoBuilder<'a> {
pub fn format(mut self, format: Format) -> SamplerYcbcrConversionCreateInfoBuilder<'a> {
self.inner.format = format;
self
}
pub fn ycbcr_model(
mut self,
ycbcr_model: SamplerYcbcrModelConversion,
) -> SamplerYcbcrConversionCreateInfoBuilder<'a> {
self.inner.ycbcr_model = ycbcr_model;
self
}
pub fn ycbcr_range(
mut self,
ycbcr_range: SamplerYcbcrRange,
) -> SamplerYcbcrConversionCreateInfoBuilder<'a> {
self.inner.ycbcr_range = ycbcr_range;
self
}
pub fn components(
mut self,
components: ComponentMapping,
) -> SamplerYcbcrConversionCreateInfoBuilder<'a> {
self.inner.components = components;
self
}
pub fn x_chroma_offset(
mut self,
x_chroma_offset: ChromaLocation,
) -> SamplerYcbcrConversionCreateInfoBuilder<'a> {
self.inner.x_chroma_offset = x_chroma_offset;
self
}
pub fn y_chroma_offset(
mut self,
y_chroma_offset: ChromaLocation,
) -> SamplerYcbcrConversionCreateInfoBuilder<'a> {
self.inner.y_chroma_offset = y_chroma_offset;
self
}
pub fn chroma_filter(
mut self,
chroma_filter: Filter,
) -> SamplerYcbcrConversionCreateInfoBuilder<'a> {
self.inner.chroma_filter = chroma_filter;
self
}
pub fn force_explicit_reconstruction(
mut self,
force_explicit_reconstruction: bool,
) -> SamplerYcbcrConversionCreateInfoBuilder<'a> {
self.inner.force_explicit_reconstruction = force_explicit_reconstruction.into();
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsSamplerYcbcrConversionCreateInfo>(
mut self,
next: &'a mut T,
) -> SamplerYcbcrConversionCreateInfoBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> SamplerYcbcrConversionCreateInfo {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkBindImagePlaneMemoryInfo.html>"]
pub struct BindImagePlaneMemoryInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub plane_aspect: ImageAspectFlags,
}
impl ::std::default::Default for BindImagePlaneMemoryInfo {
fn default() -> BindImagePlaneMemoryInfo {
BindImagePlaneMemoryInfo {
s_type: StructureType::BIND_IMAGE_PLANE_MEMORY_INFO,
p_next: ::std::ptr::null(),
plane_aspect: ImageAspectFlags::default(),
}
}
}
impl BindImagePlaneMemoryInfo {
pub fn builder<'a>() -> BindImagePlaneMemoryInfoBuilder<'a> {
BindImagePlaneMemoryInfoBuilder {
inner: BindImagePlaneMemoryInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct BindImagePlaneMemoryInfoBuilder<'a> {
inner: BindImagePlaneMemoryInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsBindImageMemoryInfo for BindImagePlaneMemoryInfoBuilder<'_> {}
unsafe impl ExtendsBindImageMemoryInfo for BindImagePlaneMemoryInfo {}
impl<'a> ::std::ops::Deref for BindImagePlaneMemoryInfoBuilder<'a> {
type Target = BindImagePlaneMemoryInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for BindImagePlaneMemoryInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> BindImagePlaneMemoryInfoBuilder<'a> {
pub fn plane_aspect(
mut self,
plane_aspect: ImageAspectFlags,
) -> BindImagePlaneMemoryInfoBuilder<'a> {
self.inner.plane_aspect = plane_aspect;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> BindImagePlaneMemoryInfo {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkImagePlaneMemoryRequirementsInfo.html>"]
pub struct ImagePlaneMemoryRequirementsInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub plane_aspect: ImageAspectFlags,
}
impl ::std::default::Default for ImagePlaneMemoryRequirementsInfo {
fn default() -> ImagePlaneMemoryRequirementsInfo {
ImagePlaneMemoryRequirementsInfo {
s_type: StructureType::IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO,
p_next: ::std::ptr::null(),
plane_aspect: ImageAspectFlags::default(),
}
}
}
impl ImagePlaneMemoryRequirementsInfo {
pub fn builder<'a>() -> ImagePlaneMemoryRequirementsInfoBuilder<'a> {
ImagePlaneMemoryRequirementsInfoBuilder {
inner: ImagePlaneMemoryRequirementsInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ImagePlaneMemoryRequirementsInfoBuilder<'a> {
inner: ImagePlaneMemoryRequirementsInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsImageMemoryRequirementsInfo2 for ImagePlaneMemoryRequirementsInfoBuilder<'_> {}
unsafe impl ExtendsImageMemoryRequirementsInfo2 for ImagePlaneMemoryRequirementsInfo {}
impl<'a> ::std::ops::Deref for ImagePlaneMemoryRequirementsInfoBuilder<'a> {
type Target = ImagePlaneMemoryRequirementsInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ImagePlaneMemoryRequirementsInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ImagePlaneMemoryRequirementsInfoBuilder<'a> {
pub fn plane_aspect(
mut self,
plane_aspect: ImageAspectFlags,
) -> ImagePlaneMemoryRequirementsInfoBuilder<'a> {
self.inner.plane_aspect = plane_aspect;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ImagePlaneMemoryRequirementsInfo {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDeviceSamplerYcbcrConversionFeatures.html>"]
pub struct PhysicalDeviceSamplerYcbcrConversionFeatures {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub sampler_ycbcr_conversion: Bool32,
}
impl ::std::default::Default for PhysicalDeviceSamplerYcbcrConversionFeatures {
fn default() -> PhysicalDeviceSamplerYcbcrConversionFeatures {
PhysicalDeviceSamplerYcbcrConversionFeatures {
s_type: StructureType::PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES,
p_next: ::std::ptr::null_mut(),
sampler_ycbcr_conversion: Bool32::default(),
}
}
}
impl PhysicalDeviceSamplerYcbcrConversionFeatures {
pub fn builder<'a>() -> PhysicalDeviceSamplerYcbcrConversionFeaturesBuilder<'a> {
PhysicalDeviceSamplerYcbcrConversionFeaturesBuilder {
inner: PhysicalDeviceSamplerYcbcrConversionFeatures::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDeviceSamplerYcbcrConversionFeaturesBuilder<'a> {
inner: PhysicalDeviceSamplerYcbcrConversionFeatures,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceSamplerYcbcrConversionFeaturesBuilder<'_> {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceSamplerYcbcrConversionFeatures {}
impl<'a> ::std::ops::Deref for PhysicalDeviceSamplerYcbcrConversionFeaturesBuilder<'a> {
type Target = PhysicalDeviceSamplerYcbcrConversionFeatures;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceSamplerYcbcrConversionFeaturesBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDeviceSamplerYcbcrConversionFeaturesBuilder<'a> {
pub fn sampler_ycbcr_conversion(
mut self,
sampler_ycbcr_conversion: bool,
) -> PhysicalDeviceSamplerYcbcrConversionFeaturesBuilder<'a> {
self.inner.sampler_ycbcr_conversion = sampler_ycbcr_conversion.into();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDeviceSamplerYcbcrConversionFeatures {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkSamplerYcbcrConversionImageFormatProperties.html>"]
pub struct SamplerYcbcrConversionImageFormatProperties {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub combined_image_sampler_descriptor_count: u32,
}
impl ::std::default::Default for SamplerYcbcrConversionImageFormatProperties {
fn default() -> SamplerYcbcrConversionImageFormatProperties {
SamplerYcbcrConversionImageFormatProperties {
s_type: StructureType::SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES,
p_next: ::std::ptr::null_mut(),
combined_image_sampler_descriptor_count: u32::default(),
}
}
}
impl SamplerYcbcrConversionImageFormatProperties {
pub fn builder<'a>() -> SamplerYcbcrConversionImageFormatPropertiesBuilder<'a> {
SamplerYcbcrConversionImageFormatPropertiesBuilder {
inner: SamplerYcbcrConversionImageFormatProperties::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct SamplerYcbcrConversionImageFormatPropertiesBuilder<'a> {
inner: SamplerYcbcrConversionImageFormatProperties,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsImageFormatProperties2
for SamplerYcbcrConversionImageFormatPropertiesBuilder<'_>
{
}
unsafe impl ExtendsImageFormatProperties2 for SamplerYcbcrConversionImageFormatProperties {}
impl<'a> ::std::ops::Deref for SamplerYcbcrConversionImageFormatPropertiesBuilder<'a> {
type Target = SamplerYcbcrConversionImageFormatProperties;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for SamplerYcbcrConversionImageFormatPropertiesBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> SamplerYcbcrConversionImageFormatPropertiesBuilder<'a> {
pub fn combined_image_sampler_descriptor_count(
mut self,
combined_image_sampler_descriptor_count: u32,
) -> SamplerYcbcrConversionImageFormatPropertiesBuilder<'a> {
self.inner.combined_image_sampler_descriptor_count =
combined_image_sampler_descriptor_count;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> SamplerYcbcrConversionImageFormatProperties {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkTextureLODGatherFormatPropertiesAMD.html>"]
pub struct TextureLODGatherFormatPropertiesAMD {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub supports_texture_gather_lod_bias_amd: Bool32,
}
impl ::std::default::Default for TextureLODGatherFormatPropertiesAMD {
fn default() -> TextureLODGatherFormatPropertiesAMD {
TextureLODGatherFormatPropertiesAMD {
s_type: StructureType::TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD,
p_next: ::std::ptr::null_mut(),
supports_texture_gather_lod_bias_amd: Bool32::default(),
}
}
}
impl TextureLODGatherFormatPropertiesAMD {
pub fn builder<'a>() -> TextureLODGatherFormatPropertiesAMDBuilder<'a> {
TextureLODGatherFormatPropertiesAMDBuilder {
inner: TextureLODGatherFormatPropertiesAMD::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct TextureLODGatherFormatPropertiesAMDBuilder<'a> {
inner: TextureLODGatherFormatPropertiesAMD,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsImageFormatProperties2 for TextureLODGatherFormatPropertiesAMDBuilder<'_> {}
unsafe impl ExtendsImageFormatProperties2 for TextureLODGatherFormatPropertiesAMD {}
impl<'a> ::std::ops::Deref for TextureLODGatherFormatPropertiesAMDBuilder<'a> {
type Target = TextureLODGatherFormatPropertiesAMD;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for TextureLODGatherFormatPropertiesAMDBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> TextureLODGatherFormatPropertiesAMDBuilder<'a> {
pub fn supports_texture_gather_lod_bias_amd(
mut self,
supports_texture_gather_lod_bias_amd: bool,
) -> TextureLODGatherFormatPropertiesAMDBuilder<'a> {
self.inner.supports_texture_gather_lod_bias_amd =
supports_texture_gather_lod_bias_amd.into();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> TextureLODGatherFormatPropertiesAMD {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkConditionalRenderingBeginInfoEXT.html>"]
pub struct ConditionalRenderingBeginInfoEXT {
pub s_type: StructureType,
pub p_next: *const c_void,
pub buffer: Buffer,
pub offset: DeviceSize,
pub flags: ConditionalRenderingFlagsEXT,
}
impl ::std::default::Default for ConditionalRenderingBeginInfoEXT {
fn default() -> ConditionalRenderingBeginInfoEXT {
ConditionalRenderingBeginInfoEXT {
s_type: StructureType::CONDITIONAL_RENDERING_BEGIN_INFO_EXT,
p_next: ::std::ptr::null(),
buffer: Buffer::default(),
offset: DeviceSize::default(),
flags: ConditionalRenderingFlagsEXT::default(),
}
}
}
impl ConditionalRenderingBeginInfoEXT {
pub fn builder<'a>() -> ConditionalRenderingBeginInfoEXTBuilder<'a> {
ConditionalRenderingBeginInfoEXTBuilder {
inner: ConditionalRenderingBeginInfoEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ConditionalRenderingBeginInfoEXTBuilder<'a> {
inner: ConditionalRenderingBeginInfoEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsConditionalRenderingBeginInfoEXT {}
impl<'a> ::std::ops::Deref for ConditionalRenderingBeginInfoEXTBuilder<'a> {
type Target = ConditionalRenderingBeginInfoEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ConditionalRenderingBeginInfoEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ConditionalRenderingBeginInfoEXTBuilder<'a> {
pub fn buffer(mut self, buffer: Buffer) -> ConditionalRenderingBeginInfoEXTBuilder<'a> {
self.inner.buffer = buffer;
self
}
pub fn offset(mut self, offset: DeviceSize) -> ConditionalRenderingBeginInfoEXTBuilder<'a> {
self.inner.offset = offset;
self
}
pub fn flags(
mut self,
flags: ConditionalRenderingFlagsEXT,
) -> ConditionalRenderingBeginInfoEXTBuilder<'a> {
self.inner.flags = flags;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsConditionalRenderingBeginInfoEXT>(
mut self,
next: &'a mut T,
) -> ConditionalRenderingBeginInfoEXTBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ConditionalRenderingBeginInfoEXT {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkProtectedSubmitInfo.html>"]
pub struct ProtectedSubmitInfo {
pub s_type: StructureType,
pub p_next: *const c_void,
pub protected_submit: Bool32,
}
impl ::std::default::Default for ProtectedSubmitInfo {
fn default() -> ProtectedSubmitInfo {
ProtectedSubmitInfo {
s_type: StructureType::PROTECTED_SUBMIT_INFO,
p_next: ::std::ptr::null(),
protected_submit: Bool32::default(),
}
}
}
impl ProtectedSubmitInfo {
pub fn builder<'a>() -> ProtectedSubmitInfoBuilder<'a> {
ProtectedSubmitInfoBuilder {
inner: ProtectedSubmitInfo::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ProtectedSubmitInfoBuilder<'a> {
inner: ProtectedSubmitInfo,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsSubmitInfo for ProtectedSubmitInfoBuilder<'_> {}
unsafe impl ExtendsSubmitInfo for ProtectedSubmitInfo {}
impl<'a> ::std::ops::Deref for ProtectedSubmitInfoBuilder<'a> {
type Target = ProtectedSubmitInfo;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ProtectedSubmitInfoBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ProtectedSubmitInfoBuilder<'a> {
pub fn protected_submit(mut self, protected_submit: bool) -> ProtectedSubmitInfoBuilder<'a> {
self.inner.protected_submit = protected_submit.into();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ProtectedSubmitInfo {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDeviceProtectedMemoryFeatures.html>"]
pub struct PhysicalDeviceProtectedMemoryFeatures {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub protected_memory: Bool32,
}
impl ::std::default::Default for PhysicalDeviceProtectedMemoryFeatures {
fn default() -> PhysicalDeviceProtectedMemoryFeatures {
PhysicalDeviceProtectedMemoryFeatures {
s_type: StructureType::PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES,
p_next: ::std::ptr::null_mut(),
protected_memory: Bool32::default(),
}
}
}
impl PhysicalDeviceProtectedMemoryFeatures {
pub fn builder<'a>() -> PhysicalDeviceProtectedMemoryFeaturesBuilder<'a> {
PhysicalDeviceProtectedMemoryFeaturesBuilder {
inner: PhysicalDeviceProtectedMemoryFeatures::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDeviceProtectedMemoryFeaturesBuilder<'a> {
inner: PhysicalDeviceProtectedMemoryFeatures,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceProtectedMemoryFeaturesBuilder<'_> {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceProtectedMemoryFeatures {}
impl<'a> ::std::ops::Deref for PhysicalDeviceProtectedMemoryFeaturesBuilder<'a> {
type Target = PhysicalDeviceProtectedMemoryFeatures;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceProtectedMemoryFeaturesBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDeviceProtectedMemoryFeaturesBuilder<'a> {
pub fn protected_memory(
mut self,
protected_memory: bool,
) -> PhysicalDeviceProtectedMemoryFeaturesBuilder<'a> {
self.inner.protected_memory = protected_memory.into();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDeviceProtectedMemoryFeatures {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDeviceProtectedMemoryProperties.html>"]
pub struct PhysicalDeviceProtectedMemoryProperties {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub protected_no_fault: Bool32,
}
impl ::std::default::Default for PhysicalDeviceProtectedMemoryProperties {
fn default() -> PhysicalDeviceProtectedMemoryProperties {
PhysicalDeviceProtectedMemoryProperties {
s_type: StructureType::PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES,
p_next: ::std::ptr::null_mut(),
protected_no_fault: Bool32::default(),
}
}
}
impl PhysicalDeviceProtectedMemoryProperties {
pub fn builder<'a>() -> PhysicalDeviceProtectedMemoryPropertiesBuilder<'a> {
PhysicalDeviceProtectedMemoryPropertiesBuilder {
inner: PhysicalDeviceProtectedMemoryProperties::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDeviceProtectedMemoryPropertiesBuilder<'a> {
inner: PhysicalDeviceProtectedMemoryProperties,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsPhysicalDeviceProperties2
for PhysicalDeviceProtectedMemoryPropertiesBuilder<'_>
{
}
unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceProtectedMemoryProperties {}
impl<'a> ::std::ops::Deref for PhysicalDeviceProtectedMemoryPropertiesBuilder<'a> {
type Target = PhysicalDeviceProtectedMemoryProperties;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceProtectedMemoryPropertiesBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDeviceProtectedMemoryPropertiesBuilder<'a> {
pub fn protected_no_fault(
mut self,
protected_no_fault: bool,
) -> PhysicalDeviceProtectedMemoryPropertiesBuilder<'a> {
self.inner.protected_no_fault = protected_no_fault.into();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDeviceProtectedMemoryProperties {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDeviceQueueInfo2.html>"]
pub struct DeviceQueueInfo2 {
pub s_type: StructureType,
pub p_next: *const c_void,
pub flags: DeviceQueueCreateFlags,
pub queue_family_index: u32,
pub queue_index: u32,
}
impl ::std::default::Default for DeviceQueueInfo2 {
fn default() -> DeviceQueueInfo2 {
DeviceQueueInfo2 {
s_type: StructureType::DEVICE_QUEUE_INFO_2,
p_next: ::std::ptr::null(),
flags: DeviceQueueCreateFlags::default(),
queue_family_index: u32::default(),
queue_index: u32::default(),
}
}
}
impl DeviceQueueInfo2 {
pub fn builder<'a>() -> DeviceQueueInfo2Builder<'a> {
DeviceQueueInfo2Builder {
inner: DeviceQueueInfo2::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct DeviceQueueInfo2Builder<'a> {
inner: DeviceQueueInfo2,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsDeviceQueueInfo2 {}
impl<'a> ::std::ops::Deref for DeviceQueueInfo2Builder<'a> {
type Target = DeviceQueueInfo2;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for DeviceQueueInfo2Builder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> DeviceQueueInfo2Builder<'a> {
pub fn flags(mut self, flags: DeviceQueueCreateFlags) -> DeviceQueueInfo2Builder<'a> {
self.inner.flags = flags;
self
}
pub fn queue_family_index(mut self, queue_family_index: u32) -> DeviceQueueInfo2Builder<'a> {
self.inner.queue_family_index = queue_family_index;
self
}
pub fn queue_index(mut self, queue_index: u32) -> DeviceQueueInfo2Builder<'a> {
self.inner.queue_index = queue_index;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsDeviceQueueInfo2>(
mut self,
next: &'a mut T,
) -> DeviceQueueInfo2Builder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> DeviceQueueInfo2 {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPipelineCoverageToColorStateCreateInfoNV.html>"]
pub struct PipelineCoverageToColorStateCreateInfoNV {
pub s_type: StructureType,
pub p_next: *const c_void,
pub flags: PipelineCoverageToColorStateCreateFlagsNV,
pub coverage_to_color_enable: Bool32,
pub coverage_to_color_location: u32,
}
impl ::std::default::Default for PipelineCoverageToColorStateCreateInfoNV {
fn default() -> PipelineCoverageToColorStateCreateInfoNV {
PipelineCoverageToColorStateCreateInfoNV {
s_type: StructureType::PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV,
p_next: ::std::ptr::null(),
flags: PipelineCoverageToColorStateCreateFlagsNV::default(),
coverage_to_color_enable: Bool32::default(),
coverage_to_color_location: u32::default(),
}
}
}
impl PipelineCoverageToColorStateCreateInfoNV {
pub fn builder<'a>() -> PipelineCoverageToColorStateCreateInfoNVBuilder<'a> {
PipelineCoverageToColorStateCreateInfoNVBuilder {
inner: PipelineCoverageToColorStateCreateInfoNV::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PipelineCoverageToColorStateCreateInfoNVBuilder<'a> {
inner: PipelineCoverageToColorStateCreateInfoNV,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsPipelineMultisampleStateCreateInfo
for PipelineCoverageToColorStateCreateInfoNVBuilder<'_>
{
}
unsafe impl ExtendsPipelineMultisampleStateCreateInfo for PipelineCoverageToColorStateCreateInfoNV {}
impl<'a> ::std::ops::Deref for PipelineCoverageToColorStateCreateInfoNVBuilder<'a> {
type Target = PipelineCoverageToColorStateCreateInfoNV;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PipelineCoverageToColorStateCreateInfoNVBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PipelineCoverageToColorStateCreateInfoNVBuilder<'a> {
pub fn flags(
mut self,
flags: PipelineCoverageToColorStateCreateFlagsNV,
) -> PipelineCoverageToColorStateCreateInfoNVBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn coverage_to_color_enable(
mut self,
coverage_to_color_enable: bool,
) -> PipelineCoverageToColorStateCreateInfoNVBuilder<'a> {
self.inner.coverage_to_color_enable = coverage_to_color_enable.into();
self
}
pub fn coverage_to_color_location(
mut self,
coverage_to_color_location: u32,
) -> PipelineCoverageToColorStateCreateInfoNVBuilder<'a> {
self.inner.coverage_to_color_location = coverage_to_color_location;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PipelineCoverageToColorStateCreateInfoNV {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT.html>"]
pub struct PhysicalDeviceSamplerFilterMinmaxPropertiesEXT {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub filter_minmax_single_component_formats: Bool32,
pub filter_minmax_image_component_mapping: Bool32,
}
impl ::std::default::Default for PhysicalDeviceSamplerFilterMinmaxPropertiesEXT {
fn default() -> PhysicalDeviceSamplerFilterMinmaxPropertiesEXT {
PhysicalDeviceSamplerFilterMinmaxPropertiesEXT {
s_type: StructureType::PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT,
p_next: ::std::ptr::null_mut(),
filter_minmax_single_component_formats: Bool32::default(),
filter_minmax_image_component_mapping: Bool32::default(),
}
}
}
impl PhysicalDeviceSamplerFilterMinmaxPropertiesEXT {
pub fn builder<'a>() -> PhysicalDeviceSamplerFilterMinmaxPropertiesEXTBuilder<'a> {
PhysicalDeviceSamplerFilterMinmaxPropertiesEXTBuilder {
inner: PhysicalDeviceSamplerFilterMinmaxPropertiesEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDeviceSamplerFilterMinmaxPropertiesEXTBuilder<'a> {
inner: PhysicalDeviceSamplerFilterMinmaxPropertiesEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsPhysicalDeviceProperties2
for PhysicalDeviceSamplerFilterMinmaxPropertiesEXTBuilder<'_>
{
}
unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceSamplerFilterMinmaxPropertiesEXT {}
impl<'a> ::std::ops::Deref for PhysicalDeviceSamplerFilterMinmaxPropertiesEXTBuilder<'a> {
type Target = PhysicalDeviceSamplerFilterMinmaxPropertiesEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceSamplerFilterMinmaxPropertiesEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDeviceSamplerFilterMinmaxPropertiesEXTBuilder<'a> {
pub fn filter_minmax_single_component_formats(
mut self,
filter_minmax_single_component_formats: bool,
) -> PhysicalDeviceSamplerFilterMinmaxPropertiesEXTBuilder<'a> {
self.inner.filter_minmax_single_component_formats =
filter_minmax_single_component_formats.into();
self
}
pub fn filter_minmax_image_component_mapping(
mut self,
filter_minmax_image_component_mapping: bool,
) -> PhysicalDeviceSamplerFilterMinmaxPropertiesEXTBuilder<'a> {
self.inner.filter_minmax_image_component_mapping =
filter_minmax_image_component_mapping.into();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDeviceSamplerFilterMinmaxPropertiesEXT {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkSampleLocationEXT.html>"]
pub struct SampleLocationEXT {
pub x: f32,
pub y: f32,
}
impl SampleLocationEXT {
pub fn builder<'a>() -> SampleLocationEXTBuilder<'a> {
SampleLocationEXTBuilder {
inner: SampleLocationEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct SampleLocationEXTBuilder<'a> {
inner: SampleLocationEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for SampleLocationEXTBuilder<'a> {
type Target = SampleLocationEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for SampleLocationEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> SampleLocationEXTBuilder<'a> {
pub fn x(mut self, x: f32) -> SampleLocationEXTBuilder<'a> {
self.inner.x = x;
self
}
pub fn y(mut self, y: f32) -> SampleLocationEXTBuilder<'a> {
self.inner.y = y;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> SampleLocationEXT {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkSampleLocationsInfoEXT.html>"]
pub struct SampleLocationsInfoEXT {
pub s_type: StructureType,
pub p_next: *const c_void,
pub sample_locations_per_pixel: SampleCountFlags,
pub sample_location_grid_size: Extent2D,
pub sample_locations_count: u32,
pub p_sample_locations: *const SampleLocationEXT,
}
impl ::std::default::Default for SampleLocationsInfoEXT {
fn default() -> SampleLocationsInfoEXT {
SampleLocationsInfoEXT {
s_type: StructureType::SAMPLE_LOCATIONS_INFO_EXT,
p_next: ::std::ptr::null(),
sample_locations_per_pixel: SampleCountFlags::default(),
sample_location_grid_size: Extent2D::default(),
sample_locations_count: u32::default(),
p_sample_locations: ::std::ptr::null(),
}
}
}
impl SampleLocationsInfoEXT {
pub fn builder<'a>() -> SampleLocationsInfoEXTBuilder<'a> {
SampleLocationsInfoEXTBuilder {
inner: SampleLocationsInfoEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct SampleLocationsInfoEXTBuilder<'a> {
inner: SampleLocationsInfoEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsImageMemoryBarrier for SampleLocationsInfoEXTBuilder<'_> {}
unsafe impl ExtendsImageMemoryBarrier for SampleLocationsInfoEXT {}
impl<'a> ::std::ops::Deref for SampleLocationsInfoEXTBuilder<'a> {
type Target = SampleLocationsInfoEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for SampleLocationsInfoEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> SampleLocationsInfoEXTBuilder<'a> {
pub fn sample_locations_per_pixel(
mut self,
sample_locations_per_pixel: SampleCountFlags,
) -> SampleLocationsInfoEXTBuilder<'a> {
self.inner.sample_locations_per_pixel = sample_locations_per_pixel;
self
}
pub fn sample_location_grid_size(
mut self,
sample_location_grid_size: Extent2D,
) -> SampleLocationsInfoEXTBuilder<'a> {
self.inner.sample_location_grid_size = sample_location_grid_size;
self
}
pub fn sample_locations(
mut self,
sample_locations: &'a [SampleLocationEXT],
) -> SampleLocationsInfoEXTBuilder<'a> {
self.inner.sample_locations_count = sample_locations.len() as _;
self.inner.p_sample_locations = sample_locations.as_ptr();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> SampleLocationsInfoEXT {
self.inner
}
}
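// Illustrative sketch, not part of the generated bindings: the slice-based
// `sample_locations` setter fills both `sample_locations_count` and
// `p_sample_locations` from a single `&[SampleLocationEXT]`. The same
// count-plus-pointer pattern is used by the other slice setters in this file
// (e.g. `view_formats`, `coverage_modulation_table`). Because `build()`
// discards the builder's lifetime, the slice must outlive any use of the
// returned struct; here it lives for the whole test.
#[cfg(test)]
mod sample_locations_info_ext_example {
use super::*;
#[test]
fn slice_setter_fills_count_and_pointer() {
let locations = [
SampleLocationEXT::builder().x(0.25).y(0.25).build(),
SampleLocationEXT::builder().x(0.75).y(0.75).build(),
];
let info = SampleLocationsInfoEXT::builder()
.sample_locations(&locations)
.build();
assert_eq!(info.sample_locations_count, locations.len() as u32);
assert_eq!(info.p_sample_locations, locations.as_ptr());
}
}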
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkAttachmentSampleLocationsEXT.html>"]
pub struct AttachmentSampleLocationsEXT {
pub attachment_index: u32,
pub sample_locations_info: SampleLocationsInfoEXT,
}
impl AttachmentSampleLocationsEXT {
pub fn builder<'a>() -> AttachmentSampleLocationsEXTBuilder<'a> {
AttachmentSampleLocationsEXTBuilder {
inner: AttachmentSampleLocationsEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct AttachmentSampleLocationsEXTBuilder<'a> {
inner: AttachmentSampleLocationsEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for AttachmentSampleLocationsEXTBuilder<'a> {
type Target = AttachmentSampleLocationsEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for AttachmentSampleLocationsEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> AttachmentSampleLocationsEXTBuilder<'a> {
pub fn attachment_index(
mut self,
attachment_index: u32,
) -> AttachmentSampleLocationsEXTBuilder<'a> {
self.inner.attachment_index = attachment_index;
self
}
pub fn sample_locations_info(
mut self,
sample_locations_info: SampleLocationsInfoEXT,
) -> AttachmentSampleLocationsEXTBuilder<'a> {
self.inner.sample_locations_info = sample_locations_info;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> AttachmentSampleLocationsEXT {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkSubpassSampleLocationsEXT.html>"]
pub struct SubpassSampleLocationsEXT {
pub subpass_index: u32,
pub sample_locations_info: SampleLocationsInfoEXT,
}
impl SubpassSampleLocationsEXT {
pub fn builder<'a>() -> SubpassSampleLocationsEXTBuilder<'a> {
SubpassSampleLocationsEXTBuilder {
inner: SubpassSampleLocationsEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct SubpassSampleLocationsEXTBuilder<'a> {
inner: SubpassSampleLocationsEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for SubpassSampleLocationsEXTBuilder<'a> {
type Target = SubpassSampleLocationsEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for SubpassSampleLocationsEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> SubpassSampleLocationsEXTBuilder<'a> {
pub fn subpass_index(mut self, subpass_index: u32) -> SubpassSampleLocationsEXTBuilder<'a> {
self.inner.subpass_index = subpass_index;
self
}
pub fn sample_locations_info(
mut self,
sample_locations_info: SampleLocationsInfoEXT,
) -> SubpassSampleLocationsEXTBuilder<'a> {
self.inner.sample_locations_info = sample_locations_info;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> SubpassSampleLocationsEXT {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkRenderPassSampleLocationsBeginInfoEXT.html>"]
pub struct RenderPassSampleLocationsBeginInfoEXT {
pub s_type: StructureType,
pub p_next: *const c_void,
pub attachment_initial_sample_locations_count: u32,
pub p_attachment_initial_sample_locations: *const AttachmentSampleLocationsEXT,
pub post_subpass_sample_locations_count: u32,
pub p_post_subpass_sample_locations: *const SubpassSampleLocationsEXT,
}
impl ::std::default::Default for RenderPassSampleLocationsBeginInfoEXT {
fn default() -> RenderPassSampleLocationsBeginInfoEXT {
RenderPassSampleLocationsBeginInfoEXT {
s_type: StructureType::RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT,
p_next: ::std::ptr::null(),
attachment_initial_sample_locations_count: u32::default(),
p_attachment_initial_sample_locations: ::std::ptr::null(),
post_subpass_sample_locations_count: u32::default(),
p_post_subpass_sample_locations: ::std::ptr::null(),
}
}
}
impl RenderPassSampleLocationsBeginInfoEXT {
pub fn builder<'a>() -> RenderPassSampleLocationsBeginInfoEXTBuilder<'a> {
RenderPassSampleLocationsBeginInfoEXTBuilder {
inner: RenderPassSampleLocationsBeginInfoEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct RenderPassSampleLocationsBeginInfoEXTBuilder<'a> {
inner: RenderPassSampleLocationsBeginInfoEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsRenderPassBeginInfo for RenderPassSampleLocationsBeginInfoEXTBuilder<'_> {}
unsafe impl ExtendsRenderPassBeginInfo for RenderPassSampleLocationsBeginInfoEXT {}
impl<'a> ::std::ops::Deref for RenderPassSampleLocationsBeginInfoEXTBuilder<'a> {
type Target = RenderPassSampleLocationsBeginInfoEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for RenderPassSampleLocationsBeginInfoEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> RenderPassSampleLocationsBeginInfoEXTBuilder<'a> {
pub fn attachment_initial_sample_locations(
mut self,
attachment_initial_sample_locations: &'a [AttachmentSampleLocationsEXT],
) -> RenderPassSampleLocationsBeginInfoEXTBuilder<'a> {
self.inner.attachment_initial_sample_locations_count =
attachment_initial_sample_locations.len() as _;
self.inner.p_attachment_initial_sample_locations =
attachment_initial_sample_locations.as_ptr();
self
}
pub fn post_subpass_sample_locations(
mut self,
post_subpass_sample_locations: &'a [SubpassSampleLocationsEXT],
) -> RenderPassSampleLocationsBeginInfoEXTBuilder<'a> {
self.inner.post_subpass_sample_locations_count = post_subpass_sample_locations.len() as _;
self.inner.p_post_subpass_sample_locations = post_subpass_sample_locations.as_ptr();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> RenderPassSampleLocationsBeginInfoEXT {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPipelineSampleLocationsStateCreateInfoEXT.html>"]
pub struct PipelineSampleLocationsStateCreateInfoEXT {
pub s_type: StructureType,
pub p_next: *const c_void,
pub sample_locations_enable: Bool32,
pub sample_locations_info: SampleLocationsInfoEXT,
}
impl ::std::default::Default for PipelineSampleLocationsStateCreateInfoEXT {
fn default() -> PipelineSampleLocationsStateCreateInfoEXT {
PipelineSampleLocationsStateCreateInfoEXT {
s_type: StructureType::PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT,
p_next: ::std::ptr::null(),
sample_locations_enable: Bool32::default(),
sample_locations_info: SampleLocationsInfoEXT::default(),
}
}
}
impl PipelineSampleLocationsStateCreateInfoEXT {
pub fn builder<'a>() -> PipelineSampleLocationsStateCreateInfoEXTBuilder<'a> {
PipelineSampleLocationsStateCreateInfoEXTBuilder {
inner: PipelineSampleLocationsStateCreateInfoEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PipelineSampleLocationsStateCreateInfoEXTBuilder<'a> {
inner: PipelineSampleLocationsStateCreateInfoEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsPipelineMultisampleStateCreateInfo
for PipelineSampleLocationsStateCreateInfoEXTBuilder<'_>
{
}
unsafe impl ExtendsPipelineMultisampleStateCreateInfo
for PipelineSampleLocationsStateCreateInfoEXT
{
}
impl<'a> ::std::ops::Deref for PipelineSampleLocationsStateCreateInfoEXTBuilder<'a> {
type Target = PipelineSampleLocationsStateCreateInfoEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PipelineSampleLocationsStateCreateInfoEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PipelineSampleLocationsStateCreateInfoEXTBuilder<'a> {
pub fn sample_locations_enable(
mut self,
sample_locations_enable: bool,
) -> PipelineSampleLocationsStateCreateInfoEXTBuilder<'a> {
self.inner.sample_locations_enable = sample_locations_enable.into();
self
}
pub fn sample_locations_info(
mut self,
sample_locations_info: SampleLocationsInfoEXT,
) -> PipelineSampleLocationsStateCreateInfoEXTBuilder<'a> {
self.inner.sample_locations_info = sample_locations_info;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PipelineSampleLocationsStateCreateInfoEXT {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDeviceSampleLocationsPropertiesEXT.html>"]
pub struct PhysicalDeviceSampleLocationsPropertiesEXT {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub sample_location_sample_counts: SampleCountFlags,
pub max_sample_location_grid_size: Extent2D,
pub sample_location_coordinate_range: [f32; 2],
pub sample_location_sub_pixel_bits: u32,
pub variable_sample_locations: Bool32,
}
impl ::std::default::Default for PhysicalDeviceSampleLocationsPropertiesEXT {
fn default() -> PhysicalDeviceSampleLocationsPropertiesEXT {
PhysicalDeviceSampleLocationsPropertiesEXT {
s_type: StructureType::PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT,
p_next: ::std::ptr::null_mut(),
sample_location_sample_counts: SampleCountFlags::default(),
max_sample_location_grid_size: Extent2D::default(),
sample_location_coordinate_range: unsafe { ::std::mem::zeroed() },
sample_location_sub_pixel_bits: u32::default(),
variable_sample_locations: Bool32::default(),
}
}
}
impl PhysicalDeviceSampleLocationsPropertiesEXT {
pub fn builder<'a>() -> PhysicalDeviceSampleLocationsPropertiesEXTBuilder<'a> {
PhysicalDeviceSampleLocationsPropertiesEXTBuilder {
inner: PhysicalDeviceSampleLocationsPropertiesEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDeviceSampleLocationsPropertiesEXTBuilder<'a> {
inner: PhysicalDeviceSampleLocationsPropertiesEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsPhysicalDeviceProperties2
for PhysicalDeviceSampleLocationsPropertiesEXTBuilder<'_>
{
}
unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceSampleLocationsPropertiesEXT {}
impl<'a> ::std::ops::Deref for PhysicalDeviceSampleLocationsPropertiesEXTBuilder<'a> {
type Target = PhysicalDeviceSampleLocationsPropertiesEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceSampleLocationsPropertiesEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDeviceSampleLocationsPropertiesEXTBuilder<'a> {
pub fn sample_location_sample_counts(
mut self,
sample_location_sample_counts: SampleCountFlags,
) -> PhysicalDeviceSampleLocationsPropertiesEXTBuilder<'a> {
self.inner.sample_location_sample_counts = sample_location_sample_counts;
self
}
pub fn max_sample_location_grid_size(
mut self,
max_sample_location_grid_size: Extent2D,
) -> PhysicalDeviceSampleLocationsPropertiesEXTBuilder<'a> {
self.inner.max_sample_location_grid_size = max_sample_location_grid_size;
self
}
pub fn sample_location_coordinate_range(
mut self,
sample_location_coordinate_range: [f32; 2],
) -> PhysicalDeviceSampleLocationsPropertiesEXTBuilder<'a> {
self.inner.sample_location_coordinate_range = sample_location_coordinate_range;
self
}
pub fn sample_location_sub_pixel_bits(
mut self,
sample_location_sub_pixel_bits: u32,
) -> PhysicalDeviceSampleLocationsPropertiesEXTBuilder<'a> {
self.inner.sample_location_sub_pixel_bits = sample_location_sub_pixel_bits;
self
}
pub fn variable_sample_locations(
mut self,
variable_sample_locations: bool,
) -> PhysicalDeviceSampleLocationsPropertiesEXTBuilder<'a> {
self.inner.variable_sample_locations = variable_sample_locations.into();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDeviceSampleLocationsPropertiesEXT {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkMultisamplePropertiesEXT.html>"]
pub struct MultisamplePropertiesEXT {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub max_sample_location_grid_size: Extent2D,
}
impl ::std::default::Default for MultisamplePropertiesEXT {
fn default() -> MultisamplePropertiesEXT {
MultisamplePropertiesEXT {
s_type: StructureType::MULTISAMPLE_PROPERTIES_EXT,
p_next: ::std::ptr::null_mut(),
max_sample_location_grid_size: Extent2D::default(),
}
}
}
impl MultisamplePropertiesEXT {
pub fn builder<'a>() -> MultisamplePropertiesEXTBuilder<'a> {
MultisamplePropertiesEXTBuilder {
inner: MultisamplePropertiesEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct MultisamplePropertiesEXTBuilder<'a> {
inner: MultisamplePropertiesEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsMultisamplePropertiesEXT {}
impl<'a> ::std::ops::Deref for MultisamplePropertiesEXTBuilder<'a> {
type Target = MultisamplePropertiesEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for MultisamplePropertiesEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> MultisamplePropertiesEXTBuilder<'a> {
pub fn max_sample_location_grid_size(
mut self,
max_sample_location_grid_size: Extent2D,
) -> MultisamplePropertiesEXTBuilder<'a> {
self.inner.max_sample_location_grid_size = max_sample_location_grid_size;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsMultisamplePropertiesEXT>(
mut self,
next: &'a mut T,
) -> MultisamplePropertiesEXTBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> MultisamplePropertiesEXT {
self.inner
}
}
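// Illustrative sketch, not part of the generated bindings: `push_next` splices an
// extension struct into the `p_next` chain, and the `Extends*` marker traits
// (here `ExtendsMultisamplePropertiesEXT`) gate which structs may be pushed.
// `HypotheticalExt` is a made-up type used only to exercise the chain mechanics;
// it reuses `MULTISAMPLE_PROPERTIES_EXT` as a placeholder `s_type`, which a real
// extension struct would replace with its own value.
#[cfg(test)]
mod multisample_properties_ext_push_next_example {
use super::*;
use std::os::raw::c_void;
// Layout-compatible with `BaseOutStructure`: `s_type` first, then `p_next`.
#[repr(C)]
#[allow(dead_code)]
struct HypotheticalExt {
s_type: StructureType,
p_next: *mut c_void,
}
unsafe impl ExtendsMultisamplePropertiesEXT for HypotheticalExt {}
#[test]
fn push_next_links_extension_into_chain() {
let mut ext = HypotheticalExt {
s_type: StructureType::MULTISAMPLE_PROPERTIES_EXT,
p_next: ::std::ptr::null_mut(),
};
let ext_ptr = &mut ext as *mut HypotheticalExt;
let props = MultisamplePropertiesEXT::builder()
.push_next(&mut ext)
.build();
// The root now points at the pushed struct, which terminates the chain.
assert_eq!(props.p_next as *mut HypotheticalExt, ext_ptr);
assert!(ext.p_next.is_null());
}
}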
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkSamplerReductionModeCreateInfoEXT.html>"]
pub struct SamplerReductionModeCreateInfoEXT {
pub s_type: StructureType,
pub p_next: *const c_void,
pub reduction_mode: SamplerReductionModeEXT,
}
impl ::std::default::Default for SamplerReductionModeCreateInfoEXT {
fn default() -> SamplerReductionModeCreateInfoEXT {
SamplerReductionModeCreateInfoEXT {
s_type: StructureType::SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT,
p_next: ::std::ptr::null(),
reduction_mode: SamplerReductionModeEXT::default(),
}
}
}
impl SamplerReductionModeCreateInfoEXT {
pub fn builder<'a>() -> SamplerReductionModeCreateInfoEXTBuilder<'a> {
SamplerReductionModeCreateInfoEXTBuilder {
inner: SamplerReductionModeCreateInfoEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct SamplerReductionModeCreateInfoEXTBuilder<'a> {
inner: SamplerReductionModeCreateInfoEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsSamplerCreateInfo for SamplerReductionModeCreateInfoEXTBuilder<'_> {}
unsafe impl ExtendsSamplerCreateInfo for SamplerReductionModeCreateInfoEXT {}
impl<'a> ::std::ops::Deref for SamplerReductionModeCreateInfoEXTBuilder<'a> {
type Target = SamplerReductionModeCreateInfoEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for SamplerReductionModeCreateInfoEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> SamplerReductionModeCreateInfoEXTBuilder<'a> {
pub fn reduction_mode(
mut self,
reduction_mode: SamplerReductionModeEXT,
) -> SamplerReductionModeCreateInfoEXTBuilder<'a> {
self.inner.reduction_mode = reduction_mode;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> SamplerReductionModeCreateInfoEXT {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT.html>"]
pub struct PhysicalDeviceBlendOperationAdvancedFeaturesEXT {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub advanced_blend_coherent_operations: Bool32,
}
impl ::std::default::Default for PhysicalDeviceBlendOperationAdvancedFeaturesEXT {
fn default() -> PhysicalDeviceBlendOperationAdvancedFeaturesEXT {
PhysicalDeviceBlendOperationAdvancedFeaturesEXT {
s_type: StructureType::PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT,
p_next: ::std::ptr::null_mut(),
advanced_blend_coherent_operations: Bool32::default(),
}
}
}
impl PhysicalDeviceBlendOperationAdvancedFeaturesEXT {
pub fn builder<'a>() -> PhysicalDeviceBlendOperationAdvancedFeaturesEXTBuilder<'a> {
PhysicalDeviceBlendOperationAdvancedFeaturesEXTBuilder {
inner: PhysicalDeviceBlendOperationAdvancedFeaturesEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDeviceBlendOperationAdvancedFeaturesEXTBuilder<'a> {
inner: PhysicalDeviceBlendOperationAdvancedFeaturesEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceBlendOperationAdvancedFeaturesEXTBuilder<'_> {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceBlendOperationAdvancedFeaturesEXT {}
impl<'a> ::std::ops::Deref for PhysicalDeviceBlendOperationAdvancedFeaturesEXTBuilder<'a> {
type Target = PhysicalDeviceBlendOperationAdvancedFeaturesEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceBlendOperationAdvancedFeaturesEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDeviceBlendOperationAdvancedFeaturesEXTBuilder<'a> {
pub fn advanced_blend_coherent_operations(
mut self,
advanced_blend_coherent_operations: bool,
) -> PhysicalDeviceBlendOperationAdvancedFeaturesEXTBuilder<'a> {
self.inner.advanced_blend_coherent_operations = advanced_blend_coherent_operations.into();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDeviceBlendOperationAdvancedFeaturesEXT {
self.inner
}
}
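// Illustrative sketch, not part of the generated bindings: the `bool`-taking
// setters convert their argument with `.into()` into the raw `Bool32` field.
// Assuming `Bool32` is the plain `u32` alias used throughout these bindings,
// passing `true` stores the value `1`.
#[cfg(test)]
mod blend_operation_advanced_features_ext_example {
use super::*;
#[test]
fn bool_setter_stores_raw_bool32() {
let features = PhysicalDeviceBlendOperationAdvancedFeaturesEXT::builder()
.advanced_blend_coherent_operations(true)
.build();
assert_eq!(features.advanced_blend_coherent_operations, 1);
}
}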
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT.html>"]
pub struct PhysicalDeviceBlendOperationAdvancedPropertiesEXT {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub advanced_blend_max_color_attachments: u32,
pub advanced_blend_independent_blend: Bool32,
pub advanced_blend_non_premultiplied_src_color: Bool32,
pub advanced_blend_non_premultiplied_dst_color: Bool32,
pub advanced_blend_correlated_overlap: Bool32,
pub advanced_blend_all_operations: Bool32,
}
impl ::std::default::Default for PhysicalDeviceBlendOperationAdvancedPropertiesEXT {
fn default() -> PhysicalDeviceBlendOperationAdvancedPropertiesEXT {
PhysicalDeviceBlendOperationAdvancedPropertiesEXT {
s_type: StructureType::PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT,
p_next: ::std::ptr::null_mut(),
advanced_blend_max_color_attachments: u32::default(),
advanced_blend_independent_blend: Bool32::default(),
advanced_blend_non_premultiplied_src_color: Bool32::default(),
advanced_blend_non_premultiplied_dst_color: Bool32::default(),
advanced_blend_correlated_overlap: Bool32::default(),
advanced_blend_all_operations: Bool32::default(),
}
}
}
impl PhysicalDeviceBlendOperationAdvancedPropertiesEXT {
pub fn builder<'a>() -> PhysicalDeviceBlendOperationAdvancedPropertiesEXTBuilder<'a> {
PhysicalDeviceBlendOperationAdvancedPropertiesEXTBuilder {
inner: PhysicalDeviceBlendOperationAdvancedPropertiesEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDeviceBlendOperationAdvancedPropertiesEXTBuilder<'a> {
inner: PhysicalDeviceBlendOperationAdvancedPropertiesEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsPhysicalDeviceProperties2
for PhysicalDeviceBlendOperationAdvancedPropertiesEXTBuilder<'_>
{
}
unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceBlendOperationAdvancedPropertiesEXT {}
impl<'a> ::std::ops::Deref for PhysicalDeviceBlendOperationAdvancedPropertiesEXTBuilder<'a> {
type Target = PhysicalDeviceBlendOperationAdvancedPropertiesEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceBlendOperationAdvancedPropertiesEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDeviceBlendOperationAdvancedPropertiesEXTBuilder<'a> {
pub fn advanced_blend_max_color_attachments(
mut self,
advanced_blend_max_color_attachments: u32,
) -> PhysicalDeviceBlendOperationAdvancedPropertiesEXTBuilder<'a> {
self.inner.advanced_blend_max_color_attachments = advanced_blend_max_color_attachments;
self
}
pub fn advanced_blend_independent_blend(
mut self,
advanced_blend_independent_blend: bool,
) -> PhysicalDeviceBlendOperationAdvancedPropertiesEXTBuilder<'a> {
self.inner.advanced_blend_independent_blend = advanced_blend_independent_blend.into();
self
}
pub fn advanced_blend_non_premultiplied_src_color(
mut self,
advanced_blend_non_premultiplied_src_color: bool,
) -> PhysicalDeviceBlendOperationAdvancedPropertiesEXTBuilder<'a> {
self.inner.advanced_blend_non_premultiplied_src_color =
advanced_blend_non_premultiplied_src_color.into();
self
}
pub fn advanced_blend_non_premultiplied_dst_color(
mut self,
advanced_blend_non_premultiplied_dst_color: bool,
) -> PhysicalDeviceBlendOperationAdvancedPropertiesEXTBuilder<'a> {
self.inner.advanced_blend_non_premultiplied_dst_color =
advanced_blend_non_premultiplied_dst_color.into();
self
}
pub fn advanced_blend_correlated_overlap(
mut self,
advanced_blend_correlated_overlap: bool,
) -> PhysicalDeviceBlendOperationAdvancedPropertiesEXTBuilder<'a> {
self.inner.advanced_blend_correlated_overlap = advanced_blend_correlated_overlap.into();
self
}
pub fn advanced_blend_all_operations(
mut self,
advanced_blend_all_operations: bool,
) -> PhysicalDeviceBlendOperationAdvancedPropertiesEXTBuilder<'a> {
self.inner.advanced_blend_all_operations = advanced_blend_all_operations.into();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDeviceBlendOperationAdvancedPropertiesEXT {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPipelineColorBlendAdvancedStateCreateInfoEXT.html>"]
pub struct PipelineColorBlendAdvancedStateCreateInfoEXT {
pub s_type: StructureType,
pub p_next: *const c_void,
pub src_premultiplied: Bool32,
pub dst_premultiplied: Bool32,
pub blend_overlap: BlendOverlapEXT,
}
impl ::std::default::Default for PipelineColorBlendAdvancedStateCreateInfoEXT {
fn default() -> PipelineColorBlendAdvancedStateCreateInfoEXT {
PipelineColorBlendAdvancedStateCreateInfoEXT {
s_type: StructureType::PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT,
p_next: ::std::ptr::null(),
src_premultiplied: Bool32::default(),
dst_premultiplied: Bool32::default(),
blend_overlap: BlendOverlapEXT::default(),
}
}
}
impl PipelineColorBlendAdvancedStateCreateInfoEXT {
pub fn builder<'a>() -> PipelineColorBlendAdvancedStateCreateInfoEXTBuilder<'a> {
PipelineColorBlendAdvancedStateCreateInfoEXTBuilder {
inner: PipelineColorBlendAdvancedStateCreateInfoEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PipelineColorBlendAdvancedStateCreateInfoEXTBuilder<'a> {
inner: PipelineColorBlendAdvancedStateCreateInfoEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsPipelineColorBlendStateCreateInfo
for PipelineColorBlendAdvancedStateCreateInfoEXTBuilder<'_>
{
}
unsafe impl ExtendsPipelineColorBlendStateCreateInfo
for PipelineColorBlendAdvancedStateCreateInfoEXT
{
}
impl<'a> ::std::ops::Deref for PipelineColorBlendAdvancedStateCreateInfoEXTBuilder<'a> {
type Target = PipelineColorBlendAdvancedStateCreateInfoEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PipelineColorBlendAdvancedStateCreateInfoEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PipelineColorBlendAdvancedStateCreateInfoEXTBuilder<'a> {
pub fn src_premultiplied(
mut self,
src_premultiplied: bool,
) -> PipelineColorBlendAdvancedStateCreateInfoEXTBuilder<'a> {
self.inner.src_premultiplied = src_premultiplied.into();
self
}
pub fn dst_premultiplied(
mut self,
dst_premultiplied: bool,
) -> PipelineColorBlendAdvancedStateCreateInfoEXTBuilder<'a> {
self.inner.dst_premultiplied = dst_premultiplied.into();
self
}
pub fn blend_overlap(
mut self,
blend_overlap: BlendOverlapEXT,
) -> PipelineColorBlendAdvancedStateCreateInfoEXTBuilder<'a> {
self.inner.blend_overlap = blend_overlap;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PipelineColorBlendAdvancedStateCreateInfoEXT {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDeviceInlineUniformBlockFeaturesEXT.html>"]
pub struct PhysicalDeviceInlineUniformBlockFeaturesEXT {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub inline_uniform_block: Bool32,
pub descriptor_binding_inline_uniform_block_update_after_bind: Bool32,
}
impl ::std::default::Default for PhysicalDeviceInlineUniformBlockFeaturesEXT {
fn default() -> PhysicalDeviceInlineUniformBlockFeaturesEXT {
PhysicalDeviceInlineUniformBlockFeaturesEXT {
s_type: StructureType::PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT,
p_next: ::std::ptr::null_mut(),
inline_uniform_block: Bool32::default(),
descriptor_binding_inline_uniform_block_update_after_bind: Bool32::default(),
}
}
}
impl PhysicalDeviceInlineUniformBlockFeaturesEXT {
pub fn builder<'a>() -> PhysicalDeviceInlineUniformBlockFeaturesEXTBuilder<'a> {
PhysicalDeviceInlineUniformBlockFeaturesEXTBuilder {
inner: PhysicalDeviceInlineUniformBlockFeaturesEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDeviceInlineUniformBlockFeaturesEXTBuilder<'a> {
inner: PhysicalDeviceInlineUniformBlockFeaturesEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceInlineUniformBlockFeaturesEXTBuilder<'_> {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceInlineUniformBlockFeaturesEXT {}
impl<'a> ::std::ops::Deref for PhysicalDeviceInlineUniformBlockFeaturesEXTBuilder<'a> {
type Target = PhysicalDeviceInlineUniformBlockFeaturesEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceInlineUniformBlockFeaturesEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDeviceInlineUniformBlockFeaturesEXTBuilder<'a> {
pub fn inline_uniform_block(
mut self,
inline_uniform_block: bool,
) -> PhysicalDeviceInlineUniformBlockFeaturesEXTBuilder<'a> {
self.inner.inline_uniform_block = inline_uniform_block.into();
self
}
pub fn descriptor_binding_inline_uniform_block_update_after_bind(
mut self,
descriptor_binding_inline_uniform_block_update_after_bind: bool,
) -> PhysicalDeviceInlineUniformBlockFeaturesEXTBuilder<'a> {
self.inner
.descriptor_binding_inline_uniform_block_update_after_bind =
descriptor_binding_inline_uniform_block_update_after_bind.into();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDeviceInlineUniformBlockFeaturesEXT {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDeviceInlineUniformBlockPropertiesEXT.html>"]
pub struct PhysicalDeviceInlineUniformBlockPropertiesEXT {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub max_inline_uniform_block_size: u32,
pub max_per_stage_descriptor_inline_uniform_blocks: u32,
pub max_per_stage_descriptor_update_after_bind_inline_uniform_blocks: u32,
pub max_descriptor_set_inline_uniform_blocks: u32,
pub max_descriptor_set_update_after_bind_inline_uniform_blocks: u32,
}
impl ::std::default::Default for PhysicalDeviceInlineUniformBlockPropertiesEXT {
fn default() -> PhysicalDeviceInlineUniformBlockPropertiesEXT {
PhysicalDeviceInlineUniformBlockPropertiesEXT {
s_type: StructureType::PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT,
p_next: ::std::ptr::null_mut(),
max_inline_uniform_block_size: u32::default(),
max_per_stage_descriptor_inline_uniform_blocks: u32::default(),
max_per_stage_descriptor_update_after_bind_inline_uniform_blocks: u32::default(),
max_descriptor_set_inline_uniform_blocks: u32::default(),
max_descriptor_set_update_after_bind_inline_uniform_blocks: u32::default(),
}
}
}
impl PhysicalDeviceInlineUniformBlockPropertiesEXT {
pub fn builder<'a>() -> PhysicalDeviceInlineUniformBlockPropertiesEXTBuilder<'a> {
PhysicalDeviceInlineUniformBlockPropertiesEXTBuilder {
inner: PhysicalDeviceInlineUniformBlockPropertiesEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDeviceInlineUniformBlockPropertiesEXTBuilder<'a> {
inner: PhysicalDeviceInlineUniformBlockPropertiesEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsPhysicalDeviceProperties2
for PhysicalDeviceInlineUniformBlockPropertiesEXTBuilder<'_>
{
}
unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceInlineUniformBlockPropertiesEXT {}
impl<'a> ::std::ops::Deref for PhysicalDeviceInlineUniformBlockPropertiesEXTBuilder<'a> {
type Target = PhysicalDeviceInlineUniformBlockPropertiesEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceInlineUniformBlockPropertiesEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDeviceInlineUniformBlockPropertiesEXTBuilder<'a> {
pub fn max_inline_uniform_block_size(
mut self,
max_inline_uniform_block_size: u32,
) -> PhysicalDeviceInlineUniformBlockPropertiesEXTBuilder<'a> {
self.inner.max_inline_uniform_block_size = max_inline_uniform_block_size;
self
}
pub fn max_per_stage_descriptor_inline_uniform_blocks(
mut self,
max_per_stage_descriptor_inline_uniform_blocks: u32,
) -> PhysicalDeviceInlineUniformBlockPropertiesEXTBuilder<'a> {
self.inner.max_per_stage_descriptor_inline_uniform_blocks =
max_per_stage_descriptor_inline_uniform_blocks;
self
}
pub fn max_per_stage_descriptor_update_after_bind_inline_uniform_blocks(
mut self,
max_per_stage_descriptor_update_after_bind_inline_uniform_blocks: u32,
) -> PhysicalDeviceInlineUniformBlockPropertiesEXTBuilder<'a> {
self.inner
.max_per_stage_descriptor_update_after_bind_inline_uniform_blocks =
max_per_stage_descriptor_update_after_bind_inline_uniform_blocks;
self
}
pub fn max_descriptor_set_inline_uniform_blocks(
mut self,
max_descriptor_set_inline_uniform_blocks: u32,
) -> PhysicalDeviceInlineUniformBlockPropertiesEXTBuilder<'a> {
self.inner.max_descriptor_set_inline_uniform_blocks =
max_descriptor_set_inline_uniform_blocks;
self
}
pub fn max_descriptor_set_update_after_bind_inline_uniform_blocks(
mut self,
max_descriptor_set_update_after_bind_inline_uniform_blocks: u32,
) -> PhysicalDeviceInlineUniformBlockPropertiesEXTBuilder<'a> {
self.inner
.max_descriptor_set_update_after_bind_inline_uniform_blocks =
max_descriptor_set_update_after_bind_inline_uniform_blocks;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDeviceInlineUniformBlockPropertiesEXT {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkWriteDescriptorSetInlineUniformBlockEXT.html>"]
pub struct WriteDescriptorSetInlineUniformBlockEXT {
pub s_type: StructureType,
pub p_next: *const c_void,
pub data_size: u32,
pub p_data: *const c_void,
}
impl ::std::default::Default for WriteDescriptorSetInlineUniformBlockEXT {
fn default() -> WriteDescriptorSetInlineUniformBlockEXT {
WriteDescriptorSetInlineUniformBlockEXT {
s_type: StructureType::WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT,
p_next: ::std::ptr::null(),
data_size: u32::default(),
p_data: ::std::ptr::null(),
}
}
}
impl WriteDescriptorSetInlineUniformBlockEXT {
pub fn builder<'a>() -> WriteDescriptorSetInlineUniformBlockEXTBuilder<'a> {
WriteDescriptorSetInlineUniformBlockEXTBuilder {
inner: WriteDescriptorSetInlineUniformBlockEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct WriteDescriptorSetInlineUniformBlockEXTBuilder<'a> {
inner: WriteDescriptorSetInlineUniformBlockEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsWriteDescriptorSet for WriteDescriptorSetInlineUniformBlockEXTBuilder<'_> {}
unsafe impl ExtendsWriteDescriptorSet for WriteDescriptorSetInlineUniformBlockEXT {}
impl<'a> ::std::ops::Deref for WriteDescriptorSetInlineUniformBlockEXTBuilder<'a> {
type Target = WriteDescriptorSetInlineUniformBlockEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for WriteDescriptorSetInlineUniformBlockEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> WriteDescriptorSetInlineUniformBlockEXTBuilder<'a> {
pub fn data(mut self, data: &'a [u8]) -> WriteDescriptorSetInlineUniformBlockEXTBuilder<'a> {
self.inner.data_size = data.len() as _;
self.inner.p_data = data.as_ptr() as *const c_void;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> WriteDescriptorSetInlineUniformBlockEXT {
self.inner
}
}
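// Illustrative sketch, not part of the generated bindings: inline uniform block
// writes take their payload as a raw byte slice, and the `data` setter fills
// `data_size` and `p_data` in one call. The byte values below are arbitrary.
#[cfg(test)]
mod write_descriptor_set_inline_uniform_block_ext_example {
use super::*;
use std::os::raw::c_void;
#[test]
fn data_setter_fills_size_and_pointer() {
let payload: [u8; 8] = [0, 1, 2, 3, 4, 5, 6, 7];
let write = WriteDescriptorSetInlineUniformBlockEXT::builder()
.data(&payload)
.build();
assert_eq!(write.data_size, payload.len() as u32);
assert_eq!(write.p_data, payload.as_ptr() as *const c_void);
}
}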
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDescriptorPoolInlineUniformBlockCreateInfoEXT.html>"]
pub struct DescriptorPoolInlineUniformBlockCreateInfoEXT {
pub s_type: StructureType,
pub p_next: *const c_void,
pub max_inline_uniform_block_bindings: u32,
}
impl ::std::default::Default for DescriptorPoolInlineUniformBlockCreateInfoEXT {
fn default() -> DescriptorPoolInlineUniformBlockCreateInfoEXT {
DescriptorPoolInlineUniformBlockCreateInfoEXT {
s_type: StructureType::DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO_EXT,
p_next: ::std::ptr::null(),
max_inline_uniform_block_bindings: u32::default(),
}
}
}
impl DescriptorPoolInlineUniformBlockCreateInfoEXT {
pub fn builder<'a>() -> DescriptorPoolInlineUniformBlockCreateInfoEXTBuilder<'a> {
DescriptorPoolInlineUniformBlockCreateInfoEXTBuilder {
inner: DescriptorPoolInlineUniformBlockCreateInfoEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct DescriptorPoolInlineUniformBlockCreateInfoEXTBuilder<'a> {
inner: DescriptorPoolInlineUniformBlockCreateInfoEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsDescriptorPoolCreateInfo
for DescriptorPoolInlineUniformBlockCreateInfoEXTBuilder<'_>
{
}
unsafe impl ExtendsDescriptorPoolCreateInfo for DescriptorPoolInlineUniformBlockCreateInfoEXT {}
impl<'a> ::std::ops::Deref for DescriptorPoolInlineUniformBlockCreateInfoEXTBuilder<'a> {
type Target = DescriptorPoolInlineUniformBlockCreateInfoEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for DescriptorPoolInlineUniformBlockCreateInfoEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> DescriptorPoolInlineUniformBlockCreateInfoEXTBuilder<'a> {
pub fn max_inline_uniform_block_bindings(
mut self,
max_inline_uniform_block_bindings: u32,
) -> DescriptorPoolInlineUniformBlockCreateInfoEXTBuilder<'a> {
self.inner.max_inline_uniform_block_bindings = max_inline_uniform_block_bindings;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> DescriptorPoolInlineUniformBlockCreateInfoEXT {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPipelineCoverageModulationStateCreateInfoNV.html>"]
pub struct PipelineCoverageModulationStateCreateInfoNV {
pub s_type: StructureType,
pub p_next: *const c_void,
pub flags: PipelineCoverageModulationStateCreateFlagsNV,
pub coverage_modulation_mode: CoverageModulationModeNV,
pub coverage_modulation_table_enable: Bool32,
pub coverage_modulation_table_count: u32,
pub p_coverage_modulation_table: *const f32,
}
impl ::std::default::Default for PipelineCoverageModulationStateCreateInfoNV {
fn default() -> PipelineCoverageModulationStateCreateInfoNV {
PipelineCoverageModulationStateCreateInfoNV {
s_type: StructureType::PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV,
p_next: ::std::ptr::null(),
flags: PipelineCoverageModulationStateCreateFlagsNV::default(),
coverage_modulation_mode: CoverageModulationModeNV::default(),
coverage_modulation_table_enable: Bool32::default(),
coverage_modulation_table_count: u32::default(),
p_coverage_modulation_table: ::std::ptr::null(),
}
}
}
impl PipelineCoverageModulationStateCreateInfoNV {
pub fn builder<'a>() -> PipelineCoverageModulationStateCreateInfoNVBuilder<'a> {
PipelineCoverageModulationStateCreateInfoNVBuilder {
inner: PipelineCoverageModulationStateCreateInfoNV::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PipelineCoverageModulationStateCreateInfoNVBuilder<'a> {
inner: PipelineCoverageModulationStateCreateInfoNV,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsPipelineMultisampleStateCreateInfo
for PipelineCoverageModulationStateCreateInfoNVBuilder<'_>
{
}
unsafe impl ExtendsPipelineMultisampleStateCreateInfo
for PipelineCoverageModulationStateCreateInfoNV
{
}
impl<'a> ::std::ops::Deref for PipelineCoverageModulationStateCreateInfoNVBuilder<'a> {
type Target = PipelineCoverageModulationStateCreateInfoNV;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PipelineCoverageModulationStateCreateInfoNVBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PipelineCoverageModulationStateCreateInfoNVBuilder<'a> {
pub fn flags(
mut self,
flags: PipelineCoverageModulationStateCreateFlagsNV,
) -> PipelineCoverageModulationStateCreateInfoNVBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn coverage_modulation_mode(
mut self,
coverage_modulation_mode: CoverageModulationModeNV,
) -> PipelineCoverageModulationStateCreateInfoNVBuilder<'a> {
self.inner.coverage_modulation_mode = coverage_modulation_mode;
self
}
pub fn coverage_modulation_table_enable(
mut self,
coverage_modulation_table_enable: bool,
) -> PipelineCoverageModulationStateCreateInfoNVBuilder<'a> {
self.inner.coverage_modulation_table_enable = coverage_modulation_table_enable.into();
self
}
pub fn coverage_modulation_table(
mut self,
coverage_modulation_table: &'a [f32],
) -> PipelineCoverageModulationStateCreateInfoNVBuilder<'a> {
self.inner.coverage_modulation_table_count = coverage_modulation_table.len() as _;
self.inner.p_coverage_modulation_table = coverage_modulation_table.as_ptr();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PipelineCoverageModulationStateCreateInfoNV {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkImageFormatListCreateInfoKHR.html>"]
pub struct ImageFormatListCreateInfoKHR {
pub s_type: StructureType,
pub p_next: *const c_void,
pub view_format_count: u32,
pub p_view_formats: *const Format,
}
impl ::std::default::Default for ImageFormatListCreateInfoKHR {
fn default() -> ImageFormatListCreateInfoKHR {
ImageFormatListCreateInfoKHR {
s_type: StructureType::IMAGE_FORMAT_LIST_CREATE_INFO_KHR,
p_next: ::std::ptr::null(),
view_format_count: u32::default(),
p_view_formats: ::std::ptr::null(),
}
}
}
impl ImageFormatListCreateInfoKHR {
pub fn builder<'a>() -> ImageFormatListCreateInfoKHRBuilder<'a> {
ImageFormatListCreateInfoKHRBuilder {
inner: ImageFormatListCreateInfoKHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ImageFormatListCreateInfoKHRBuilder<'a> {
inner: ImageFormatListCreateInfoKHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsImageCreateInfo for ImageFormatListCreateInfoKHRBuilder<'_> {}
unsafe impl ExtendsImageCreateInfo for ImageFormatListCreateInfoKHR {}
unsafe impl ExtendsSwapchainCreateInfoKHR for ImageFormatListCreateInfoKHRBuilder<'_> {}
unsafe impl ExtendsSwapchainCreateInfoKHR for ImageFormatListCreateInfoKHR {}
unsafe impl ExtendsPhysicalDeviceImageFormatInfo2 for ImageFormatListCreateInfoKHRBuilder<'_> {}
unsafe impl ExtendsPhysicalDeviceImageFormatInfo2 for ImageFormatListCreateInfoKHR {}
impl<'a> ::std::ops::Deref for ImageFormatListCreateInfoKHRBuilder<'a> {
type Target = ImageFormatListCreateInfoKHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ImageFormatListCreateInfoKHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ImageFormatListCreateInfoKHRBuilder<'a> {
pub fn view_formats(
mut self,
view_formats: &'a [Format],
) -> ImageFormatListCreateInfoKHRBuilder<'a> {
self.inner.view_format_count = view_formats.len() as _;
self.inner.p_view_formats = view_formats.as_ptr();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ImageFormatListCreateInfoKHR {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkValidationCacheCreateInfoEXT.html>"]
pub struct ValidationCacheCreateInfoEXT {
pub s_type: StructureType,
pub p_next: *const c_void,
pub flags: ValidationCacheCreateFlagsEXT,
pub initial_data_size: usize,
pub p_initial_data: *const c_void,
}
impl ::std::default::Default for ValidationCacheCreateInfoEXT {
fn default() -> ValidationCacheCreateInfoEXT {
ValidationCacheCreateInfoEXT {
s_type: StructureType::VALIDATION_CACHE_CREATE_INFO_EXT,
p_next: ::std::ptr::null(),
flags: ValidationCacheCreateFlagsEXT::default(),
initial_data_size: usize::default(),
p_initial_data: ::std::ptr::null(),
}
}
}
impl ValidationCacheCreateInfoEXT {
pub fn builder<'a>() -> ValidationCacheCreateInfoEXTBuilder<'a> {
ValidationCacheCreateInfoEXTBuilder {
inner: ValidationCacheCreateInfoEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ValidationCacheCreateInfoEXTBuilder<'a> {
inner: ValidationCacheCreateInfoEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsValidationCacheCreateInfoEXT {}
impl<'a> ::std::ops::Deref for ValidationCacheCreateInfoEXTBuilder<'a> {
type Target = ValidationCacheCreateInfoEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ValidationCacheCreateInfoEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ValidationCacheCreateInfoEXTBuilder<'a> {
pub fn flags(
mut self,
flags: ValidationCacheCreateFlagsEXT,
) -> ValidationCacheCreateInfoEXTBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn initial_data(
mut self,
initial_data: &'a [u8],
) -> ValidationCacheCreateInfoEXTBuilder<'a> {
self.inner.initial_data_size = initial_data.len() as _;
self.inner.p_initial_data = initial_data.as_ptr() as *const c_void;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsValidationCacheCreateInfoEXT>(
mut self,
next: &'a mut T,
) -> ValidationCacheCreateInfoEXTBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ValidationCacheCreateInfoEXT {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkShaderModuleValidationCacheCreateInfoEXT.html>"]
pub struct ShaderModuleValidationCacheCreateInfoEXT {
pub s_type: StructureType,
pub p_next: *const c_void,
pub validation_cache: ValidationCacheEXT,
}
impl ::std::default::Default for ShaderModuleValidationCacheCreateInfoEXT {
fn default() -> ShaderModuleValidationCacheCreateInfoEXT {
ShaderModuleValidationCacheCreateInfoEXT {
s_type: StructureType::SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT,
p_next: ::std::ptr::null(),
validation_cache: ValidationCacheEXT::default(),
}
}
}
impl ShaderModuleValidationCacheCreateInfoEXT {
pub fn builder<'a>() -> ShaderModuleValidationCacheCreateInfoEXTBuilder<'a> {
ShaderModuleValidationCacheCreateInfoEXTBuilder {
inner: ShaderModuleValidationCacheCreateInfoEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ShaderModuleValidationCacheCreateInfoEXTBuilder<'a> {
inner: ShaderModuleValidationCacheCreateInfoEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsShaderModuleCreateInfo for ShaderModuleValidationCacheCreateInfoEXTBuilder<'_> {}
unsafe impl ExtendsShaderModuleCreateInfo for ShaderModuleValidationCacheCreateInfoEXT {}
impl<'a> ::std::ops::Deref for ShaderModuleValidationCacheCreateInfoEXTBuilder<'a> {
type Target = ShaderModuleValidationCacheCreateInfoEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ShaderModuleValidationCacheCreateInfoEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ShaderModuleValidationCacheCreateInfoEXTBuilder<'a> {
pub fn validation_cache(
mut self,
validation_cache: ValidationCacheEXT,
) -> ShaderModuleValidationCacheCreateInfoEXTBuilder<'a> {
self.inner.validation_cache = validation_cache;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ShaderModuleValidationCacheCreateInfoEXT {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDeviceMaintenance3Properties.html>"]
pub struct PhysicalDeviceMaintenance3Properties {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub max_per_set_descriptors: u32,
pub max_memory_allocation_size: DeviceSize,
}
impl ::std::default::Default for PhysicalDeviceMaintenance3Properties {
fn default() -> PhysicalDeviceMaintenance3Properties {
PhysicalDeviceMaintenance3Properties {
s_type: StructureType::PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES,
p_next: ::std::ptr::null_mut(),
max_per_set_descriptors: u32::default(),
max_memory_allocation_size: DeviceSize::default(),
}
}
}
impl PhysicalDeviceMaintenance3Properties {
pub fn builder<'a>() -> PhysicalDeviceMaintenance3PropertiesBuilder<'a> {
PhysicalDeviceMaintenance3PropertiesBuilder {
inner: PhysicalDeviceMaintenance3Properties::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDeviceMaintenance3PropertiesBuilder<'a> {
inner: PhysicalDeviceMaintenance3Properties,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceMaintenance3PropertiesBuilder<'_> {}
unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceMaintenance3Properties {}
impl<'a> ::std::ops::Deref for PhysicalDeviceMaintenance3PropertiesBuilder<'a> {
type Target = PhysicalDeviceMaintenance3Properties;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceMaintenance3PropertiesBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDeviceMaintenance3PropertiesBuilder<'a> {
pub fn max_per_set_descriptors(
mut self,
max_per_set_descriptors: u32,
) -> PhysicalDeviceMaintenance3PropertiesBuilder<'a> {
self.inner.max_per_set_descriptors = max_per_set_descriptors;
self
}
pub fn max_memory_allocation_size(
mut self,
max_memory_allocation_size: DeviceSize,
) -> PhysicalDeviceMaintenance3PropertiesBuilder<'a> {
self.inner.max_memory_allocation_size = max_memory_allocation_size;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDeviceMaintenance3Properties {
self.inner
}
}
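// Illustrative sketch, not part of the generated bindings: because builders
// implement `Deref` to their Vulkan struct, fields can be read through the
// builder without calling `build()`, and a plain reference to the builder can
// be handed to code that expects a reference to the raw struct.
#[cfg(test)]
mod physical_device_maintenance3_properties_example {
use super::*;
#[test]
fn builder_derefs_to_struct() {
let builder = PhysicalDeviceMaintenance3Properties::builder()
.max_per_set_descriptors(1024);
// Read a field through `Deref` without discarding the builder.
assert_eq!(builder.max_per_set_descriptors, 1024);
// A `&Builder` coerces to `&PhysicalDeviceMaintenance3Properties`.
let props: &PhysicalDeviceMaintenance3Properties = &builder;
assert_eq!(props.max_per_set_descriptors, 1024);
}
}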
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDescriptorSetLayoutSupport.html>"]
pub struct DescriptorSetLayoutSupport {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub supported: Bool32,
}
impl ::std::default::Default for DescriptorSetLayoutSupport {
fn default() -> DescriptorSetLayoutSupport {
DescriptorSetLayoutSupport {
s_type: StructureType::DESCRIPTOR_SET_LAYOUT_SUPPORT,
p_next: ::std::ptr::null_mut(),
supported: Bool32::default(),
}
}
}
impl DescriptorSetLayoutSupport {
pub fn builder<'a>() -> DescriptorSetLayoutSupportBuilder<'a> {
DescriptorSetLayoutSupportBuilder {
inner: DescriptorSetLayoutSupport::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct DescriptorSetLayoutSupportBuilder<'a> {
inner: DescriptorSetLayoutSupport,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsDescriptorSetLayoutSupport {}
impl<'a> ::std::ops::Deref for DescriptorSetLayoutSupportBuilder<'a> {
type Target = DescriptorSetLayoutSupport;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for DescriptorSetLayoutSupportBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> DescriptorSetLayoutSupportBuilder<'a> {
pub fn supported(mut self, supported: bool) -> DescriptorSetLayoutSupportBuilder<'a> {
self.inner.supported = supported.into();
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsDescriptorSetLayoutSupport>(
mut self,
next: &'a mut T,
) -> DescriptorSetLayoutSupportBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> DescriptorSetLayoutSupport {
self.inner
}
}
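// Usage sketch (illustrative comment, not part of the generated bindings): this output struct is
// filled by `vkGetDescriptorSetLayoutSupport` for a candidate `DescriptorSetLayoutCreateInfo`;
// extension structs implementing `ExtendsDescriptorSetLayoutSupport` can be chained in with
// `push_next` before the call.
//
//     let mut support = DescriptorSetLayoutSupport::builder();
//     // ...pass the candidate layout create info and `&mut *support` to
//     // vkGetDescriptorSetLayoutSupport...
//     let layout_is_supported = support.supported == TRUE;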
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDeviceShaderDrawParameterFeatures.html>"]
pub struct PhysicalDeviceShaderDrawParameterFeatures {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub shader_draw_parameters: Bool32,
}
impl ::std::default::Default for PhysicalDeviceShaderDrawParameterFeatures {
fn default() -> PhysicalDeviceShaderDrawParameterFeatures {
PhysicalDeviceShaderDrawParameterFeatures {
s_type: StructureType::PHYSICAL_DEVICE_SHADER_DRAW_PARAMETER_FEATURES,
p_next: ::std::ptr::null_mut(),
shader_draw_parameters: Bool32::default(),
}
}
}
impl PhysicalDeviceShaderDrawParameterFeatures {
pub fn builder<'a>() -> PhysicalDeviceShaderDrawParameterFeaturesBuilder<'a> {
PhysicalDeviceShaderDrawParameterFeaturesBuilder {
inner: PhysicalDeviceShaderDrawParameterFeatures::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDeviceShaderDrawParameterFeaturesBuilder<'a> {
inner: PhysicalDeviceShaderDrawParameterFeatures,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceShaderDrawParameterFeaturesBuilder<'_> {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceShaderDrawParameterFeatures {}
impl<'a> ::std::ops::Deref for PhysicalDeviceShaderDrawParameterFeaturesBuilder<'a> {
type Target = PhysicalDeviceShaderDrawParameterFeatures;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceShaderDrawParameterFeaturesBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDeviceShaderDrawParameterFeaturesBuilder<'a> {
pub fn shader_draw_parameters(
mut self,
shader_draw_parameters: bool,
) -> PhysicalDeviceShaderDrawParameterFeaturesBuilder<'a> {
self.inner.shader_draw_parameters = shader_draw_parameters.into();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDeviceShaderDrawParameterFeatures {
self.inner
}
}
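// Usage sketch (illustrative comment, not part of the generated bindings): as an
// `ExtendsDeviceCreateInfo` struct this is enabled by pushing it onto the `DeviceCreateInfo`
// chain at device creation; queue create infos and extension lists are application-specific and
// omitted here.
//
//     let mut draw_params =
//         PhysicalDeviceShaderDrawParameterFeatures::builder().shader_draw_parameters(true);
//     let device_info = DeviceCreateInfo::builder().push_next(&mut draw_params);
//     // ...pass `&*device_info` to vkCreateDevice...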
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDeviceFloat16Int8FeaturesKHR.html>"]
pub struct PhysicalDeviceFloat16Int8FeaturesKHR {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub shader_float16: Bool32,
pub shader_int8: Bool32,
}
impl ::std::default::Default for PhysicalDeviceFloat16Int8FeaturesKHR {
fn default() -> PhysicalDeviceFloat16Int8FeaturesKHR {
PhysicalDeviceFloat16Int8FeaturesKHR {
s_type: StructureType::PHYSICAL_DEVICE_FLOAT16_INT8_FEATURES_KHR,
p_next: ::std::ptr::null_mut(),
shader_float16: Bool32::default(),
shader_int8: Bool32::default(),
}
}
}
impl PhysicalDeviceFloat16Int8FeaturesKHR {
pub fn builder<'a>() -> PhysicalDeviceFloat16Int8FeaturesKHRBuilder<'a> {
PhysicalDeviceFloat16Int8FeaturesKHRBuilder {
inner: PhysicalDeviceFloat16Int8FeaturesKHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDeviceFloat16Int8FeaturesKHRBuilder<'a> {
inner: PhysicalDeviceFloat16Int8FeaturesKHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceFloat16Int8FeaturesKHRBuilder<'_> {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceFloat16Int8FeaturesKHR {}
impl<'a> ::std::ops::Deref for PhysicalDeviceFloat16Int8FeaturesKHRBuilder<'a> {
type Target = PhysicalDeviceFloat16Int8FeaturesKHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceFloat16Int8FeaturesKHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDeviceFloat16Int8FeaturesKHRBuilder<'a> {
pub fn shader_float16(
mut self,
shader_float16: bool,
) -> PhysicalDeviceFloat16Int8FeaturesKHRBuilder<'a> {
self.inner.shader_float16 = shader_float16.into();
self
}
pub fn shader_int8(
mut self,
shader_int8: bool,
) -> PhysicalDeviceFloat16Int8FeaturesKHRBuilder<'a> {
self.inner.shader_int8 = shader_int8.into();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDeviceFloat16Int8FeaturesKHR {
self.inner
}
}
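// Usage sketch (illustrative comment, not part of the generated bindings): several feature
// structs can share one `p_next` chain; each `push_next` call prepends another extension struct
// onto the same `DeviceCreateInfo`.
//
//     let mut float16_int8 = PhysicalDeviceFloat16Int8FeaturesKHR::builder()
//         .shader_float16(true)
//         .shader_int8(true);
//     let mut draw_params =
//         PhysicalDeviceShaderDrawParameterFeatures::builder().shader_draw_parameters(true);
//     let device_info = DeviceCreateInfo::builder()
//         .push_next(&mut float16_int8)
//         .push_next(&mut draw_params);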
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDeviceFloatControlsPropertiesKHR.html>"]
pub struct PhysicalDeviceFloatControlsPropertiesKHR {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub separate_denorm_settings: Bool32,
pub separate_rounding_mode_settings: Bool32,
pub shader_signed_zero_inf_nan_preserve_float16: Bool32,
pub shader_signed_zero_inf_nan_preserve_float32: Bool32,
pub shader_signed_zero_inf_nan_preserve_float64: Bool32,
pub shader_denorm_preserve_float16: Bool32,
pub shader_denorm_preserve_float32: Bool32,
pub shader_denorm_preserve_float64: Bool32,
pub shader_denorm_flush_to_zero_float16: Bool32,
pub shader_denorm_flush_to_zero_float32: Bool32,
pub shader_denorm_flush_to_zero_float64: Bool32,
pub shader_rounding_mode_rte_float16: Bool32,
pub shader_rounding_mode_rte_float32: Bool32,
pub shader_rounding_mode_rte_float64: Bool32,
pub shader_rounding_mode_rtz_float16: Bool32,
pub shader_rounding_mode_rtz_float32: Bool32,
pub shader_rounding_mode_rtz_float64: Bool32,
}
impl ::std::default::Default for PhysicalDeviceFloatControlsPropertiesKHR {
fn default() -> PhysicalDeviceFloatControlsPropertiesKHR {
PhysicalDeviceFloatControlsPropertiesKHR {
s_type: StructureType::PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR,
p_next: ::std::ptr::null_mut(),
separate_denorm_settings: Bool32::default(),
separate_rounding_mode_settings: Bool32::default(),
shader_signed_zero_inf_nan_preserve_float16: Bool32::default(),
shader_signed_zero_inf_nan_preserve_float32: Bool32::default(),
shader_signed_zero_inf_nan_preserve_float64: Bool32::default(),
shader_denorm_preserve_float16: Bool32::default(),
shader_denorm_preserve_float32: Bool32::default(),
shader_denorm_preserve_float64: Bool32::default(),
shader_denorm_flush_to_zero_float16: Bool32::default(),
shader_denorm_flush_to_zero_float32: Bool32::default(),
shader_denorm_flush_to_zero_float64: Bool32::default(),
shader_rounding_mode_rte_float16: Bool32::default(),
shader_rounding_mode_rte_float32: Bool32::default(),
shader_rounding_mode_rte_float64: Bool32::default(),
shader_rounding_mode_rtz_float16: Bool32::default(),
shader_rounding_mode_rtz_float32: Bool32::default(),
shader_rounding_mode_rtz_float64: Bool32::default(),
}
}
}
impl PhysicalDeviceFloatControlsPropertiesKHR {
pub fn builder<'a>() -> PhysicalDeviceFloatControlsPropertiesKHRBuilder<'a> {
PhysicalDeviceFloatControlsPropertiesKHRBuilder {
inner: PhysicalDeviceFloatControlsPropertiesKHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDeviceFloatControlsPropertiesKHRBuilder<'a> {
inner: PhysicalDeviceFloatControlsPropertiesKHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsPhysicalDeviceProperties2
for PhysicalDeviceFloatControlsPropertiesKHRBuilder<'_>
{
}
unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceFloatControlsPropertiesKHR {}
impl<'a> ::std::ops::Deref for PhysicalDeviceFloatControlsPropertiesKHRBuilder<'a> {
type Target = PhysicalDeviceFloatControlsPropertiesKHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceFloatControlsPropertiesKHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDeviceFloatControlsPropertiesKHRBuilder<'a> {
pub fn separate_denorm_settings(
mut self,
separate_denorm_settings: bool,
) -> PhysicalDeviceFloatControlsPropertiesKHRBuilder<'a> {
self.inner.separate_denorm_settings = separate_denorm_settings.into();
self
}
pub fn separate_rounding_mode_settings(
mut self,
separate_rounding_mode_settings: bool,
) -> PhysicalDeviceFloatControlsPropertiesKHRBuilder<'a> {
self.inner.separate_rounding_mode_settings = separate_rounding_mode_settings.into();
self
}
pub fn shader_signed_zero_inf_nan_preserve_float16(
mut self,
shader_signed_zero_inf_nan_preserve_float16: bool,
) -> PhysicalDeviceFloatControlsPropertiesKHRBuilder<'a> {
self.inner.shader_signed_zero_inf_nan_preserve_float16 =
shader_signed_zero_inf_nan_preserve_float16.into();
self
}
pub fn shader_signed_zero_inf_nan_preserve_float32(
mut self,
shader_signed_zero_inf_nan_preserve_float32: bool,
) -> PhysicalDeviceFloatControlsPropertiesKHRBuilder<'a> {
self.inner.shader_signed_zero_inf_nan_preserve_float32 =
shader_signed_zero_inf_nan_preserve_float32.into();
self
}
pub fn shader_signed_zero_inf_nan_preserve_float64(
mut self,
shader_signed_zero_inf_nan_preserve_float64: bool,
) -> PhysicalDeviceFloatControlsPropertiesKHRBuilder<'a> {
self.inner.shader_signed_zero_inf_nan_preserve_float64 =
shader_signed_zero_inf_nan_preserve_float64.into();
self
}
pub fn shader_denorm_preserve_float16(
mut self,
shader_denorm_preserve_float16: bool,
) -> PhysicalDeviceFloatControlsPropertiesKHRBuilder<'a> {
self.inner.shader_denorm_preserve_float16 = shader_denorm_preserve_float16.into();
self
}
pub fn shader_denorm_preserve_float32(
mut self,
shader_denorm_preserve_float32: bool,
) -> PhysicalDeviceFloatControlsPropertiesKHRBuilder<'a> {
self.inner.shader_denorm_preserve_float32 = shader_denorm_preserve_float32.into();
self
}
pub fn shader_denorm_preserve_float64(
mut self,
shader_denorm_preserve_float64: bool,
) -> PhysicalDeviceFloatControlsPropertiesKHRBuilder<'a> {
self.inner.shader_denorm_preserve_float64 = shader_denorm_preserve_float64.into();
self
}
pub fn shader_denorm_flush_to_zero_float16(
mut self,
shader_denorm_flush_to_zero_float16: bool,
) -> PhysicalDeviceFloatControlsPropertiesKHRBuilder<'a> {
self.inner.shader_denorm_flush_to_zero_float16 = shader_denorm_flush_to_zero_float16.into();
self
}
pub fn shader_denorm_flush_to_zero_float32(
mut self,
shader_denorm_flush_to_zero_float32: bool,
) -> PhysicalDeviceFloatControlsPropertiesKHRBuilder<'a> {
self.inner.shader_denorm_flush_to_zero_float32 = shader_denorm_flush_to_zero_float32.into();
self
}
pub fn shader_denorm_flush_to_zero_float64(
mut self,
shader_denorm_flush_to_zero_float64: bool,
) -> PhysicalDeviceFloatControlsPropertiesKHRBuilder<'a> {
self.inner.shader_denorm_flush_to_zero_float64 = shader_denorm_flush_to_zero_float64.into();
self
}
pub fn shader_rounding_mode_rte_float16(
mut self,
shader_rounding_mode_rte_float16: bool,
) -> PhysicalDeviceFloatControlsPropertiesKHRBuilder<'a> {
self.inner.shader_rounding_mode_rte_float16 = shader_rounding_mode_rte_float16.into();
self
}
pub fn shader_rounding_mode_rte_float32(
mut self,
shader_rounding_mode_rte_float32: bool,
) -> PhysicalDeviceFloatControlsPropertiesKHRBuilder<'a> {
self.inner.shader_rounding_mode_rte_float32 = shader_rounding_mode_rte_float32.into();
self
}
pub fn shader_rounding_mode_rte_float64(
mut self,
shader_rounding_mode_rte_float64: bool,
) -> PhysicalDeviceFloatControlsPropertiesKHRBuilder<'a> {
self.inner.shader_rounding_mode_rte_float64 = shader_rounding_mode_rte_float64.into();
self
}
pub fn shader_rounding_mode_rtz_float16(
mut self,
shader_rounding_mode_rtz_float16: bool,
) -> PhysicalDeviceFloatControlsPropertiesKHRBuilder<'a> {
self.inner.shader_rounding_mode_rtz_float16 = shader_rounding_mode_rtz_float16.into();
self
}
pub fn shader_rounding_mode_rtz_float32(
mut self,
shader_rounding_mode_rtz_float32: bool,
) -> PhysicalDeviceFloatControlsPropertiesKHRBuilder<'a> {
self.inner.shader_rounding_mode_rtz_float32 = shader_rounding_mode_rtz_float32.into();
self
}
pub fn shader_rounding_mode_rtz_float64(
mut self,
shader_rounding_mode_rtz_float64: bool,
) -> PhysicalDeviceFloatControlsPropertiesKHRBuilder<'a> {
self.inner.shader_rounding_mode_rtz_float64 = shader_rounding_mode_rtz_float64.into();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDeviceFloatControlsPropertiesKHR {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkNativeBufferANDROID.html>"]
pub struct NativeBufferANDROID {
pub s_type: StructureType,
pub p_next: *const c_void,
pub handle: *const c_void,
pub stride: c_int,
pub format: c_int,
pub usage: c_int,
}
impl ::std::default::Default for NativeBufferANDROID {
fn default() -> NativeBufferANDROID {
NativeBufferANDROID {
s_type: StructureType::NATIVE_BUFFER_ANDROID,
p_next: ::std::ptr::null(),
handle: ::std::ptr::null(),
stride: c_int::default(),
format: c_int::default(),
usage: c_int::default(),
}
}
}
impl NativeBufferANDROID {
pub fn builder<'a>() -> NativeBufferANDROIDBuilder<'a> {
NativeBufferANDROIDBuilder {
inner: NativeBufferANDROID::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct NativeBufferANDROIDBuilder<'a> {
inner: NativeBufferANDROID,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsNativeBufferANDROID {}
impl<'a> ::std::ops::Deref for NativeBufferANDROIDBuilder<'a> {
type Target = NativeBufferANDROID;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for NativeBufferANDROIDBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> NativeBufferANDROIDBuilder<'a> {
pub fn handle(mut self, handle: *const c_void) -> NativeBufferANDROIDBuilder<'a> {
self.inner.handle = handle;
self
}
pub fn stride(mut self, stride: c_int) -> NativeBufferANDROIDBuilder<'a> {
self.inner.stride = stride;
self
}
pub fn format(mut self, format: c_int) -> NativeBufferANDROIDBuilder<'a> {
self.inner.format = format;
self
}
pub fn usage(mut self, usage: c_int) -> NativeBufferANDROIDBuilder<'a> {
self.inner.usage = usage;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsNativeBufferANDROID>(
mut self,
next: &'a mut T,
) -> NativeBufferANDROIDBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> NativeBufferANDROID {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkShaderResourceUsageAMD.html>"]
pub struct ShaderResourceUsageAMD {
pub num_used_vgprs: u32,
pub num_used_sgprs: u32,
pub lds_size_per_local_work_group: u32,
pub lds_usage_size_in_bytes: usize,
pub scratch_mem_usage_in_bytes: usize,
}
impl ShaderResourceUsageAMD {
pub fn builder<'a>() -> ShaderResourceUsageAMDBuilder<'a> {
ShaderResourceUsageAMDBuilder {
inner: ShaderResourceUsageAMD::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ShaderResourceUsageAMDBuilder<'a> {
inner: ShaderResourceUsageAMD,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for ShaderResourceUsageAMDBuilder<'a> {
type Target = ShaderResourceUsageAMD;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ShaderResourceUsageAMDBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ShaderResourceUsageAMDBuilder<'a> {
pub fn num_used_vgprs(mut self, num_used_vgprs: u32) -> ShaderResourceUsageAMDBuilder<'a> {
self.inner.num_used_vgprs = num_used_vgprs;
self
}
pub fn num_used_sgprs(mut self, num_used_sgprs: u32) -> ShaderResourceUsageAMDBuilder<'a> {
self.inner.num_used_sgprs = num_used_sgprs;
self
}
pub fn lds_size_per_local_work_group(
mut self,
lds_size_per_local_work_group: u32,
) -> ShaderResourceUsageAMDBuilder<'a> {
self.inner.lds_size_per_local_work_group = lds_size_per_local_work_group;
self
}
pub fn lds_usage_size_in_bytes(
mut self,
lds_usage_size_in_bytes: usize,
) -> ShaderResourceUsageAMDBuilder<'a> {
self.inner.lds_usage_size_in_bytes = lds_usage_size_in_bytes;
self
}
pub fn scratch_mem_usage_in_bytes(
mut self,
scratch_mem_usage_in_bytes: usize,
) -> ShaderResourceUsageAMDBuilder<'a> {
self.inner.scratch_mem_usage_in_bytes = scratch_mem_usage_in_bytes;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ShaderResourceUsageAMD {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkShaderStatisticsInfoAMD.html>"]
pub struct ShaderStatisticsInfoAMD {
pub shader_stage_mask: ShaderStageFlags,
pub resource_usage: ShaderResourceUsageAMD,
pub num_physical_vgprs: u32,
pub num_physical_sgprs: u32,
pub num_available_vgprs: u32,
pub num_available_sgprs: u32,
pub compute_work_group_size: [u32; 3],
}
impl ::std::default::Default for ShaderStatisticsInfoAMD {
fn default() -> ShaderStatisticsInfoAMD {
ShaderStatisticsInfoAMD {
shader_stage_mask: ShaderStageFlags::default(),
resource_usage: ShaderResourceUsageAMD::default(),
num_physical_vgprs: u32::default(),
num_physical_sgprs: u32::default(),
num_available_vgprs: u32::default(),
num_available_sgprs: u32::default(),
compute_work_group_size: unsafe { ::std::mem::zeroed() },
}
}
}
impl ShaderStatisticsInfoAMD {
pub fn builder<'a>() -> ShaderStatisticsInfoAMDBuilder<'a> {
ShaderStatisticsInfoAMDBuilder {
inner: ShaderStatisticsInfoAMD::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ShaderStatisticsInfoAMDBuilder<'a> {
inner: ShaderStatisticsInfoAMD,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for ShaderStatisticsInfoAMDBuilder<'a> {
type Target = ShaderStatisticsInfoAMD;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ShaderStatisticsInfoAMDBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ShaderStatisticsInfoAMDBuilder<'a> {
pub fn shader_stage_mask(
mut self,
shader_stage_mask: ShaderStageFlags,
) -> ShaderStatisticsInfoAMDBuilder<'a> {
self.inner.shader_stage_mask = shader_stage_mask;
self
}
pub fn resource_usage(
mut self,
resource_usage: ShaderResourceUsageAMD,
) -> ShaderStatisticsInfoAMDBuilder<'a> {
self.inner.resource_usage = resource_usage;
self
}
pub fn num_physical_vgprs(
mut self,
num_physical_vgprs: u32,
) -> ShaderStatisticsInfoAMDBuilder<'a> {
self.inner.num_physical_vgprs = num_physical_vgprs;
self
}
pub fn num_physical_sgprs(
mut self,
num_physical_sgprs: u32,
) -> ShaderStatisticsInfoAMDBuilder<'a> {
self.inner.num_physical_sgprs = num_physical_sgprs;
self
}
pub fn num_available_vgprs(
mut self,
num_available_vgprs: u32,
) -> ShaderStatisticsInfoAMDBuilder<'a> {
self.inner.num_available_vgprs = num_available_vgprs;
self
}
pub fn num_available_sgprs(
mut self,
num_available_sgprs: u32,
) -> ShaderStatisticsInfoAMDBuilder<'a> {
self.inner.num_available_sgprs = num_available_sgprs;
self
}
pub fn compute_work_group_size(
mut self,
compute_work_group_size: [u32; 3],
) -> ShaderStatisticsInfoAMDBuilder<'a> {
self.inner.compute_work_group_size = compute_work_group_size;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ShaderStatisticsInfoAMD {
self.inner
}
}
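// Usage sketch (illustrative comment, not part of the generated bindings):
// `ShaderStatisticsInfoAMD` (embedding `ShaderResourceUsageAMD`) is written by the driver when
// `vkGetShaderInfoAMD` is queried for shader statistics, so in practice the fields are read
// rather than built.
//
//     // Given `stats: ShaderStatisticsInfoAMD` returned from such a query:
//     let vgpr_pressure =
//         stats.resource_usage.num_used_vgprs as f32 / stats.num_physical_vgprs as f32;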
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDeviceQueueGlobalPriorityCreateInfoEXT.html>"]
pub struct DeviceQueueGlobalPriorityCreateInfoEXT {
pub s_type: StructureType,
pub p_next: *const c_void,
pub global_priority: QueueGlobalPriorityEXT,
}
impl ::std::default::Default for DeviceQueueGlobalPriorityCreateInfoEXT {
fn default() -> DeviceQueueGlobalPriorityCreateInfoEXT {
DeviceQueueGlobalPriorityCreateInfoEXT {
s_type: StructureType::DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT,
p_next: ::std::ptr::null(),
global_priority: QueueGlobalPriorityEXT::default(),
}
}
}
impl DeviceQueueGlobalPriorityCreateInfoEXT {
pub fn builder<'a>() -> DeviceQueueGlobalPriorityCreateInfoEXTBuilder<'a> {
DeviceQueueGlobalPriorityCreateInfoEXTBuilder {
inner: DeviceQueueGlobalPriorityCreateInfoEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct DeviceQueueGlobalPriorityCreateInfoEXTBuilder<'a> {
inner: DeviceQueueGlobalPriorityCreateInfoEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsDeviceQueueCreateInfo for DeviceQueueGlobalPriorityCreateInfoEXTBuilder<'_> {}
unsafe impl ExtendsDeviceQueueCreateInfo for DeviceQueueGlobalPriorityCreateInfoEXT {}
impl<'a> ::std::ops::Deref for DeviceQueueGlobalPriorityCreateInfoEXTBuilder<'a> {
type Target = DeviceQueueGlobalPriorityCreateInfoEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for DeviceQueueGlobalPriorityCreateInfoEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> DeviceQueueGlobalPriorityCreateInfoEXTBuilder<'a> {
pub fn global_priority(
mut self,
global_priority: QueueGlobalPriorityEXT,
) -> DeviceQueueGlobalPriorityCreateInfoEXTBuilder<'a> {
self.inner.global_priority = global_priority;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> DeviceQueueGlobalPriorityCreateInfoEXT {
self.inner
}
}
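// Usage sketch (illustrative comment, not part of the generated bindings): global priority is
// requested per queue by chaining this struct onto the matching `DeviceQueueCreateInfo`;
// `priority_level` stands in for whichever `QueueGlobalPriorityEXT` value the application wants.
//
//     let queue_priorities = [1.0f32];
//     let mut global_priority =
//         DeviceQueueGlobalPriorityCreateInfoEXT::builder().global_priority(priority_level);
//     let queue_info = DeviceQueueCreateInfo::builder()
//         .queue_family_index(0)
//         .queue_priorities(&queue_priorities)
//         .push_next(&mut global_priority);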
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDebugUtilsObjectNameInfoEXT.html>"]
pub struct DebugUtilsObjectNameInfoEXT {
pub s_type: StructureType,
pub p_next: *const c_void,
pub object_type: ObjectType,
pub object_handle: u64,
pub p_object_name: *const c_char,
}
impl ::std::default::Default for DebugUtilsObjectNameInfoEXT {
fn default() -> DebugUtilsObjectNameInfoEXT {
DebugUtilsObjectNameInfoEXT {
s_type: StructureType::DEBUG_UTILS_OBJECT_NAME_INFO_EXT,
p_next: ::std::ptr::null(),
object_type: ObjectType::default(),
object_handle: u64::default(),
p_object_name: ::std::ptr::null(),
}
}
}
impl DebugUtilsObjectNameInfoEXT {
pub fn builder<'a>() -> DebugUtilsObjectNameInfoEXTBuilder<'a> {
DebugUtilsObjectNameInfoEXTBuilder {
inner: DebugUtilsObjectNameInfoEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct DebugUtilsObjectNameInfoEXTBuilder<'a> {
inner: DebugUtilsObjectNameInfoEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsDebugUtilsObjectNameInfoEXT {}
impl<'a> ::std::ops::Deref for DebugUtilsObjectNameInfoEXTBuilder<'a> {
type Target = DebugUtilsObjectNameInfoEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for DebugUtilsObjectNameInfoEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> DebugUtilsObjectNameInfoEXTBuilder<'a> {
pub fn object_type(
mut self,
object_type: ObjectType,
) -> DebugUtilsObjectNameInfoEXTBuilder<'a> {
self.inner.object_type = object_type;
self
}
pub fn object_handle(mut self, object_handle: u64) -> DebugUtilsObjectNameInfoEXTBuilder<'a> {
self.inner.object_handle = object_handle;
self
}
pub fn object_name(
mut self,
object_name: &'a ::std::ffi::CStr,
) -> DebugUtilsObjectNameInfoEXTBuilder<'a> {
self.inner.p_object_name = object_name.as_ptr();
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsDebugUtilsObjectNameInfoEXT>(
mut self,
next: &'a mut T,
) -> DebugUtilsObjectNameInfoEXTBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> DebugUtilsObjectNameInfoEXT {
self.inner
}
}
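// Usage sketch (illustrative comment, not part of the generated bindings): attaching a
// human-readable name to a handle for validation-layer and capture-tool output. The finished
// struct is passed to `vkSetDebugUtilsObjectNameEXT`; `image` stands in for any Vulkan handle.
//
//     let name = ::std::ffi::CString::new("shadow map").unwrap();
//     let name_info = DebugUtilsObjectNameInfoEXT::builder()
//         .object_type(ObjectType::IMAGE)
//         .object_handle(image.as_raw())
//         .object_name(&name);
//     // ...pass `&*name_info` to vkSetDebugUtilsObjectNameEXT...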
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDebugUtilsObjectTagInfoEXT.html>"]
pub struct DebugUtilsObjectTagInfoEXT {
pub s_type: StructureType,
pub p_next: *const c_void,
pub object_type: ObjectType,
pub object_handle: u64,
pub tag_name: u64,
pub tag_size: usize,
pub p_tag: *const c_void,
}
impl ::std::default::Default for DebugUtilsObjectTagInfoEXT {
fn default() -> DebugUtilsObjectTagInfoEXT {
DebugUtilsObjectTagInfoEXT {
s_type: StructureType::DEBUG_UTILS_OBJECT_TAG_INFO_EXT,
p_next: ::std::ptr::null(),
object_type: ObjectType::default(),
object_handle: u64::default(),
tag_name: u64::default(),
tag_size: usize::default(),
p_tag: ::std::ptr::null(),
}
}
}
impl DebugUtilsObjectTagInfoEXT {
pub fn builder<'a>() -> DebugUtilsObjectTagInfoEXTBuilder<'a> {
DebugUtilsObjectTagInfoEXTBuilder {
inner: DebugUtilsObjectTagInfoEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct DebugUtilsObjectTagInfoEXTBuilder<'a> {
inner: DebugUtilsObjectTagInfoEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsDebugUtilsObjectTagInfoEXT {}
impl<'a> ::std::ops::Deref for DebugUtilsObjectTagInfoEXTBuilder<'a> {
type Target = DebugUtilsObjectTagInfoEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for DebugUtilsObjectTagInfoEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> DebugUtilsObjectTagInfoEXTBuilder<'a> {
pub fn object_type(mut self, object_type: ObjectType) -> DebugUtilsObjectTagInfoEXTBuilder<'a> {
self.inner.object_type = object_type;
self
}
pub fn object_handle(mut self, object_handle: u64) -> DebugUtilsObjectTagInfoEXTBuilder<'a> {
self.inner.object_handle = object_handle;
self
}
pub fn tag_name(mut self, tag_name: u64) -> DebugUtilsObjectTagInfoEXTBuilder<'a> {
self.inner.tag_name = tag_name;
self
}
pub fn tag(mut self, tag: &'a [u8]) -> DebugUtilsObjectTagInfoEXTBuilder<'a> {
self.inner.tag_size = tag.len() as _;
self.inner.p_tag = tag.as_ptr() as *const c_void;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsDebugUtilsObjectTagInfoEXT>(
mut self,
next: &'a mut T,
) -> DebugUtilsObjectTagInfoEXTBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> DebugUtilsObjectTagInfoEXT {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDebugUtilsLabelEXT.html>"]
pub struct DebugUtilsLabelEXT {
pub s_type: StructureType,
pub p_next: *const c_void,
pub p_label_name: *const c_char,
pub color: [f32; 4],
}
impl ::std::default::Default for DebugUtilsLabelEXT {
fn default() -> DebugUtilsLabelEXT {
DebugUtilsLabelEXT {
s_type: StructureType::DEBUG_UTILS_LABEL_EXT,
p_next: ::std::ptr::null(),
p_label_name: ::std::ptr::null(),
color: unsafe { ::std::mem::zeroed() },
}
}
}
impl DebugUtilsLabelEXT {
pub fn builder<'a>() -> DebugUtilsLabelEXTBuilder<'a> {
DebugUtilsLabelEXTBuilder {
inner: DebugUtilsLabelEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct DebugUtilsLabelEXTBuilder<'a> {
inner: DebugUtilsLabelEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsDebugUtilsLabelEXT {}
impl<'a> ::std::ops::Deref for DebugUtilsLabelEXTBuilder<'a> {
type Target = DebugUtilsLabelEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for DebugUtilsLabelEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> DebugUtilsLabelEXTBuilder<'a> {
pub fn label_name(mut self, label_name: &'a ::std::ffi::CStr) -> DebugUtilsLabelEXTBuilder<'a> {
self.inner.p_label_name = label_name.as_ptr();
self
}
pub fn color(mut self, color: [f32; 4]) -> DebugUtilsLabelEXTBuilder<'a> {
self.inner.color = color;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsDebugUtilsLabelEXT>(
mut self,
next: &'a mut T,
) -> DebugUtilsLabelEXTBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> DebugUtilsLabelEXT {
self.inner
}
}
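// Usage sketch (illustrative comment, not part of the generated bindings): the same label struct
// is used for queue and command-buffer label regions (`vkQueueBeginDebugUtilsLabelEXT`,
// `vkCmdBeginDebugUtilsLabelEXT`, and the corresponding insert/end commands).
//
//     let label_name = ::std::ffi::CString::new("g-buffer pass").unwrap();
//     let label = DebugUtilsLabelEXT::builder()
//         .label_name(&label_name)
//         .color([0.1, 0.6, 0.1, 1.0]);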
#[repr(C)]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDebugUtilsMessengerCreateInfoEXT.html>"]
pub struct DebugUtilsMessengerCreateInfoEXT {
pub s_type: StructureType,
pub p_next: *const c_void,
pub flags: DebugUtilsMessengerCreateFlagsEXT,
pub message_severity: DebugUtilsMessageSeverityFlagsEXT,
pub message_type: DebugUtilsMessageTypeFlagsEXT,
pub pfn_user_callback: PFN_vkDebugUtilsMessengerCallbackEXT,
pub p_user_data: *mut c_void,
}
impl fmt::Debug for DebugUtilsMessengerCreateInfoEXT {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
fmt.debug_struct("DebugUtilsMessengerCreateInfoEXT")
.field("s_type", &self.s_type)
.field("p_next", &self.p_next)
.field("flags", &self.flags)
.field("message_severity", &self.message_severity)
.field("message_type", &self.message_type)
.field(
"pfn_user_callback",
&(self.pfn_user_callback.map(|x| x as *const ())),
)
.field("p_user_data", &self.p_user_data)
.finish()
}
}
impl ::std::default::Default for DebugUtilsMessengerCreateInfoEXT {
fn default() -> DebugUtilsMessengerCreateInfoEXT {
DebugUtilsMessengerCreateInfoEXT {
s_type: StructureType::DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT,
p_next: ::std::ptr::null(),
flags: DebugUtilsMessengerCreateFlagsEXT::default(),
message_severity: DebugUtilsMessageSeverityFlagsEXT::default(),
message_type: DebugUtilsMessageTypeFlagsEXT::default(),
pfn_user_callback: PFN_vkDebugUtilsMessengerCallbackEXT::default(),
p_user_data: ::std::ptr::null_mut(),
}
}
}
impl DebugUtilsMessengerCreateInfoEXT {
pub fn builder<'a>() -> DebugUtilsMessengerCreateInfoEXTBuilder<'a> {
DebugUtilsMessengerCreateInfoEXTBuilder {
inner: DebugUtilsMessengerCreateInfoEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct DebugUtilsMessengerCreateInfoEXTBuilder<'a> {
inner: DebugUtilsMessengerCreateInfoEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsInstanceCreateInfo for DebugUtilsMessengerCreateInfoEXTBuilder<'_> {}
unsafe impl ExtendsInstanceCreateInfo for DebugUtilsMessengerCreateInfoEXT {}
impl<'a> ::std::ops::Deref for DebugUtilsMessengerCreateInfoEXTBuilder<'a> {
type Target = DebugUtilsMessengerCreateInfoEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for DebugUtilsMessengerCreateInfoEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> DebugUtilsMessengerCreateInfoEXTBuilder<'a> {
pub fn flags(
mut self,
flags: DebugUtilsMessengerCreateFlagsEXT,
) -> DebugUtilsMessengerCreateInfoEXTBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn message_severity(
mut self,
message_severity: DebugUtilsMessageSeverityFlagsEXT,
) -> DebugUtilsMessengerCreateInfoEXTBuilder<'a> {
self.inner.message_severity = message_severity;
self
}
pub fn message_type(
mut self,
message_type: DebugUtilsMessageTypeFlagsEXT,
) -> DebugUtilsMessengerCreateInfoEXTBuilder<'a> {
self.inner.message_type = message_type;
self
}
pub fn pfn_user_callback(
mut self,
pfn_user_callback: PFN_vkDebugUtilsMessengerCallbackEXT,
) -> DebugUtilsMessengerCreateInfoEXTBuilder<'a> {
self.inner.pfn_user_callback = pfn_user_callback;
self
}
pub fn user_data(
mut self,
user_data: *mut c_void,
) -> DebugUtilsMessengerCreateInfoEXTBuilder<'a> {
self.inner.p_user_data = user_data;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> DebugUtilsMessengerCreateInfoEXT {
self.inner
}
}
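// Usage sketch (illustrative comment, not part of the generated bindings): a minimal messenger
// setup. The callback signature follows `PFN_vkDebugUtilsMessengerCallbackEXT`; chaining the
// create info onto `InstanceCreateInfo` (allowed by `ExtendsInstanceCreateInfo`) also captures
// messages emitted during instance creation and destruction.
//
//     unsafe extern "system" fn callback(
//         _severity: DebugUtilsMessageSeverityFlagsEXT,
//         _types: DebugUtilsMessageTypeFlagsEXT,
//         _data: *const DebugUtilsMessengerCallbackDataEXT,
//         _user_data: *mut c_void,
//     ) -> Bool32 {
//         FALSE // FALSE asks the layer not to abort the call that triggered the message
//     }
//     let mut messenger_info = DebugUtilsMessengerCreateInfoEXT::builder()
//         .message_severity(
//             DebugUtilsMessageSeverityFlagsEXT::WARNING | DebugUtilsMessageSeverityFlagsEXT::ERROR,
//         )
//         .message_type(DebugUtilsMessageTypeFlagsEXT::VALIDATION)
//         .pfn_user_callback(Some(callback));
//     let instance_info = InstanceCreateInfo::builder().push_next(&mut messenger_info);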
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDebugUtilsMessengerCallbackDataEXT.html>"]
pub struct DebugUtilsMessengerCallbackDataEXT {
pub s_type: StructureType,
pub p_next: *const c_void,
pub flags: DebugUtilsMessengerCallbackDataFlagsEXT,
pub p_message_id_name: *const c_char,
pub message_id_number: i32,
pub p_message: *const c_char,
pub queue_label_count: u32,
pub p_queue_labels: *const DebugUtilsLabelEXT,
pub cmd_buf_label_count: u32,
pub p_cmd_buf_labels: *const DebugUtilsLabelEXT,
pub object_count: u32,
pub p_objects: *const DebugUtilsObjectNameInfoEXT,
}
impl ::std::default::Default for DebugUtilsMessengerCallbackDataEXT {
fn default() -> DebugUtilsMessengerCallbackDataEXT {
DebugUtilsMessengerCallbackDataEXT {
s_type: StructureType::DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT,
p_next: ::std::ptr::null(),
flags: DebugUtilsMessengerCallbackDataFlagsEXT::default(),
p_message_id_name: ::std::ptr::null(),
message_id_number: i32::default(),
p_message: ::std::ptr::null(),
queue_label_count: u32::default(),
p_queue_labels: ::std::ptr::null(),
cmd_buf_label_count: u32::default(),
p_cmd_buf_labels: ::std::ptr::null(),
object_count: u32::default(),
p_objects: ::std::ptr::null(),
}
}
}
impl DebugUtilsMessengerCallbackDataEXT {
pub fn builder<'a>() -> DebugUtilsMessengerCallbackDataEXTBuilder<'a> {
DebugUtilsMessengerCallbackDataEXTBuilder {
inner: DebugUtilsMessengerCallbackDataEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct DebugUtilsMessengerCallbackDataEXTBuilder<'a> {
inner: DebugUtilsMessengerCallbackDataEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsDebugUtilsMessengerCallbackDataEXT {}
impl<'a> ::std::ops::Deref for DebugUtilsMessengerCallbackDataEXTBuilder<'a> {
type Target = DebugUtilsMessengerCallbackDataEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for DebugUtilsMessengerCallbackDataEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> DebugUtilsMessengerCallbackDataEXTBuilder<'a> {
pub fn flags(
mut self,
flags: DebugUtilsMessengerCallbackDataFlagsEXT,
) -> DebugUtilsMessengerCallbackDataEXTBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn message_id_name(
mut self,
message_id_name: &'a ::std::ffi::CStr,
) -> DebugUtilsMessengerCallbackDataEXTBuilder<'a> {
self.inner.p_message_id_name = message_id_name.as_ptr();
self
}
pub fn message_id_number(
mut self,
message_id_number: i32,
) -> DebugUtilsMessengerCallbackDataEXTBuilder<'a> {
self.inner.message_id_number = message_id_number;
self
}
pub fn message(
mut self,
message: &'a ::std::ffi::CStr,
) -> DebugUtilsMessengerCallbackDataEXTBuilder<'a> {
self.inner.p_message = message.as_ptr();
self
}
pub fn queue_labels(
mut self,
queue_labels: &'a [DebugUtilsLabelEXT],
) -> DebugUtilsMessengerCallbackDataEXTBuilder<'a> {
self.inner.queue_label_count = queue_labels.len() as _;
self.inner.p_queue_labels = queue_labels.as_ptr();
self
}
pub fn cmd_buf_labels(
mut self,
cmd_buf_labels: &'a [DebugUtilsLabelEXT],
) -> DebugUtilsMessengerCallbackDataEXTBuilder<'a> {
self.inner.cmd_buf_label_count = cmd_buf_labels.len() as _;
self.inner.p_cmd_buf_labels = cmd_buf_labels.as_ptr();
self
}
pub fn objects(
mut self,
objects: &'a [DebugUtilsObjectNameInfoEXT],
) -> DebugUtilsMessengerCallbackDataEXTBuilder<'a> {
self.inner.object_count = objects.len() as _;
self.inner.p_objects = objects.as_ptr();
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsDebugUtilsMessengerCallbackDataEXT>(
mut self,
next: &'a mut T,
) -> DebugUtilsMessengerCallbackDataEXTBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> DebugUtilsMessengerCallbackDataEXT {
self.inner
}
}
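// Usage sketch (illustrative comment, not part of the generated bindings): the implementation
// hands this struct to the callback as a raw pointer; the C strings need `CStr::from_ptr` and
// the label/object arrays are (pointer, count) pairs. The callback body is already `unsafe`.
//
//     // Given `p_callback_data: *const DebugUtilsMessengerCallbackDataEXT`, assumed non-null:
//     let data = &*p_callback_data;
//     let message = ::std::ffi::CStr::from_ptr(data.p_message).to_string_lossy();
//     let objects: &[DebugUtilsObjectNameInfoEXT] = if data.object_count > 0 {
//         ::std::slice::from_raw_parts(data.p_objects, data.object_count as usize)
//     } else {
//         &[]
//     };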
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkImportMemoryHostPointerInfoEXT.html>"]
pub struct ImportMemoryHostPointerInfoEXT {
pub s_type: StructureType,
pub p_next: *const c_void,
pub handle_type: ExternalMemoryHandleTypeFlags,
pub p_host_pointer: *mut c_void,
}
impl ::std::default::Default for ImportMemoryHostPointerInfoEXT {
fn default() -> ImportMemoryHostPointerInfoEXT {
ImportMemoryHostPointerInfoEXT {
s_type: StructureType::IMPORT_MEMORY_HOST_POINTER_INFO_EXT,
p_next: ::std::ptr::null(),
handle_type: ExternalMemoryHandleTypeFlags::default(),
p_host_pointer: ::std::ptr::null_mut(),
}
}
}
impl ImportMemoryHostPointerInfoEXT {
pub fn builder<'a>() -> ImportMemoryHostPointerInfoEXTBuilder<'a> {
ImportMemoryHostPointerInfoEXTBuilder {
inner: ImportMemoryHostPointerInfoEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ImportMemoryHostPointerInfoEXTBuilder<'a> {
inner: ImportMemoryHostPointerInfoEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsMemoryAllocateInfo for ImportMemoryHostPointerInfoEXTBuilder<'_> {}
unsafe impl ExtendsMemoryAllocateInfo for ImportMemoryHostPointerInfoEXT {}
impl<'a> ::std::ops::Deref for ImportMemoryHostPointerInfoEXTBuilder<'a> {
type Target = ImportMemoryHostPointerInfoEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ImportMemoryHostPointerInfoEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ImportMemoryHostPointerInfoEXTBuilder<'a> {
pub fn handle_type(
mut self,
handle_type: ExternalMemoryHandleTypeFlags,
) -> ImportMemoryHostPointerInfoEXTBuilder<'a> {
self.inner.handle_type = handle_type;
self
}
pub fn host_pointer(
mut self,
host_pointer: *mut c_void,
) -> ImportMemoryHostPointerInfoEXTBuilder<'a> {
self.inner.p_host_pointer = host_pointer;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ImportMemoryHostPointerInfoEXT {
self.inner
}
}
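// Usage sketch (illustrative comment, not part of the generated bindings): importing host memory
// chains this struct onto a `MemoryAllocateInfo`. `host_ptr`, `size`, `memory_type_index`, and
// `handle_type` are placeholders; the pointer and size must respect
// `PhysicalDeviceExternalMemoryHostPropertiesEXT::min_imported_host_pointer_alignment`.
//
//     let mut import_info = ImportMemoryHostPointerInfoEXT::builder()
//         .handle_type(handle_type) // the host-allocation bit of ExternalMemoryHandleTypeFlags
//         .host_pointer(host_ptr);
//     let alloc_info = MemoryAllocateInfo::builder()
//         .allocation_size(size)
//         .memory_type_index(memory_type_index)
//         .push_next(&mut import_info);
//     // ...pass `&*alloc_info` to vkAllocateMemory...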
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkMemoryHostPointerPropertiesEXT.html>"]
pub struct MemoryHostPointerPropertiesEXT {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub memory_type_bits: u32,
}
impl ::std::default::Default for MemoryHostPointerPropertiesEXT {
fn default() -> MemoryHostPointerPropertiesEXT {
MemoryHostPointerPropertiesEXT {
s_type: StructureType::MEMORY_HOST_POINTER_PROPERTIES_EXT,
p_next: ::std::ptr::null_mut(),
memory_type_bits: u32::default(),
}
}
}
impl MemoryHostPointerPropertiesEXT {
pub fn builder<'a>() -> MemoryHostPointerPropertiesEXTBuilder<'a> {
MemoryHostPointerPropertiesEXTBuilder {
inner: MemoryHostPointerPropertiesEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct MemoryHostPointerPropertiesEXTBuilder<'a> {
inner: MemoryHostPointerPropertiesEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsMemoryHostPointerPropertiesEXT {}
impl<'a> ::std::ops::Deref for MemoryHostPointerPropertiesEXTBuilder<'a> {
type Target = MemoryHostPointerPropertiesEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for MemoryHostPointerPropertiesEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> MemoryHostPointerPropertiesEXTBuilder<'a> {
pub fn memory_type_bits(
mut self,
memory_type_bits: u32,
) -> MemoryHostPointerPropertiesEXTBuilder<'a> {
self.inner.memory_type_bits = memory_type_bits;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsMemoryHostPointerPropertiesEXT>(
mut self,
next: &'a mut T,
) -> MemoryHostPointerPropertiesEXTBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> MemoryHostPointerPropertiesEXT {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDeviceExternalMemoryHostPropertiesEXT.html>"]
pub struct PhysicalDeviceExternalMemoryHostPropertiesEXT {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub min_imported_host_pointer_alignment: DeviceSize,
}
impl ::std::default::Default for PhysicalDeviceExternalMemoryHostPropertiesEXT {
fn default() -> PhysicalDeviceExternalMemoryHostPropertiesEXT {
PhysicalDeviceExternalMemoryHostPropertiesEXT {
s_type: StructureType::PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT,
p_next: ::std::ptr::null_mut(),
min_imported_host_pointer_alignment: DeviceSize::default(),
}
}
}
impl PhysicalDeviceExternalMemoryHostPropertiesEXT {
pub fn builder<'a>() -> PhysicalDeviceExternalMemoryHostPropertiesEXTBuilder<'a> {
PhysicalDeviceExternalMemoryHostPropertiesEXTBuilder {
inner: PhysicalDeviceExternalMemoryHostPropertiesEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDeviceExternalMemoryHostPropertiesEXTBuilder<'a> {
inner: PhysicalDeviceExternalMemoryHostPropertiesEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsPhysicalDeviceProperties2
for PhysicalDeviceExternalMemoryHostPropertiesEXTBuilder<'_>
{
}
unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceExternalMemoryHostPropertiesEXT {}
impl<'a> ::std::ops::Deref for PhysicalDeviceExternalMemoryHostPropertiesEXTBuilder<'a> {
type Target = PhysicalDeviceExternalMemoryHostPropertiesEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceExternalMemoryHostPropertiesEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDeviceExternalMemoryHostPropertiesEXTBuilder<'a> {
pub fn min_imported_host_pointer_alignment(
mut self,
min_imported_host_pointer_alignment: DeviceSize,
) -> PhysicalDeviceExternalMemoryHostPropertiesEXTBuilder<'a> {
self.inner.min_imported_host_pointer_alignment = min_imported_host_pointer_alignment;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDeviceExternalMemoryHostPropertiesEXT {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDeviceConservativeRasterizationPropertiesEXT.html>"]
pub struct PhysicalDeviceConservativeRasterizationPropertiesEXT {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub primitive_overestimation_size: f32,
pub max_extra_primitive_overestimation_size: f32,
pub extra_primitive_overestimation_size_granularity: f32,
pub primitive_underestimation: Bool32,
pub conservative_point_and_line_rasterization: Bool32,
pub degenerate_triangles_rasterized: Bool32,
pub degenerate_lines_rasterized: Bool32,
pub fully_covered_fragment_shader_input_variable: Bool32,
pub conservative_rasterization_post_depth_coverage: Bool32,
}
impl ::std::default::Default for PhysicalDeviceConservativeRasterizationPropertiesEXT {
fn default() -> PhysicalDeviceConservativeRasterizationPropertiesEXT {
PhysicalDeviceConservativeRasterizationPropertiesEXT {
s_type: StructureType::PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT,
p_next: ::std::ptr::null_mut(),
primitive_overestimation_size: f32::default(),
max_extra_primitive_overestimation_size: f32::default(),
extra_primitive_overestimation_size_granularity: f32::default(),
primitive_underestimation: Bool32::default(),
conservative_point_and_line_rasterization: Bool32::default(),
degenerate_triangles_rasterized: Bool32::default(),
degenerate_lines_rasterized: Bool32::default(),
fully_covered_fragment_shader_input_variable: Bool32::default(),
conservative_rasterization_post_depth_coverage: Bool32::default(),
}
}
}
impl PhysicalDeviceConservativeRasterizationPropertiesEXT {
pub fn builder<'a>() -> PhysicalDeviceConservativeRasterizationPropertiesEXTBuilder<'a> {
PhysicalDeviceConservativeRasterizationPropertiesEXTBuilder {
inner: PhysicalDeviceConservativeRasterizationPropertiesEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDeviceConservativeRasterizationPropertiesEXTBuilder<'a> {
inner: PhysicalDeviceConservativeRasterizationPropertiesEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsPhysicalDeviceProperties2
for PhysicalDeviceConservativeRasterizationPropertiesEXTBuilder<'_>
{
}
unsafe impl ExtendsPhysicalDeviceProperties2
for PhysicalDeviceConservativeRasterizationPropertiesEXT
{
}
impl<'a> ::std::ops::Deref for PhysicalDeviceConservativeRasterizationPropertiesEXTBuilder<'a> {
type Target = PhysicalDeviceConservativeRasterizationPropertiesEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceConservativeRasterizationPropertiesEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDeviceConservativeRasterizationPropertiesEXTBuilder<'a> {
pub fn primitive_overestimation_size(
mut self,
primitive_overestimation_size: f32,
) -> PhysicalDeviceConservativeRasterizationPropertiesEXTBuilder<'a> {
self.inner.primitive_overestimation_size = primitive_overestimation_size;
self
}
pub fn max_extra_primitive_overestimation_size(
mut self,
max_extra_primitive_overestimation_size: f32,
) -> PhysicalDeviceConservativeRasterizationPropertiesEXTBuilder<'a> {
self.inner.max_extra_primitive_overestimation_size =
max_extra_primitive_overestimation_size;
self
}
pub fn extra_primitive_overestimation_size_granularity(
mut self,
extra_primitive_overestimation_size_granularity: f32,
) -> PhysicalDeviceConservativeRasterizationPropertiesEXTBuilder<'a> {
self.inner.extra_primitive_overestimation_size_granularity =
extra_primitive_overestimation_size_granularity;
self
}
pub fn primitive_underestimation(
mut self,
primitive_underestimation: bool,
) -> PhysicalDeviceConservativeRasterizationPropertiesEXTBuilder<'a> {
self.inner.primitive_underestimation = primitive_underestimation.into();
self
}
pub fn conservative_point_and_line_rasterization(
mut self,
conservative_point_and_line_rasterization: bool,
) -> PhysicalDeviceConservativeRasterizationPropertiesEXTBuilder<'a> {
self.inner.conservative_point_and_line_rasterization =
conservative_point_and_line_rasterization.into();
self
}
pub fn degenerate_triangles_rasterized(
mut self,
degenerate_triangles_rasterized: bool,
) -> PhysicalDeviceConservativeRasterizationPropertiesEXTBuilder<'a> {
self.inner.degenerate_triangles_rasterized = degenerate_triangles_rasterized.into();
self
}
pub fn degenerate_lines_rasterized(
mut self,
degenerate_lines_rasterized: bool,
) -> PhysicalDeviceConservativeRasterizationPropertiesEXTBuilder<'a> {
self.inner.degenerate_lines_rasterized = degenerate_lines_rasterized.into();
self
}
pub fn fully_covered_fragment_shader_input_variable(
mut self,
fully_covered_fragment_shader_input_variable: bool,
) -> PhysicalDeviceConservativeRasterizationPropertiesEXTBuilder<'a> {
self.inner.fully_covered_fragment_shader_input_variable =
fully_covered_fragment_shader_input_variable.into();
self
}
pub fn conservative_rasterization_post_depth_coverage(
mut self,
conservative_rasterization_post_depth_coverage: bool,
) -> PhysicalDeviceConservativeRasterizationPropertiesEXTBuilder<'a> {
self.inner.conservative_rasterization_post_depth_coverage =
conservative_rasterization_post_depth_coverage.into();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDeviceConservativeRasterizationPropertiesEXT {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkCalibratedTimestampInfoEXT.html>"]
pub struct CalibratedTimestampInfoEXT {
pub s_type: StructureType,
pub p_next: *const c_void,
pub time_domain: TimeDomainEXT,
}
impl ::std::default::Default for CalibratedTimestampInfoEXT {
fn default() -> CalibratedTimestampInfoEXT {
CalibratedTimestampInfoEXT {
s_type: StructureType::CALIBRATED_TIMESTAMP_INFO_EXT,
p_next: ::std::ptr::null(),
time_domain: TimeDomainEXT::default(),
}
}
}
impl CalibratedTimestampInfoEXT {
pub fn builder<'a>() -> CalibratedTimestampInfoEXTBuilder<'a> {
CalibratedTimestampInfoEXTBuilder {
inner: CalibratedTimestampInfoEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct CalibratedTimestampInfoEXTBuilder<'a> {
inner: CalibratedTimestampInfoEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsCalibratedTimestampInfoEXT {}
impl<'a> ::std::ops::Deref for CalibratedTimestampInfoEXTBuilder<'a> {
type Target = CalibratedTimestampInfoEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for CalibratedTimestampInfoEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> CalibratedTimestampInfoEXTBuilder<'a> {
pub fn time_domain(
mut self,
time_domain: TimeDomainEXT,
) -> CalibratedTimestampInfoEXTBuilder<'a> {
self.inner.time_domain = time_domain;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsCalibratedTimestampInfoEXT>(
mut self,
next: &'a mut T,
) -> CalibratedTimestampInfoEXTBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> CalibratedTimestampInfoEXT {
self.inner
}
}
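// A minimal usage sketch (not part of the generated bindings) for the builder above.
// `TimeDomainEXT::DEVICE` is assumed to be one of the `TimeDomainEXT` constants generated
// elsewhere in this module; in real code, pick a domain reported by the implementation.
#[allow(dead_code)]
fn example_calibrated_timestamp_info() -> CalibratedTimestampInfoEXT {
    CalibratedTimestampInfoEXT::builder()
        .time_domain(TimeDomainEXT::DEVICE)
        // `build` copies the plain struct out of the builder; nothing in the chain is
        // borrowed here, so discarding the lifetime information is harmless.
        .build()
}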
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDeviceShaderCorePropertiesAMD.html>"]
pub struct PhysicalDeviceShaderCorePropertiesAMD {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub shader_engine_count: u32,
pub shader_arrays_per_engine_count: u32,
pub compute_units_per_shader_array: u32,
pub simd_per_compute_unit: u32,
pub wavefronts_per_simd: u32,
pub wavefront_size: u32,
pub sgprs_per_simd: u32,
pub min_sgpr_allocation: u32,
pub max_sgpr_allocation: u32,
pub sgpr_allocation_granularity: u32,
pub vgprs_per_simd: u32,
pub min_vgpr_allocation: u32,
pub max_vgpr_allocation: u32,
pub vgpr_allocation_granularity: u32,
}
impl ::std::default::Default for PhysicalDeviceShaderCorePropertiesAMD {
fn default() -> PhysicalDeviceShaderCorePropertiesAMD {
PhysicalDeviceShaderCorePropertiesAMD {
s_type: StructureType::PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD,
p_next: ::std::ptr::null_mut(),
shader_engine_count: u32::default(),
shader_arrays_per_engine_count: u32::default(),
compute_units_per_shader_array: u32::default(),
simd_per_compute_unit: u32::default(),
wavefronts_per_simd: u32::default(),
wavefront_size: u32::default(),
sgprs_per_simd: u32::default(),
min_sgpr_allocation: u32::default(),
max_sgpr_allocation: u32::default(),
sgpr_allocation_granularity: u32::default(),
vgprs_per_simd: u32::default(),
min_vgpr_allocation: u32::default(),
max_vgpr_allocation: u32::default(),
vgpr_allocation_granularity: u32::default(),
}
}
}
impl PhysicalDeviceShaderCorePropertiesAMD {
pub fn builder<'a>() -> PhysicalDeviceShaderCorePropertiesAMDBuilder<'a> {
PhysicalDeviceShaderCorePropertiesAMDBuilder {
inner: PhysicalDeviceShaderCorePropertiesAMD::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDeviceShaderCorePropertiesAMDBuilder<'a> {
inner: PhysicalDeviceShaderCorePropertiesAMD,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceShaderCorePropertiesAMDBuilder<'_> {}
unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceShaderCorePropertiesAMD {}
impl<'a> ::std::ops::Deref for PhysicalDeviceShaderCorePropertiesAMDBuilder<'a> {
type Target = PhysicalDeviceShaderCorePropertiesAMD;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceShaderCorePropertiesAMDBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDeviceShaderCorePropertiesAMDBuilder<'a> {
pub fn shader_engine_count(
mut self,
shader_engine_count: u32,
) -> PhysicalDeviceShaderCorePropertiesAMDBuilder<'a> {
self.inner.shader_engine_count = shader_engine_count;
self
}
pub fn shader_arrays_per_engine_count(
mut self,
shader_arrays_per_engine_count: u32,
) -> PhysicalDeviceShaderCorePropertiesAMDBuilder<'a> {
self.inner.shader_arrays_per_engine_count = shader_arrays_per_engine_count;
self
}
pub fn compute_units_per_shader_array(
mut self,
compute_units_per_shader_array: u32,
) -> PhysicalDeviceShaderCorePropertiesAMDBuilder<'a> {
self.inner.compute_units_per_shader_array = compute_units_per_shader_array;
self
}
pub fn simd_per_compute_unit(
mut self,
simd_per_compute_unit: u32,
) -> PhysicalDeviceShaderCorePropertiesAMDBuilder<'a> {
self.inner.simd_per_compute_unit = simd_per_compute_unit;
self
}
pub fn wavefronts_per_simd(
mut self,
wavefronts_per_simd: u32,
) -> PhysicalDeviceShaderCorePropertiesAMDBuilder<'a> {
self.inner.wavefronts_per_simd = wavefronts_per_simd;
self
}
pub fn wavefront_size(
mut self,
wavefront_size: u32,
) -> PhysicalDeviceShaderCorePropertiesAMDBuilder<'a> {
self.inner.wavefront_size = wavefront_size;
self
}
pub fn sgprs_per_simd(
mut self,
sgprs_per_simd: u32,
) -> PhysicalDeviceShaderCorePropertiesAMDBuilder<'a> {
self.inner.sgprs_per_simd = sgprs_per_simd;
self
}
pub fn min_sgpr_allocation(
mut self,
min_sgpr_allocation: u32,
) -> PhysicalDeviceShaderCorePropertiesAMDBuilder<'a> {
self.inner.min_sgpr_allocation = min_sgpr_allocation;
self
}
pub fn max_sgpr_allocation(
mut self,
max_sgpr_allocation: u32,
) -> PhysicalDeviceShaderCorePropertiesAMDBuilder<'a> {
self.inner.max_sgpr_allocation = max_sgpr_allocation;
self
}
pub fn sgpr_allocation_granularity(
mut self,
sgpr_allocation_granularity: u32,
) -> PhysicalDeviceShaderCorePropertiesAMDBuilder<'a> {
self.inner.sgpr_allocation_granularity = sgpr_allocation_granularity;
self
}
pub fn vgprs_per_simd(
mut self,
vgprs_per_simd: u32,
) -> PhysicalDeviceShaderCorePropertiesAMDBuilder<'a> {
self.inner.vgprs_per_simd = vgprs_per_simd;
self
}
pub fn min_vgpr_allocation(
mut self,
min_vgpr_allocation: u32,
) -> PhysicalDeviceShaderCorePropertiesAMDBuilder<'a> {
self.inner.min_vgpr_allocation = min_vgpr_allocation;
self
}
pub fn max_vgpr_allocation(
mut self,
max_vgpr_allocation: u32,
) -> PhysicalDeviceShaderCorePropertiesAMDBuilder<'a> {
self.inner.max_vgpr_allocation = max_vgpr_allocation;
self
}
pub fn vgpr_allocation_granularity(
mut self,
vgpr_allocation_granularity: u32,
) -> PhysicalDeviceShaderCorePropertiesAMDBuilder<'a> {
self.inner.vgpr_allocation_granularity = vgpr_allocation_granularity;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDeviceShaderCorePropertiesAMD {
self.inner
}
}
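// A minimal usage sketch (not part of the generated bindings): properties structs like the one
// above are filled in by the implementation, so they are chained onto `PhysicalDeviceProperties2`
// (assumed to be generated elsewhere in this module, with the same `push_next` hook) before a
// `vkGetPhysicalDeviceProperties2`-style query.
#[allow(dead_code)]
fn example_query_shader_core_properties<'a>(
    shader_core: &'a mut PhysicalDeviceShaderCorePropertiesAMD,
) -> PhysicalDeviceProperties2Builder<'a> {
    // The `ExtendsPhysicalDeviceProperties2` impls above are what make this `push_next` legal.
    PhysicalDeviceProperties2::builder().push_next(shader_core)
}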
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPipelineRasterizationConservativeStateCreateInfoEXT.html>"]
pub struct PipelineRasterizationConservativeStateCreateInfoEXT {
pub s_type: StructureType,
pub p_next: *const c_void,
pub flags: PipelineRasterizationConservativeStateCreateFlagsEXT,
pub conservative_rasterization_mode: ConservativeRasterizationModeEXT,
pub extra_primitive_overestimation_size: f32,
}
impl ::std::default::Default for PipelineRasterizationConservativeStateCreateInfoEXT {
fn default() -> PipelineRasterizationConservativeStateCreateInfoEXT {
PipelineRasterizationConservativeStateCreateInfoEXT {
s_type: StructureType::PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT,
p_next: ::std::ptr::null(),
flags: PipelineRasterizationConservativeStateCreateFlagsEXT::default(),
conservative_rasterization_mode: ConservativeRasterizationModeEXT::default(),
extra_primitive_overestimation_size: f32::default(),
}
}
}
impl PipelineRasterizationConservativeStateCreateInfoEXT {
pub fn builder<'a>() -> PipelineRasterizationConservativeStateCreateInfoEXTBuilder<'a> {
PipelineRasterizationConservativeStateCreateInfoEXTBuilder {
inner: PipelineRasterizationConservativeStateCreateInfoEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PipelineRasterizationConservativeStateCreateInfoEXTBuilder<'a> {
inner: PipelineRasterizationConservativeStateCreateInfoEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsPipelineRasterizationStateCreateInfo
for PipelineRasterizationConservativeStateCreateInfoEXTBuilder<'_>
{
}
unsafe impl ExtendsPipelineRasterizationStateCreateInfo
for PipelineRasterizationConservativeStateCreateInfoEXT
{
}
impl<'a> ::std::ops::Deref for PipelineRasterizationConservativeStateCreateInfoEXTBuilder<'a> {
type Target = PipelineRasterizationConservativeStateCreateInfoEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PipelineRasterizationConservativeStateCreateInfoEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PipelineRasterizationConservativeStateCreateInfoEXTBuilder<'a> {
pub fn flags(
mut self,
flags: PipelineRasterizationConservativeStateCreateFlagsEXT,
) -> PipelineRasterizationConservativeStateCreateInfoEXTBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn conservative_rasterization_mode(
mut self,
conservative_rasterization_mode: ConservativeRasterizationModeEXT,
) -> PipelineRasterizationConservativeStateCreateInfoEXTBuilder<'a> {
self.inner.conservative_rasterization_mode = conservative_rasterization_mode;
self
}
pub fn extra_primitive_overestimation_size(
mut self,
extra_primitive_overestimation_size: f32,
) -> PipelineRasterizationConservativeStateCreateInfoEXTBuilder<'a> {
self.inner.extra_primitive_overestimation_size = extra_primitive_overestimation_size;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PipelineRasterizationConservativeStateCreateInfoEXT {
self.inner
}
}
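// A minimal usage sketch (not part of the generated bindings): extends a rasterization state
// create info with conservative rasterization. `PipelineRasterizationStateCreateInfo` and its
// builder are assumed to be generated elsewhere in this module, and
// `ConservativeRasterizationModeEXT::OVERESTIMATE` to be one of the generated mode constants.
#[allow(dead_code)]
fn example_conservative_rasterization<'a>(
    conservative: &'a mut PipelineRasterizationConservativeStateCreateInfoEXT,
) -> PipelineRasterizationStateCreateInfoBuilder<'a> {
    *conservative = PipelineRasterizationConservativeStateCreateInfoEXT::builder()
        .conservative_rasterization_mode(ConservativeRasterizationModeEXT::OVERESTIMATE)
        .extra_primitive_overestimation_size(0.0)
        .build();
    // The `ExtendsPipelineRasterizationStateCreateInfo` impls above allow this `push_next`.
    PipelineRasterizationStateCreateInfo::builder().push_next(conservative)
}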
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDeviceDescriptorIndexingFeaturesEXT.html>"]
pub struct PhysicalDeviceDescriptorIndexingFeaturesEXT {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub shader_input_attachment_array_dynamic_indexing: Bool32,
pub shader_uniform_texel_buffer_array_dynamic_indexing: Bool32,
pub shader_storage_texel_buffer_array_dynamic_indexing: Bool32,
pub shader_uniform_buffer_array_non_uniform_indexing: Bool32,
pub shader_sampled_image_array_non_uniform_indexing: Bool32,
pub shader_storage_buffer_array_non_uniform_indexing: Bool32,
pub shader_storage_image_array_non_uniform_indexing: Bool32,
pub shader_input_attachment_array_non_uniform_indexing: Bool32,
pub shader_uniform_texel_buffer_array_non_uniform_indexing: Bool32,
pub shader_storage_texel_buffer_array_non_uniform_indexing: Bool32,
pub descriptor_binding_uniform_buffer_update_after_bind: Bool32,
pub descriptor_binding_sampled_image_update_after_bind: Bool32,
pub descriptor_binding_storage_image_update_after_bind: Bool32,
pub descriptor_binding_storage_buffer_update_after_bind: Bool32,
pub descriptor_binding_uniform_texel_buffer_update_after_bind: Bool32,
pub descriptor_binding_storage_texel_buffer_update_after_bind: Bool32,
pub descriptor_binding_update_unused_while_pending: Bool32,
pub descriptor_binding_partially_bound: Bool32,
pub descriptor_binding_variable_descriptor_count: Bool32,
pub runtime_descriptor_array: Bool32,
}
impl ::std::default::Default for PhysicalDeviceDescriptorIndexingFeaturesEXT {
fn default() -> PhysicalDeviceDescriptorIndexingFeaturesEXT {
PhysicalDeviceDescriptorIndexingFeaturesEXT {
s_type: StructureType::PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT,
p_next: ::std::ptr::null_mut(),
shader_input_attachment_array_dynamic_indexing: Bool32::default(),
shader_uniform_texel_buffer_array_dynamic_indexing: Bool32::default(),
shader_storage_texel_buffer_array_dynamic_indexing: Bool32::default(),
shader_uniform_buffer_array_non_uniform_indexing: Bool32::default(),
shader_sampled_image_array_non_uniform_indexing: Bool32::default(),
shader_storage_buffer_array_non_uniform_indexing: Bool32::default(),
shader_storage_image_array_non_uniform_indexing: Bool32::default(),
shader_input_attachment_array_non_uniform_indexing: Bool32::default(),
shader_uniform_texel_buffer_array_non_uniform_indexing: Bool32::default(),
shader_storage_texel_buffer_array_non_uniform_indexing: Bool32::default(),
descriptor_binding_uniform_buffer_update_after_bind: Bool32::default(),
descriptor_binding_sampled_image_update_after_bind: Bool32::default(),
descriptor_binding_storage_image_update_after_bind: Bool32::default(),
descriptor_binding_storage_buffer_update_after_bind: Bool32::default(),
descriptor_binding_uniform_texel_buffer_update_after_bind: Bool32::default(),
descriptor_binding_storage_texel_buffer_update_after_bind: Bool32::default(),
descriptor_binding_update_unused_while_pending: Bool32::default(),
descriptor_binding_partially_bound: Bool32::default(),
descriptor_binding_variable_descriptor_count: Bool32::default(),
runtime_descriptor_array: Bool32::default(),
}
}
}
impl PhysicalDeviceDescriptorIndexingFeaturesEXT {
pub fn builder<'a>() -> PhysicalDeviceDescriptorIndexingFeaturesEXTBuilder<'a> {
PhysicalDeviceDescriptorIndexingFeaturesEXTBuilder {
inner: PhysicalDeviceDescriptorIndexingFeaturesEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDeviceDescriptorIndexingFeaturesEXTBuilder<'a> {
inner: PhysicalDeviceDescriptorIndexingFeaturesEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceDescriptorIndexingFeaturesEXTBuilder<'_> {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceDescriptorIndexingFeaturesEXT {}
impl<'a> ::std::ops::Deref for PhysicalDeviceDescriptorIndexingFeaturesEXTBuilder<'a> {
type Target = PhysicalDeviceDescriptorIndexingFeaturesEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceDescriptorIndexingFeaturesEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDeviceDescriptorIndexingFeaturesEXTBuilder<'a> {
pub fn shader_input_attachment_array_dynamic_indexing(
mut self,
shader_input_attachment_array_dynamic_indexing: bool,
) -> PhysicalDeviceDescriptorIndexingFeaturesEXTBuilder<'a> {
self.inner.shader_input_attachment_array_dynamic_indexing =
shader_input_attachment_array_dynamic_indexing.into();
self
}
pub fn shader_uniform_texel_buffer_array_dynamic_indexing(
mut self,
shader_uniform_texel_buffer_array_dynamic_indexing: bool,
) -> PhysicalDeviceDescriptorIndexingFeaturesEXTBuilder<'a> {
self.inner
.shader_uniform_texel_buffer_array_dynamic_indexing =
shader_uniform_texel_buffer_array_dynamic_indexing.into();
self
}
pub fn shader_storage_texel_buffer_array_dynamic_indexing(
mut self,
shader_storage_texel_buffer_array_dynamic_indexing: bool,
) -> PhysicalDeviceDescriptorIndexingFeaturesEXTBuilder<'a> {
self.inner
.shader_storage_texel_buffer_array_dynamic_indexing =
shader_storage_texel_buffer_array_dynamic_indexing.into();
self
}
pub fn shader_uniform_buffer_array_non_uniform_indexing(
mut self,
shader_uniform_buffer_array_non_uniform_indexing: bool,
) -> PhysicalDeviceDescriptorIndexingFeaturesEXTBuilder<'a> {
self.inner.shader_uniform_buffer_array_non_uniform_indexing =
shader_uniform_buffer_array_non_uniform_indexing.into();
self
}
pub fn shader_sampled_image_array_non_uniform_indexing(
mut self,
shader_sampled_image_array_non_uniform_indexing: bool,
) -> PhysicalDeviceDescriptorIndexingFeaturesEXTBuilder<'a> {
self.inner.shader_sampled_image_array_non_uniform_indexing =
shader_sampled_image_array_non_uniform_indexing.into();
self
}
pub fn shader_storage_buffer_array_non_uniform_indexing(
mut self,
shader_storage_buffer_array_non_uniform_indexing: bool,
) -> PhysicalDeviceDescriptorIndexingFeaturesEXTBuilder<'a> {
self.inner.shader_storage_buffer_array_non_uniform_indexing =
shader_storage_buffer_array_non_uniform_indexing.into();
self
}
pub fn shader_storage_image_array_non_uniform_indexing(
mut self,
shader_storage_image_array_non_uniform_indexing: bool,
) -> PhysicalDeviceDescriptorIndexingFeaturesEXTBuilder<'a> {
self.inner.shader_storage_image_array_non_uniform_indexing =
shader_storage_image_array_non_uniform_indexing.into();
self
}
pub fn shader_input_attachment_array_non_uniform_indexing(
mut self,
shader_input_attachment_array_non_uniform_indexing: bool,
) -> PhysicalDeviceDescriptorIndexingFeaturesEXTBuilder<'a> {
self.inner
.shader_input_attachment_array_non_uniform_indexing =
shader_input_attachment_array_non_uniform_indexing.into();
self
}
pub fn shader_uniform_texel_buffer_array_non_uniform_indexing(
mut self,
shader_uniform_texel_buffer_array_non_uniform_indexing: bool,
) -> PhysicalDeviceDescriptorIndexingFeaturesEXTBuilder<'a> {
self.inner
.shader_uniform_texel_buffer_array_non_uniform_indexing =
shader_uniform_texel_buffer_array_non_uniform_indexing.into();
self
}
pub fn shader_storage_texel_buffer_array_non_uniform_indexing(
mut self,
shader_storage_texel_buffer_array_non_uniform_indexing: bool,
) -> PhysicalDeviceDescriptorIndexingFeaturesEXTBuilder<'a> {
self.inner
.shader_storage_texel_buffer_array_non_uniform_indexing =
shader_storage_texel_buffer_array_non_uniform_indexing.into();
self
}
pub fn descriptor_binding_uniform_buffer_update_after_bind(
mut self,
descriptor_binding_uniform_buffer_update_after_bind: bool,
) -> PhysicalDeviceDescriptorIndexingFeaturesEXTBuilder<'a> {
self.inner
.descriptor_binding_uniform_buffer_update_after_bind =
descriptor_binding_uniform_buffer_update_after_bind.into();
self
}
pub fn descriptor_binding_sampled_image_update_after_bind(
mut self,
descriptor_binding_sampled_image_update_after_bind: bool,
) -> PhysicalDeviceDescriptorIndexingFeaturesEXTBuilder<'a> {
self.inner
.descriptor_binding_sampled_image_update_after_bind =
descriptor_binding_sampled_image_update_after_bind.into();
self
}
pub fn descriptor_binding_storage_image_update_after_bind(
mut self,
descriptor_binding_storage_image_update_after_bind: bool,
) -> PhysicalDeviceDescriptorIndexingFeaturesEXTBuilder<'a> {
self.inner
.descriptor_binding_storage_image_update_after_bind =
descriptor_binding_storage_image_update_after_bind.into();
self
}
pub fn descriptor_binding_storage_buffer_update_after_bind(
mut self,
descriptor_binding_storage_buffer_update_after_bind: bool,
) -> PhysicalDeviceDescriptorIndexingFeaturesEXTBuilder<'a> {
self.inner
.descriptor_binding_storage_buffer_update_after_bind =
descriptor_binding_storage_buffer_update_after_bind.into();
self
}
pub fn descriptor_binding_uniform_texel_buffer_update_after_bind(
mut self,
descriptor_binding_uniform_texel_buffer_update_after_bind: bool,
) -> PhysicalDeviceDescriptorIndexingFeaturesEXTBuilder<'a> {
self.inner
.descriptor_binding_uniform_texel_buffer_update_after_bind =
descriptor_binding_uniform_texel_buffer_update_after_bind.into();
self
}
pub fn descriptor_binding_storage_texel_buffer_update_after_bind(
mut self,
descriptor_binding_storage_texel_buffer_update_after_bind: bool,
) -> PhysicalDeviceDescriptorIndexingFeaturesEXTBuilder<'a> {
self.inner
.descriptor_binding_storage_texel_buffer_update_after_bind =
descriptor_binding_storage_texel_buffer_update_after_bind.into();
self
}
pub fn descriptor_binding_update_unused_while_pending(
mut self,
descriptor_binding_update_unused_while_pending: bool,
) -> PhysicalDeviceDescriptorIndexingFeaturesEXTBuilder<'a> {
self.inner.descriptor_binding_update_unused_while_pending =
descriptor_binding_update_unused_while_pending.into();
self
}
pub fn descriptor_binding_partially_bound(
mut self,
descriptor_binding_partially_bound: bool,
) -> PhysicalDeviceDescriptorIndexingFeaturesEXTBuilder<'a> {
self.inner.descriptor_binding_partially_bound = descriptor_binding_partially_bound.into();
self
}
pub fn descriptor_binding_variable_descriptor_count(
mut self,
descriptor_binding_variable_descriptor_count: bool,
) -> PhysicalDeviceDescriptorIndexingFeaturesEXTBuilder<'a> {
self.inner.descriptor_binding_variable_descriptor_count =
descriptor_binding_variable_descriptor_count.into();
self
}
pub fn runtime_descriptor_array(
mut self,
runtime_descriptor_array: bool,
) -> PhysicalDeviceDescriptorIndexingFeaturesEXTBuilder<'a> {
self.inner.runtime_descriptor_array = runtime_descriptor_array.into();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDeviceDescriptorIndexingFeaturesEXT {
self.inner
}
}
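// A minimal usage sketch (not part of the generated bindings): enables a few descriptor-indexing
// features and chains the struct into `DeviceCreateInfo` (assumed to be generated elsewhere in
// this module) before device creation.
#[allow(dead_code)]
fn example_enable_descriptor_indexing<'a>(
    indexing: &'a mut PhysicalDeviceDescriptorIndexingFeaturesEXT,
) -> DeviceCreateInfoBuilder<'a> {
    *indexing = PhysicalDeviceDescriptorIndexingFeaturesEXT::builder()
        .runtime_descriptor_array(true)
        .descriptor_binding_partially_bound(true)
        .descriptor_binding_variable_descriptor_count(true)
        .build();
    // The `ExtendsDeviceCreateInfo` impls above are what make this `push_next` legal.
    DeviceCreateInfo::builder().push_next(indexing)
}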
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDeviceDescriptorIndexingPropertiesEXT.html>"]
pub struct PhysicalDeviceDescriptorIndexingPropertiesEXT {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub max_update_after_bind_descriptors_in_all_pools: u32,
pub shader_uniform_buffer_array_non_uniform_indexing_native: Bool32,
pub shader_sampled_image_array_non_uniform_indexing_native: Bool32,
pub shader_storage_buffer_array_non_uniform_indexing_native: Bool32,
pub shader_storage_image_array_non_uniform_indexing_native: Bool32,
pub shader_input_attachment_array_non_uniform_indexing_native: Bool32,
pub robust_buffer_access_update_after_bind: Bool32,
pub quad_divergent_implicit_lod: Bool32,
pub max_per_stage_descriptor_update_after_bind_samplers: u32,
pub max_per_stage_descriptor_update_after_bind_uniform_buffers: u32,
pub max_per_stage_descriptor_update_after_bind_storage_buffers: u32,
pub max_per_stage_descriptor_update_after_bind_sampled_images: u32,
pub max_per_stage_descriptor_update_after_bind_storage_images: u32,
pub max_per_stage_descriptor_update_after_bind_input_attachments: u32,
pub max_per_stage_update_after_bind_resources: u32,
pub max_descriptor_set_update_after_bind_samplers: u32,
pub max_descriptor_set_update_after_bind_uniform_buffers: u32,
pub max_descriptor_set_update_after_bind_uniform_buffers_dynamic: u32,
pub max_descriptor_set_update_after_bind_storage_buffers: u32,
pub max_descriptor_set_update_after_bind_storage_buffers_dynamic: u32,
pub max_descriptor_set_update_after_bind_sampled_images: u32,
pub max_descriptor_set_update_after_bind_storage_images: u32,
pub max_descriptor_set_update_after_bind_input_attachments: u32,
}
impl ::std::default::Default for PhysicalDeviceDescriptorIndexingPropertiesEXT {
fn default() -> PhysicalDeviceDescriptorIndexingPropertiesEXT {
PhysicalDeviceDescriptorIndexingPropertiesEXT {
s_type: StructureType::PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT,
p_next: ::std::ptr::null_mut(),
max_update_after_bind_descriptors_in_all_pools: u32::default(),
shader_uniform_buffer_array_non_uniform_indexing_native: Bool32::default(),
shader_sampled_image_array_non_uniform_indexing_native: Bool32::default(),
shader_storage_buffer_array_non_uniform_indexing_native: Bool32::default(),
shader_storage_image_array_non_uniform_indexing_native: Bool32::default(),
shader_input_attachment_array_non_uniform_indexing_native: Bool32::default(),
robust_buffer_access_update_after_bind: Bool32::default(),
quad_divergent_implicit_lod: Bool32::default(),
max_per_stage_descriptor_update_after_bind_samplers: u32::default(),
max_per_stage_descriptor_update_after_bind_uniform_buffers: u32::default(),
max_per_stage_descriptor_update_after_bind_storage_buffers: u32::default(),
max_per_stage_descriptor_update_after_bind_sampled_images: u32::default(),
max_per_stage_descriptor_update_after_bind_storage_images: u32::default(),
max_per_stage_descriptor_update_after_bind_input_attachments: u32::default(),
max_per_stage_update_after_bind_resources: u32::default(),
max_descriptor_set_update_after_bind_samplers: u32::default(),
max_descriptor_set_update_after_bind_uniform_buffers: u32::default(),
max_descriptor_set_update_after_bind_uniform_buffers_dynamic: u32::default(),
max_descriptor_set_update_after_bind_storage_buffers: u32::default(),
max_descriptor_set_update_after_bind_storage_buffers_dynamic: u32::default(),
max_descriptor_set_update_after_bind_sampled_images: u32::default(),
max_descriptor_set_update_after_bind_storage_images: u32::default(),
max_descriptor_set_update_after_bind_input_attachments: u32::default(),
}
}
}
impl PhysicalDeviceDescriptorIndexingPropertiesEXT {
pub fn builder<'a>() -> PhysicalDeviceDescriptorIndexingPropertiesEXTBuilder<'a> {
PhysicalDeviceDescriptorIndexingPropertiesEXTBuilder {
inner: PhysicalDeviceDescriptorIndexingPropertiesEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDeviceDescriptorIndexingPropertiesEXTBuilder<'a> {
inner: PhysicalDeviceDescriptorIndexingPropertiesEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsPhysicalDeviceProperties2
for PhysicalDeviceDescriptorIndexingPropertiesEXTBuilder<'_>
{
}
unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceDescriptorIndexingPropertiesEXT {}
impl<'a> ::std::ops::Deref for PhysicalDeviceDescriptorIndexingPropertiesEXTBuilder<'a> {
type Target = PhysicalDeviceDescriptorIndexingPropertiesEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceDescriptorIndexingPropertiesEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDeviceDescriptorIndexingPropertiesEXTBuilder<'a> {
pub fn max_update_after_bind_descriptors_in_all_pools(
mut self,
max_update_after_bind_descriptors_in_all_pools: u32,
) -> PhysicalDeviceDescriptorIndexingPropertiesEXTBuilder<'a> {
self.inner.max_update_after_bind_descriptors_in_all_pools =
max_update_after_bind_descriptors_in_all_pools;
self
}
pub fn shader_uniform_buffer_array_non_uniform_indexing_native(
mut self,
shader_uniform_buffer_array_non_uniform_indexing_native: bool,
) -> PhysicalDeviceDescriptorIndexingPropertiesEXTBuilder<'a> {
self.inner
.shader_uniform_buffer_array_non_uniform_indexing_native =
shader_uniform_buffer_array_non_uniform_indexing_native.into();
self
}
pub fn shader_sampled_image_array_non_uniform_indexing_native(
mut self,
shader_sampled_image_array_non_uniform_indexing_native: bool,
) -> PhysicalDeviceDescriptorIndexingPropertiesEXTBuilder<'a> {
self.inner
.shader_sampled_image_array_non_uniform_indexing_native =
shader_sampled_image_array_non_uniform_indexing_native.into();
self
}
pub fn shader_storage_buffer_array_non_uniform_indexing_native(
mut self,
shader_storage_buffer_array_non_uniform_indexing_native: bool,
) -> PhysicalDeviceDescriptorIndexingPropertiesEXTBuilder<'a> {
self.inner
.shader_storage_buffer_array_non_uniform_indexing_native =
shader_storage_buffer_array_non_uniform_indexing_native.into();
self
}
pub fn shader_storage_image_array_non_uniform_indexing_native(
mut self,
shader_storage_image_array_non_uniform_indexing_native: bool,
) -> PhysicalDeviceDescriptorIndexingPropertiesEXTBuilder<'a> {
self.inner
.shader_storage_image_array_non_uniform_indexing_native =
shader_storage_image_array_non_uniform_indexing_native.into();
self
}
pub fn shader_input_attachment_array_non_uniform_indexing_native(
mut self,
shader_input_attachment_array_non_uniform_indexing_native: bool,
) -> PhysicalDeviceDescriptorIndexingPropertiesEXTBuilder<'a> {
self.inner
.shader_input_attachment_array_non_uniform_indexing_native =
shader_input_attachment_array_non_uniform_indexing_native.into();
self
}
pub fn robust_buffer_access_update_after_bind(
mut self,
robust_buffer_access_update_after_bind: bool,
) -> PhysicalDeviceDescriptorIndexingPropertiesEXTBuilder<'a> {
self.inner.robust_buffer_access_update_after_bind =
robust_buffer_access_update_after_bind.into();
self
}
pub fn quad_divergent_implicit_lod(
mut self,
quad_divergent_implicit_lod: bool,
) -> PhysicalDeviceDescriptorIndexingPropertiesEXTBuilder<'a> {
self.inner.quad_divergent_implicit_lod = quad_divergent_implicit_lod.into();
self
}
pub fn max_per_stage_descriptor_update_after_bind_samplers(
mut self,
max_per_stage_descriptor_update_after_bind_samplers: u32,
) -> PhysicalDeviceDescriptorIndexingPropertiesEXTBuilder<'a> {
self.inner
.max_per_stage_descriptor_update_after_bind_samplers =
max_per_stage_descriptor_update_after_bind_samplers;
self
}
pub fn max_per_stage_descriptor_update_after_bind_uniform_buffers(
mut self,
max_per_stage_descriptor_update_after_bind_uniform_buffers: u32,
) -> PhysicalDeviceDescriptorIndexingPropertiesEXTBuilder<'a> {
self.inner
.max_per_stage_descriptor_update_after_bind_uniform_buffers =
max_per_stage_descriptor_update_after_bind_uniform_buffers;
self
}
pub fn max_per_stage_descriptor_update_after_bind_storage_buffers(
mut self,
max_per_stage_descriptor_update_after_bind_storage_buffers: u32,
) -> PhysicalDeviceDescriptorIndexingPropertiesEXTBuilder<'a> {
self.inner
.max_per_stage_descriptor_update_after_bind_storage_buffers =
max_per_stage_descriptor_update_after_bind_storage_buffers;
self
}
pub fn max_per_stage_descriptor_update_after_bind_sampled_images(
mut self,
max_per_stage_descriptor_update_after_bind_sampled_images: u32,
) -> PhysicalDeviceDescriptorIndexingPropertiesEXTBuilder<'a> {
self.inner
.max_per_stage_descriptor_update_after_bind_sampled_images =
max_per_stage_descriptor_update_after_bind_sampled_images;
self
}
pub fn max_per_stage_descriptor_update_after_bind_storage_images(
mut self,
max_per_stage_descriptor_update_after_bind_storage_images: u32,
) -> PhysicalDeviceDescriptorIndexingPropertiesEXTBuilder<'a> {
self.inner
.max_per_stage_descriptor_update_after_bind_storage_images =
max_per_stage_descriptor_update_after_bind_storage_images;
self
}
pub fn max_per_stage_descriptor_update_after_bind_input_attachments(
mut self,
max_per_stage_descriptor_update_after_bind_input_attachments: u32,
) -> PhysicalDeviceDescriptorIndexingPropertiesEXTBuilder<'a> {
self.inner
.max_per_stage_descriptor_update_after_bind_input_attachments =
max_per_stage_descriptor_update_after_bind_input_attachments;
self
}
pub fn max_per_stage_update_after_bind_resources(
mut self,
max_per_stage_update_after_bind_resources: u32,
) -> PhysicalDeviceDescriptorIndexingPropertiesEXTBuilder<'a> {
self.inner.max_per_stage_update_after_bind_resources =
max_per_stage_update_after_bind_resources;
self
}
pub fn max_descriptor_set_update_after_bind_samplers(
mut self,
max_descriptor_set_update_after_bind_samplers: u32,
) -> PhysicalDeviceDescriptorIndexingPropertiesEXTBuilder<'a> {
self.inner.max_descriptor_set_update_after_bind_samplers =
max_descriptor_set_update_after_bind_samplers;
self
}
pub fn max_descriptor_set_update_after_bind_uniform_buffers(
mut self,
max_descriptor_set_update_after_bind_uniform_buffers: u32,
) -> PhysicalDeviceDescriptorIndexingPropertiesEXTBuilder<'a> {
self.inner
.max_descriptor_set_update_after_bind_uniform_buffers =
max_descriptor_set_update_after_bind_uniform_buffers;
self
}
pub fn max_descriptor_set_update_after_bind_uniform_buffers_dynamic(
mut self,
max_descriptor_set_update_after_bind_uniform_buffers_dynamic: u32,
) -> PhysicalDeviceDescriptorIndexingPropertiesEXTBuilder<'a> {
self.inner
.max_descriptor_set_update_after_bind_uniform_buffers_dynamic =
max_descriptor_set_update_after_bind_uniform_buffers_dynamic;
self
}
pub fn max_descriptor_set_update_after_bind_storage_buffers(
mut self,
max_descriptor_set_update_after_bind_storage_buffers: u32,
) -> PhysicalDeviceDescriptorIndexingPropertiesEXTBuilder<'a> {
self.inner
.max_descriptor_set_update_after_bind_storage_buffers =
max_descriptor_set_update_after_bind_storage_buffers;
self
}
pub fn max_descriptor_set_update_after_bind_storage_buffers_dynamic(
mut self,
max_descriptor_set_update_after_bind_storage_buffers_dynamic: u32,
) -> PhysicalDeviceDescriptorIndexingPropertiesEXTBuilder<'a> {
self.inner
.max_descriptor_set_update_after_bind_storage_buffers_dynamic =
max_descriptor_set_update_after_bind_storage_buffers_dynamic;
self
}
pub fn max_descriptor_set_update_after_bind_sampled_images(
mut self,
max_descriptor_set_update_after_bind_sampled_images: u32,
) -> PhysicalDeviceDescriptorIndexingPropertiesEXTBuilder<'a> {
self.inner
.max_descriptor_set_update_after_bind_sampled_images =
max_descriptor_set_update_after_bind_sampled_images;
self
}
pub fn max_descriptor_set_update_after_bind_storage_images(
mut self,
max_descriptor_set_update_after_bind_storage_images: u32,
) -> PhysicalDeviceDescriptorIndexingPropertiesEXTBuilder<'a> {
self.inner
.max_descriptor_set_update_after_bind_storage_images =
max_descriptor_set_update_after_bind_storage_images;
self
}
pub fn max_descriptor_set_update_after_bind_input_attachments(
mut self,
max_descriptor_set_update_after_bind_input_attachments: u32,
) -> PhysicalDeviceDescriptorIndexingPropertiesEXTBuilder<'a> {
self.inner
.max_descriptor_set_update_after_bind_input_attachments =
max_descriptor_set_update_after_bind_input_attachments;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDeviceDescriptorIndexingPropertiesEXT {
self.inner
}
}
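// A minimal usage sketch (not part of the generated bindings): several property structs can be
// chained onto one `PhysicalDeviceProperties2` (assumed to be generated elsewhere in this
// module). As the `push_next` docs describe, each push is prepended, so the resulting chain is
// `properties2 -> shader_core -> indexing`.
#[allow(dead_code)]
fn example_query_descriptor_indexing_limits<'a>(
    indexing: &'a mut PhysicalDeviceDescriptorIndexingPropertiesEXT,
    shader_core: &'a mut PhysicalDeviceShaderCorePropertiesAMD,
) -> PhysicalDeviceProperties2Builder<'a> {
    PhysicalDeviceProperties2::builder()
        .push_next(indexing)
        .push_next(shader_core)
}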
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDescriptorSetLayoutBindingFlagsCreateInfoEXT.html>"]
pub struct DescriptorSetLayoutBindingFlagsCreateInfoEXT {
pub s_type: StructureType,
pub p_next: *const c_void,
pub binding_count: u32,
pub p_binding_flags: *const DescriptorBindingFlagsEXT,
}
impl ::std::default::Default for DescriptorSetLayoutBindingFlagsCreateInfoEXT {
fn default() -> DescriptorSetLayoutBindingFlagsCreateInfoEXT {
DescriptorSetLayoutBindingFlagsCreateInfoEXT {
s_type: StructureType::DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT,
p_next: ::std::ptr::null(),
binding_count: u32::default(),
p_binding_flags: ::std::ptr::null(),
}
}
}
impl DescriptorSetLayoutBindingFlagsCreateInfoEXT {
pub fn builder<'a>() -> DescriptorSetLayoutBindingFlagsCreateInfoEXTBuilder<'a> {
DescriptorSetLayoutBindingFlagsCreateInfoEXTBuilder {
inner: DescriptorSetLayoutBindingFlagsCreateInfoEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct DescriptorSetLayoutBindingFlagsCreateInfoEXTBuilder<'a> {
inner: DescriptorSetLayoutBindingFlagsCreateInfoEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsDescriptorSetLayoutCreateInfo
for DescriptorSetLayoutBindingFlagsCreateInfoEXTBuilder<'_>
{
}
unsafe impl ExtendsDescriptorSetLayoutCreateInfo for DescriptorSetLayoutBindingFlagsCreateInfoEXT {}
impl<'a> ::std::ops::Deref for DescriptorSetLayoutBindingFlagsCreateInfoEXTBuilder<'a> {
type Target = DescriptorSetLayoutBindingFlagsCreateInfoEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for DescriptorSetLayoutBindingFlagsCreateInfoEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> DescriptorSetLayoutBindingFlagsCreateInfoEXTBuilder<'a> {
pub fn binding_flags(
mut self,
binding_flags: &'a [DescriptorBindingFlagsEXT],
) -> DescriptorSetLayoutBindingFlagsCreateInfoEXTBuilder<'a> {
self.inner.binding_count = binding_flags.len() as _;
self.inner.p_binding_flags = binding_flags.as_ptr();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> DescriptorSetLayoutBindingFlagsCreateInfoEXT {
self.inner
}
}
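// A minimal usage sketch (not part of the generated bindings): the `binding_flags` slice must
// contain one entry per binding of the `DescriptorSetLayoutCreateInfo` it extends (that create
// info and its builder are assumed to be generated elsewhere in this module). Typical entries
// are values such as `DescriptorBindingFlagsEXT::PARTIALLY_BOUND` (assumed constant name).
#[allow(dead_code)]
fn example_binding_flags<'a>(
    flags: &'a [DescriptorBindingFlagsEXT],
    binding_flags_info: &'a mut DescriptorSetLayoutBindingFlagsCreateInfoEXT,
) -> DescriptorSetLayoutCreateInfoBuilder<'a> {
    *binding_flags_info = DescriptorSetLayoutBindingFlagsCreateInfoEXT::builder()
        .binding_flags(flags)
        .build();
    DescriptorSetLayoutCreateInfo::builder().push_next(binding_flags_info)
}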
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDescriptorSetVariableDescriptorCountAllocateInfoEXT.html>"]
pub struct DescriptorSetVariableDescriptorCountAllocateInfoEXT {
pub s_type: StructureType,
pub p_next: *const c_void,
pub descriptor_set_count: u32,
pub p_descriptor_counts: *const u32,
}
impl ::std::default::Default for DescriptorSetVariableDescriptorCountAllocateInfoEXT {
fn default() -> DescriptorSetVariableDescriptorCountAllocateInfoEXT {
DescriptorSetVariableDescriptorCountAllocateInfoEXT {
s_type: StructureType::DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT,
p_next: ::std::ptr::null(),
descriptor_set_count: u32::default(),
p_descriptor_counts: ::std::ptr::null(),
}
}
}
impl DescriptorSetVariableDescriptorCountAllocateInfoEXT {
pub fn builder<'a>() -> DescriptorSetVariableDescriptorCountAllocateInfoEXTBuilder<'a> {
DescriptorSetVariableDescriptorCountAllocateInfoEXTBuilder {
inner: DescriptorSetVariableDescriptorCountAllocateInfoEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct DescriptorSetVariableDescriptorCountAllocateInfoEXTBuilder<'a> {
inner: DescriptorSetVariableDescriptorCountAllocateInfoEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsDescriptorSetAllocateInfo
for DescriptorSetVariableDescriptorCountAllocateInfoEXTBuilder<'_>
{
}
unsafe impl ExtendsDescriptorSetAllocateInfo
for DescriptorSetVariableDescriptorCountAllocateInfoEXT
{
}
impl<'a> ::std::ops::Deref for DescriptorSetVariableDescriptorCountAllocateInfoEXTBuilder<'a> {
type Target = DescriptorSetVariableDescriptorCountAllocateInfoEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for DescriptorSetVariableDescriptorCountAllocateInfoEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> DescriptorSetVariableDescriptorCountAllocateInfoEXTBuilder<'a> {
pub fn descriptor_counts(
mut self,
descriptor_counts: &'a [u32],
) -> DescriptorSetVariableDescriptorCountAllocateInfoEXTBuilder<'a> {
self.inner.descriptor_set_count = descriptor_counts.len() as _;
self.inner.p_descriptor_counts = descriptor_counts.as_ptr();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> DescriptorSetVariableDescriptorCountAllocateInfoEXT {
self.inner
}
}
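// A minimal usage sketch (not part of the generated bindings): supplies one variable descriptor
// count per set being allocated and chains the struct into `DescriptorSetAllocateInfo` (assumed
// to be generated elsewhere in this module). The slice length must match the allocate info's
// descriptor set count.
#[allow(dead_code)]
fn example_variable_descriptor_counts<'a>(
    descriptor_counts: &'a [u32],
    count_info: &'a mut DescriptorSetVariableDescriptorCountAllocateInfoEXT,
) -> DescriptorSetAllocateInfoBuilder<'a> {
    *count_info = DescriptorSetVariableDescriptorCountAllocateInfoEXT::builder()
        .descriptor_counts(descriptor_counts)
        .build();
    DescriptorSetAllocateInfo::builder().push_next(count_info)
}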
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDescriptorSetVariableDescriptorCountLayoutSupportEXT.html>"]
pub struct DescriptorSetVariableDescriptorCountLayoutSupportEXT {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub max_variable_descriptor_count: u32,
}
impl ::std::default::Default for DescriptorSetVariableDescriptorCountLayoutSupportEXT {
fn default() -> DescriptorSetVariableDescriptorCountLayoutSupportEXT {
DescriptorSetVariableDescriptorCountLayoutSupportEXT {
s_type: StructureType::DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT,
p_next: ::std::ptr::null_mut(),
max_variable_descriptor_count: u32::default(),
}
}
}
impl DescriptorSetVariableDescriptorCountLayoutSupportEXT {
pub fn builder<'a>() -> DescriptorSetVariableDescriptorCountLayoutSupportEXTBuilder<'a> {
DescriptorSetVariableDescriptorCountLayoutSupportEXTBuilder {
inner: DescriptorSetVariableDescriptorCountLayoutSupportEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct DescriptorSetVariableDescriptorCountLayoutSupportEXTBuilder<'a> {
inner: DescriptorSetVariableDescriptorCountLayoutSupportEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsDescriptorSetLayoutSupport
for DescriptorSetVariableDescriptorCountLayoutSupportEXTBuilder<'_>
{
}
unsafe impl ExtendsDescriptorSetLayoutSupport
for DescriptorSetVariableDescriptorCountLayoutSupportEXT
{
}
impl<'a> ::std::ops::Deref for DescriptorSetVariableDescriptorCountLayoutSupportEXTBuilder<'a> {
type Target = DescriptorSetVariableDescriptorCountLayoutSupportEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for DescriptorSetVariableDescriptorCountLayoutSupportEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> DescriptorSetVariableDescriptorCountLayoutSupportEXTBuilder<'a> {
pub fn max_variable_descriptor_count(
mut self,
max_variable_descriptor_count: u32,
) -> DescriptorSetVariableDescriptorCountLayoutSupportEXTBuilder<'a> {
self.inner.max_variable_descriptor_count = max_variable_descriptor_count;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> DescriptorSetVariableDescriptorCountLayoutSupportEXT {
self.inner
}
}
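// A minimal usage sketch (not part of the generated bindings): chains the output struct above
// onto `DescriptorSetLayoutSupport` (assumed to be generated elsewhere in this module, with the
// same `push_next` hook) so a `vkGetDescriptorSetLayoutSupport`-style query can report the
// maximum variable descriptor count.
#[allow(dead_code)]
fn example_variable_count_layout_support<'a>(
    variable_count_support: &'a mut DescriptorSetVariableDescriptorCountLayoutSupportEXT,
) -> DescriptorSetLayoutSupportBuilder<'a> {
    DescriptorSetLayoutSupport::builder().push_next(variable_count_support)
}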
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkAttachmentDescription2KHR.html>"]
pub struct AttachmentDescription2KHR {
pub s_type: StructureType,
pub p_next: *const c_void,
pub flags: AttachmentDescriptionFlags,
pub format: Format,
pub samples: SampleCountFlags,
pub load_op: AttachmentLoadOp,
pub store_op: AttachmentStoreOp,
pub stencil_load_op: AttachmentLoadOp,
pub stencil_store_op: AttachmentStoreOp,
pub initial_layout: ImageLayout,
pub final_layout: ImageLayout,
}
impl ::std::default::Default for AttachmentDescription2KHR {
fn default() -> AttachmentDescription2KHR {
AttachmentDescription2KHR {
s_type: StructureType::ATTACHMENT_DESCRIPTION_2_KHR,
p_next: ::std::ptr::null(),
flags: AttachmentDescriptionFlags::default(),
format: Format::default(),
samples: SampleCountFlags::default(),
load_op: AttachmentLoadOp::default(),
store_op: AttachmentStoreOp::default(),
stencil_load_op: AttachmentLoadOp::default(),
stencil_store_op: AttachmentStoreOp::default(),
initial_layout: ImageLayout::default(),
final_layout: ImageLayout::default(),
}
}
}
impl AttachmentDescription2KHR {
pub fn builder<'a>() -> AttachmentDescription2KHRBuilder<'a> {
AttachmentDescription2KHRBuilder {
inner: AttachmentDescription2KHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct AttachmentDescription2KHRBuilder<'a> {
inner: AttachmentDescription2KHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsAttachmentDescription2KHR {}
impl<'a> ::std::ops::Deref for AttachmentDescription2KHRBuilder<'a> {
type Target = AttachmentDescription2KHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for AttachmentDescription2KHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> AttachmentDescription2KHRBuilder<'a> {
pub fn flags(
mut self,
flags: AttachmentDescriptionFlags,
) -> AttachmentDescription2KHRBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn format(mut self, format: Format) -> AttachmentDescription2KHRBuilder<'a> {
self.inner.format = format;
self
}
pub fn samples(mut self, samples: SampleCountFlags) -> AttachmentDescription2KHRBuilder<'a> {
self.inner.samples = samples;
self
}
pub fn load_op(mut self, load_op: AttachmentLoadOp) -> AttachmentDescription2KHRBuilder<'a> {
self.inner.load_op = load_op;
self
}
pub fn store_op(mut self, store_op: AttachmentStoreOp) -> AttachmentDescription2KHRBuilder<'a> {
self.inner.store_op = store_op;
self
}
pub fn stencil_load_op(
mut self,
stencil_load_op: AttachmentLoadOp,
) -> AttachmentDescription2KHRBuilder<'a> {
self.inner.stencil_load_op = stencil_load_op;
self
}
pub fn stencil_store_op(
mut self,
stencil_store_op: AttachmentStoreOp,
) -> AttachmentDescription2KHRBuilder<'a> {
self.inner.stencil_store_op = stencil_store_op;
self
}
pub fn initial_layout(
mut self,
initial_layout: ImageLayout,
) -> AttachmentDescription2KHRBuilder<'a> {
self.inner.initial_layout = initial_layout;
self
}
pub fn final_layout(
mut self,
final_layout: ImageLayout,
) -> AttachmentDescription2KHRBuilder<'a> {
self.inner.final_layout = final_layout;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsAttachmentDescription2KHR>(
mut self,
next: &'a mut T,
) -> AttachmentDescription2KHRBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> AttachmentDescription2KHR {
self.inner
}
}
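// A minimal usage sketch (not part of the generated bindings): a single color attachment that is
// cleared on load and handed off for presentation. The enum constants used here
// (`Format::B8G8R8A8_UNORM`, `SampleCountFlags::TYPE_1`, `ImageLayout::PRESENT_SRC_KHR`, ...)
// are assumed to be the names generated elsewhere in this module; real code would pick whatever
// matches its swapchain.
#[allow(dead_code)]
fn example_color_attachment_description() -> AttachmentDescription2KHR {
    AttachmentDescription2KHR::builder()
        .format(Format::B8G8R8A8_UNORM)
        .samples(SampleCountFlags::TYPE_1)
        .load_op(AttachmentLoadOp::CLEAR)
        .store_op(AttachmentStoreOp::STORE)
        .stencil_load_op(AttachmentLoadOp::DONT_CARE)
        .stencil_store_op(AttachmentStoreOp::DONT_CARE)
        .initial_layout(ImageLayout::UNDEFINED)
        .final_layout(ImageLayout::PRESENT_SRC_KHR)
        .build()
}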
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkAttachmentReference2KHR.html>"]
pub struct AttachmentReference2KHR {
pub s_type: StructureType,
pub p_next: *const c_void,
pub attachment: u32,
pub layout: ImageLayout,
pub aspect_mask: ImageAspectFlags,
}
impl ::std::default::Default for AttachmentReference2KHR {
fn default() -> AttachmentReference2KHR {
AttachmentReference2KHR {
s_type: StructureType::ATTACHMENT_REFERENCE_2_KHR,
p_next: ::std::ptr::null(),
attachment: u32::default(),
layout: ImageLayout::default(),
aspect_mask: ImageAspectFlags::default(),
}
}
}
impl AttachmentReference2KHR {
pub fn builder<'a>() -> AttachmentReference2KHRBuilder<'a> {
AttachmentReference2KHRBuilder {
inner: AttachmentReference2KHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct AttachmentReference2KHRBuilder<'a> {
inner: AttachmentReference2KHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsAttachmentReference2KHR {}
impl<'a> ::std::ops::Deref for AttachmentReference2KHRBuilder<'a> {
type Target = AttachmentReference2KHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for AttachmentReference2KHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> AttachmentReference2KHRBuilder<'a> {
pub fn attachment(mut self, attachment: u32) -> AttachmentReference2KHRBuilder<'a> {
self.inner.attachment = attachment;
self
}
pub fn layout(mut self, layout: ImageLayout) -> AttachmentReference2KHRBuilder<'a> {
self.inner.layout = layout;
self
}
pub fn aspect_mask(
mut self,
aspect_mask: ImageAspectFlags,
) -> AttachmentReference2KHRBuilder<'a> {
self.inner.aspect_mask = aspect_mask;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsAttachmentReference2KHR>(
mut self,
next: &'a mut T,
) -> AttachmentReference2KHRBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> AttachmentReference2KHR {
self.inner
}
}
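// A minimal usage sketch (not part of the generated bindings): a reference to color attachment
// `attachment` in the layout used while rendering. `ImageLayout::COLOR_ATTACHMENT_OPTIMAL` and
// `ImageAspectFlags::COLOR` are assumed to be the constants generated elsewhere in this module.
#[allow(dead_code)]
fn example_color_attachment_reference(attachment: u32) -> AttachmentReference2KHR {
    AttachmentReference2KHR::builder()
        .attachment(attachment)
        .layout(ImageLayout::COLOR_ATTACHMENT_OPTIMAL)
        .aspect_mask(ImageAspectFlags::COLOR)
        .build()
}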
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkSubpassDescription2KHR.html>"]
pub struct SubpassDescription2KHR {
pub s_type: StructureType,
pub p_next: *const c_void,
pub flags: SubpassDescriptionFlags,
pub pipeline_bind_point: PipelineBindPoint,
pub view_mask: u32,
pub input_attachment_count: u32,
pub p_input_attachments: *const AttachmentReference2KHR,
pub color_attachment_count: u32,
pub p_color_attachments: *const AttachmentReference2KHR,
pub p_resolve_attachments: *const AttachmentReference2KHR,
pub p_depth_stencil_attachment: *const AttachmentReference2KHR,
pub preserve_attachment_count: u32,
pub p_preserve_attachments: *const u32,
}
impl ::std::default::Default for SubpassDescription2KHR {
fn default() -> SubpassDescription2KHR {
SubpassDescription2KHR {
s_type: StructureType::SUBPASS_DESCRIPTION_2_KHR,
p_next: ::std::ptr::null(),
flags: SubpassDescriptionFlags::default(),
pipeline_bind_point: PipelineBindPoint::default(),
view_mask: u32::default(),
input_attachment_count: u32::default(),
p_input_attachments: ::std::ptr::null(),
color_attachment_count: u32::default(),
p_color_attachments: ::std::ptr::null(),
p_resolve_attachments: ::std::ptr::null(),
p_depth_stencil_attachment: ::std::ptr::null(),
preserve_attachment_count: u32::default(),
p_preserve_attachments: ::std::ptr::null(),
}
}
}
impl SubpassDescription2KHR {
pub fn builder<'a>() -> SubpassDescription2KHRBuilder<'a> {
SubpassDescription2KHRBuilder {
inner: SubpassDescription2KHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct SubpassDescription2KHRBuilder<'a> {
inner: SubpassDescription2KHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsSubpassDescription2KHR {}
impl<'a> ::std::ops::Deref for SubpassDescription2KHRBuilder<'a> {
type Target = SubpassDescription2KHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for SubpassDescription2KHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> SubpassDescription2KHRBuilder<'a> {
pub fn flags(mut self, flags: SubpassDescriptionFlags) -> SubpassDescription2KHRBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn pipeline_bind_point(
mut self,
pipeline_bind_point: PipelineBindPoint,
) -> SubpassDescription2KHRBuilder<'a> {
self.inner.pipeline_bind_point = pipeline_bind_point;
self
}
pub fn view_mask(mut self, view_mask: u32) -> SubpassDescription2KHRBuilder<'a> {
self.inner.view_mask = view_mask;
self
}
pub fn input_attachments(
mut self,
input_attachments: &'a [AttachmentReference2KHR],
) -> SubpassDescription2KHRBuilder<'a> {
self.inner.input_attachment_count = input_attachments.len() as _;
self.inner.p_input_attachments = input_attachments.as_ptr();
self
}
pub fn color_attachments(
mut self,
color_attachments: &'a [AttachmentReference2KHR],
) -> SubpassDescription2KHRBuilder<'a> {
self.inner.color_attachment_count = color_attachments.len() as _;
self.inner.p_color_attachments = color_attachments.as_ptr();
self
}
pub fn resolve_attachments(
mut self,
resolve_attachments: &'a [AttachmentReference2KHR],
) -> SubpassDescription2KHRBuilder<'a> {
        // Per the Vulkan spec, `pResolveAttachments` is either null or an array of
        // `colorAttachmentCount` entries, so this setter writes the shared `color_attachment_count`.
        self.inner.color_attachment_count = resolve_attachments.len() as _;
self.inner.p_resolve_attachments = resolve_attachments.as_ptr();
self
}
pub fn depth_stencil_attachment(
mut self,
depth_stencil_attachment: &'a AttachmentReference2KHR,
) -> SubpassDescription2KHRBuilder<'a> {
self.inner.p_depth_stencil_attachment = depth_stencil_attachment;
self
}
pub fn preserve_attachments(
mut self,
preserve_attachments: &'a [u32],
) -> SubpassDescription2KHRBuilder<'a> {
self.inner.preserve_attachment_count = preserve_attachments.len() as _;
self.inner.p_preserve_attachments = preserve_attachments.as_ptr();
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsSubpassDescription2KHR>(
mut self,
next: &'a mut T,
) -> SubpassDescription2KHRBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> SubpassDescription2KHR {
self.inner
}
}
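// A minimal usage sketch (not part of the generated bindings): a graphics subpass over the given
// color attachments with a depth/stencil attachment. Returning the builder (rather than calling
// `build`) keeps the borrows of the reference slices alive for the caller.
// `PipelineBindPoint::GRAPHICS` is assumed to be a constant generated elsewhere in this module.
#[allow(dead_code)]
fn example_subpass_description<'a>(
    color_refs: &'a [AttachmentReference2KHR],
    depth_ref: &'a AttachmentReference2KHR,
) -> SubpassDescription2KHRBuilder<'a> {
    SubpassDescription2KHR::builder()
        .pipeline_bind_point(PipelineBindPoint::GRAPHICS)
        .color_attachments(color_refs)
        .depth_stencil_attachment(depth_ref)
}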
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkSubpassDependency2KHR.html>"]
pub struct SubpassDependency2KHR {
pub s_type: StructureType,
pub p_next: *const c_void,
pub src_subpass: u32,
pub dst_subpass: u32,
pub src_stage_mask: PipelineStageFlags,
pub dst_stage_mask: PipelineStageFlags,
pub src_access_mask: AccessFlags,
pub dst_access_mask: AccessFlags,
pub dependency_flags: DependencyFlags,
pub view_offset: i32,
}
impl ::std::default::Default for SubpassDependency2KHR {
fn default() -> SubpassDependency2KHR {
SubpassDependency2KHR {
s_type: StructureType::SUBPASS_DEPENDENCY_2_KHR,
p_next: ::std::ptr::null(),
src_subpass: u32::default(),
dst_subpass: u32::default(),
src_stage_mask: PipelineStageFlags::default(),
dst_stage_mask: PipelineStageFlags::default(),
src_access_mask: AccessFlags::default(),
dst_access_mask: AccessFlags::default(),
dependency_flags: DependencyFlags::default(),
view_offset: i32::default(),
}
}
}
impl SubpassDependency2KHR {
pub fn builder<'a>() -> SubpassDependency2KHRBuilder<'a> {
SubpassDependency2KHRBuilder {
inner: SubpassDependency2KHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct SubpassDependency2KHRBuilder<'a> {
inner: SubpassDependency2KHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsSubpassDependency2KHR {}
impl<'a> ::std::ops::Deref for SubpassDependency2KHRBuilder<'a> {
type Target = SubpassDependency2KHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for SubpassDependency2KHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> SubpassDependency2KHRBuilder<'a> {
pub fn src_subpass(mut self, src_subpass: u32) -> SubpassDependency2KHRBuilder<'a> {
self.inner.src_subpass = src_subpass;
self
}
pub fn dst_subpass(mut self, dst_subpass: u32) -> SubpassDependency2KHRBuilder<'a> {
self.inner.dst_subpass = dst_subpass;
self
}
pub fn src_stage_mask(
mut self,
src_stage_mask: PipelineStageFlags,
) -> SubpassDependency2KHRBuilder<'a> {
self.inner.src_stage_mask = src_stage_mask;
self
}
pub fn dst_stage_mask(
mut self,
dst_stage_mask: PipelineStageFlags,
) -> SubpassDependency2KHRBuilder<'a> {
self.inner.dst_stage_mask = dst_stage_mask;
self
}
pub fn src_access_mask(
mut self,
src_access_mask: AccessFlags,
) -> SubpassDependency2KHRBuilder<'a> {
self.inner.src_access_mask = src_access_mask;
self
}
pub fn dst_access_mask(
mut self,
dst_access_mask: AccessFlags,
) -> SubpassDependency2KHRBuilder<'a> {
self.inner.dst_access_mask = dst_access_mask;
self
}
pub fn dependency_flags(
mut self,
dependency_flags: DependencyFlags,
) -> SubpassDependency2KHRBuilder<'a> {
self.inner.dependency_flags = dependency_flags;
self
}
pub fn view_offset(mut self, view_offset: i32) -> SubpassDependency2KHRBuilder<'a> {
self.inner.view_offset = view_offset;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsSubpassDependency2KHR>(
mut self,
next: &'a mut T,
) -> SubpassDependency2KHRBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> SubpassDependency2KHR {
self.inner
}
}
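// A minimal usage sketch (not part of the generated bindings): an external-to-first-subpass
// dependency covering a color attachment write. `SUBPASS_EXTERNAL`,
// `PipelineStageFlags::COLOR_ATTACHMENT_OUTPUT` and `AccessFlags::COLOR_ATTACHMENT_WRITE` are
// assumed to be the constants generated elsewhere in this module.
#[allow(dead_code)]
fn example_external_color_dependency() -> SubpassDependency2KHR {
    SubpassDependency2KHR::builder()
        .src_subpass(SUBPASS_EXTERNAL)
        .dst_subpass(0)
        .src_stage_mask(PipelineStageFlags::COLOR_ATTACHMENT_OUTPUT)
        .dst_stage_mask(PipelineStageFlags::COLOR_ATTACHMENT_OUTPUT)
        .src_access_mask(AccessFlags::empty())
        .dst_access_mask(AccessFlags::COLOR_ATTACHMENT_WRITE)
        .build()
}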
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkRenderPassCreateInfo2KHR.html>"]
pub struct RenderPassCreateInfo2KHR {
pub s_type: StructureType,
pub p_next: *const c_void,
pub flags: RenderPassCreateFlags,
pub attachment_count: u32,
pub p_attachments: *const AttachmentDescription2KHR,
pub subpass_count: u32,
pub p_subpasses: *const SubpassDescription2KHR,
pub dependency_count: u32,
pub p_dependencies: *const SubpassDependency2KHR,
pub correlated_view_mask_count: u32,
pub p_correlated_view_masks: *const u32,
}
impl ::std::default::Default for RenderPassCreateInfo2KHR {
fn default() -> RenderPassCreateInfo2KHR {
RenderPassCreateInfo2KHR {
s_type: StructureType::RENDER_PASS_CREATE_INFO_2_KHR,
p_next: ::std::ptr::null(),
flags: RenderPassCreateFlags::default(),
attachment_count: u32::default(),
p_attachments: ::std::ptr::null(),
subpass_count: u32::default(),
p_subpasses: ::std::ptr::null(),
dependency_count: u32::default(),
p_dependencies: ::std::ptr::null(),
correlated_view_mask_count: u32::default(),
p_correlated_view_masks: ::std::ptr::null(),
}
}
}
impl RenderPassCreateInfo2KHR {
pub fn builder<'a>() -> RenderPassCreateInfo2KHRBuilder<'a> {
RenderPassCreateInfo2KHRBuilder {
inner: RenderPassCreateInfo2KHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct RenderPassCreateInfo2KHRBuilder<'a> {
inner: RenderPassCreateInfo2KHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsRenderPassCreateInfo2KHR {}
impl<'a> ::std::ops::Deref for RenderPassCreateInfo2KHRBuilder<'a> {
type Target = RenderPassCreateInfo2KHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for RenderPassCreateInfo2KHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> RenderPassCreateInfo2KHRBuilder<'a> {
pub fn flags(mut self, flags: RenderPassCreateFlags) -> RenderPassCreateInfo2KHRBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn attachments(
mut self,
attachments: &'a [AttachmentDescription2KHR],
) -> RenderPassCreateInfo2KHRBuilder<'a> {
self.inner.attachment_count = attachments.len() as _;
self.inner.p_attachments = attachments.as_ptr();
self
}
pub fn subpasses(
mut self,
subpasses: &'a [SubpassDescription2KHR],
) -> RenderPassCreateInfo2KHRBuilder<'a> {
self.inner.subpass_count = subpasses.len() as _;
self.inner.p_subpasses = subpasses.as_ptr();
self
}
pub fn dependencies(
mut self,
dependencies: &'a [SubpassDependency2KHR],
) -> RenderPassCreateInfo2KHRBuilder<'a> {
self.inner.dependency_count = dependencies.len() as _;
self.inner.p_dependencies = dependencies.as_ptr();
self
}
pub fn correlated_view_masks(
mut self,
correlated_view_masks: &'a [u32],
) -> RenderPassCreateInfo2KHRBuilder<'a> {
self.inner.correlated_view_mask_count = correlated_view_masks.len() as _;
self.inner.p_correlated_view_masks = correlated_view_masks.as_ptr();
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsRenderPassCreateInfo2KHR>(
mut self,
next: &'a mut T,
) -> RenderPassCreateInfo2KHRBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> RenderPassCreateInfo2KHR {
self.inner
}
}
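// A usage sketch for the slice setters above (the function name and parameters are
// illustrative). Each setter records both the slice pointer and its length, and the
// `'a` lifetime ties the builder to the borrowed slices, which is why the builder
// rather than the built struct is returned here.
#[allow(dead_code)]
fn example_render_pass_create_info2_khr<'a>(
    attachments: &'a [AttachmentDescription2KHR],
    subpasses: &'a [SubpassDescription2KHR],
    dependencies: &'a [SubpassDependency2KHR],
) -> RenderPassCreateInfo2KHRBuilder<'a> {
    RenderPassCreateInfo2KHR::builder()
        .attachments(attachments)
        .subpasses(subpasses)
        .dependencies(dependencies)
}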
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkSubpassBeginInfoKHR.html>"]
pub struct SubpassBeginInfoKHR {
pub s_type: StructureType,
pub p_next: *const c_void,
pub contents: SubpassContents,
}
impl ::std::default::Default for SubpassBeginInfoKHR {
fn default() -> SubpassBeginInfoKHR {
SubpassBeginInfoKHR {
s_type: StructureType::SUBPASS_BEGIN_INFO_KHR,
p_next: ::std::ptr::null(),
contents: SubpassContents::default(),
}
}
}
impl SubpassBeginInfoKHR {
pub fn builder<'a>() -> SubpassBeginInfoKHRBuilder<'a> {
SubpassBeginInfoKHRBuilder {
inner: SubpassBeginInfoKHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct SubpassBeginInfoKHRBuilder<'a> {
inner: SubpassBeginInfoKHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsSubpassBeginInfoKHR {}
impl<'a> ::std::ops::Deref for SubpassBeginInfoKHRBuilder<'a> {
type Target = SubpassBeginInfoKHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for SubpassBeginInfoKHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> SubpassBeginInfoKHRBuilder<'a> {
pub fn contents(mut self, contents: SubpassContents) -> SubpassBeginInfoKHRBuilder<'a> {
self.inner.contents = contents;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsSubpassBeginInfoKHR>(
mut self,
next: &'a mut T,
) -> SubpassBeginInfoKHRBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> SubpassBeginInfoKHR {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkSubpassEndInfoKHR.html>"]
pub struct SubpassEndInfoKHR {
pub s_type: StructureType,
pub p_next: *const c_void,
}
impl ::std::default::Default for SubpassEndInfoKHR {
fn default() -> SubpassEndInfoKHR {
SubpassEndInfoKHR {
s_type: StructureType::SUBPASS_END_INFO_KHR,
p_next: ::std::ptr::null(),
}
}
}
impl SubpassEndInfoKHR {
pub fn builder<'a>() -> SubpassEndInfoKHRBuilder<'a> {
SubpassEndInfoKHRBuilder {
inner: SubpassEndInfoKHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct SubpassEndInfoKHRBuilder<'a> {
inner: SubpassEndInfoKHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsSubpassEndInfoKHR {}
impl<'a> ::std::ops::Deref for SubpassEndInfoKHRBuilder<'a> {
type Target = SubpassEndInfoKHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for SubpassEndInfoKHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> SubpassEndInfoKHRBuilder<'a> {
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsSubpassEndInfoKHR>(
mut self,
next: &'a mut T,
) -> SubpassEndInfoKHRBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> SubpassEndInfoKHR {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkVertexInputBindingDivisorDescriptionEXT.html>"]
pub struct VertexInputBindingDivisorDescriptionEXT {
pub binding: u32,
pub divisor: u32,
}
impl VertexInputBindingDivisorDescriptionEXT {
pub fn builder<'a>() -> VertexInputBindingDivisorDescriptionEXTBuilder<'a> {
VertexInputBindingDivisorDescriptionEXTBuilder {
inner: VertexInputBindingDivisorDescriptionEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct VertexInputBindingDivisorDescriptionEXTBuilder<'a> {
inner: VertexInputBindingDivisorDescriptionEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for VertexInputBindingDivisorDescriptionEXTBuilder<'a> {
type Target = VertexInputBindingDivisorDescriptionEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for VertexInputBindingDivisorDescriptionEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> VertexInputBindingDivisorDescriptionEXTBuilder<'a> {
pub fn binding(mut self, binding: u32) -> VertexInputBindingDivisorDescriptionEXTBuilder<'a> {
self.inner.binding = binding;
self
}
pub fn divisor(mut self, divisor: u32) -> VertexInputBindingDivisorDescriptionEXTBuilder<'a> {
self.inner.divisor = divisor;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> VertexInputBindingDivisorDescriptionEXT {
self.inner
}
}
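// A small sketch of the builder above (the function name is illustrative): this entry
// would make binding 1 advance its per-instance attribute data once every 4 instances.
#[allow(dead_code)]
fn example_vertex_binding_divisor() -> VertexInputBindingDivisorDescriptionEXT {
    VertexInputBindingDivisorDescriptionEXT::builder()
        .binding(1)
        .divisor(4)
        .build()
}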
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPipelineVertexInputDivisorStateCreateInfoEXT.html>"]
pub struct PipelineVertexInputDivisorStateCreateInfoEXT {
pub s_type: StructureType,
pub p_next: *const c_void,
pub vertex_binding_divisor_count: u32,
pub p_vertex_binding_divisors: *const VertexInputBindingDivisorDescriptionEXT,
}
impl ::std::default::Default for PipelineVertexInputDivisorStateCreateInfoEXT {
fn default() -> PipelineVertexInputDivisorStateCreateInfoEXT {
PipelineVertexInputDivisorStateCreateInfoEXT {
s_type: StructureType::PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT,
p_next: ::std::ptr::null(),
vertex_binding_divisor_count: u32::default(),
p_vertex_binding_divisors: ::std::ptr::null(),
}
}
}
impl PipelineVertexInputDivisorStateCreateInfoEXT {
pub fn builder<'a>() -> PipelineVertexInputDivisorStateCreateInfoEXTBuilder<'a> {
PipelineVertexInputDivisorStateCreateInfoEXTBuilder {
inner: PipelineVertexInputDivisorStateCreateInfoEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PipelineVertexInputDivisorStateCreateInfoEXTBuilder<'a> {
inner: PipelineVertexInputDivisorStateCreateInfoEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsPipelineVertexInputStateCreateInfo
for PipelineVertexInputDivisorStateCreateInfoEXTBuilder<'_>
{
}
unsafe impl ExtendsPipelineVertexInputStateCreateInfo
for PipelineVertexInputDivisorStateCreateInfoEXT
{
}
impl<'a> ::std::ops::Deref for PipelineVertexInputDivisorStateCreateInfoEXTBuilder<'a> {
type Target = PipelineVertexInputDivisorStateCreateInfoEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PipelineVertexInputDivisorStateCreateInfoEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PipelineVertexInputDivisorStateCreateInfoEXTBuilder<'a> {
pub fn vertex_binding_divisors(
mut self,
vertex_binding_divisors: &'a [VertexInputBindingDivisorDescriptionEXT],
) -> PipelineVertexInputDivisorStateCreateInfoEXTBuilder<'a> {
self.inner.vertex_binding_divisor_count = vertex_binding_divisors.len() as _;
self.inner.p_vertex_binding_divisors = vertex_binding_divisors.as_ptr();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PipelineVertexInputDivisorStateCreateInfoEXT {
self.inner
}
}
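// A sketch of how the `ExtendsPipelineVertexInputStateCreateInfo` impls above are
// intended to be used, assuming the `PipelineVertexInputStateCreateInfo` builder
// generated earlier in this module (the function name and parameter are illustrative).
// `push_next` splices the divisor state into the root struct's `p_next` chain; the
// caller is expected to have filled it via `vertex_binding_divisors` first.
#[allow(dead_code)]
fn example_vertex_input_with_divisors<'a>(
    divisor_state: &'a mut PipelineVertexInputDivisorStateCreateInfoEXT,
) -> PipelineVertexInputStateCreateInfoBuilder<'a> {
    PipelineVertexInputStateCreateInfo::builder().push_next(divisor_state)
}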
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT.html>"]
pub struct PhysicalDeviceVertexAttributeDivisorPropertiesEXT {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub max_vertex_attrib_divisor: u32,
}
impl ::std::default::Default for PhysicalDeviceVertexAttributeDivisorPropertiesEXT {
fn default() -> PhysicalDeviceVertexAttributeDivisorPropertiesEXT {
PhysicalDeviceVertexAttributeDivisorPropertiesEXT {
s_type: StructureType::PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT,
p_next: ::std::ptr::null_mut(),
max_vertex_attrib_divisor: u32::default(),
}
}
}
impl PhysicalDeviceVertexAttributeDivisorPropertiesEXT {
pub fn builder<'a>() -> PhysicalDeviceVertexAttributeDivisorPropertiesEXTBuilder<'a> {
PhysicalDeviceVertexAttributeDivisorPropertiesEXTBuilder {
inner: PhysicalDeviceVertexAttributeDivisorPropertiesEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDeviceVertexAttributeDivisorPropertiesEXTBuilder<'a> {
inner: PhysicalDeviceVertexAttributeDivisorPropertiesEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsPhysicalDeviceProperties2
for PhysicalDeviceVertexAttributeDivisorPropertiesEXTBuilder<'_>
{
}
unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceVertexAttributeDivisorPropertiesEXT {}
impl<'a> ::std::ops::Deref for PhysicalDeviceVertexAttributeDivisorPropertiesEXTBuilder<'a> {
type Target = PhysicalDeviceVertexAttributeDivisorPropertiesEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceVertexAttributeDivisorPropertiesEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDeviceVertexAttributeDivisorPropertiesEXTBuilder<'a> {
pub fn max_vertex_attrib_divisor(
mut self,
max_vertex_attrib_divisor: u32,
) -> PhysicalDeviceVertexAttributeDivisorPropertiesEXTBuilder<'a> {
self.inner.max_vertex_attrib_divisor = max_vertex_attrib_divisor;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDeviceVertexAttributeDivisorPropertiesEXT {
self.inner
}
}
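// A sketch for the query-side use of the struct above (names illustrative): chained
// onto `PhysicalDeviceProperties2`, whose builder is assumed to be generated earlier
// in this module, the driver fills `max_vertex_attrib_divisor` when the properties
// query walks the `p_next` chain.
#[allow(dead_code)]
fn example_query_vertex_attribute_divisor_props<'a>(
    divisor_props: &'a mut PhysicalDeviceVertexAttributeDivisorPropertiesEXT,
) -> PhysicalDeviceProperties2Builder<'a> {
    PhysicalDeviceProperties2::builder().push_next(divisor_props)
}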
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDevicePCIBusInfoPropertiesEXT.html>"]
pub struct PhysicalDevicePCIBusInfoPropertiesEXT {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub pci_domain: u32,
pub pci_bus: u32,
pub pci_device: u32,
pub pci_function: u32,
}
impl ::std::default::Default for PhysicalDevicePCIBusInfoPropertiesEXT {
fn default() -> PhysicalDevicePCIBusInfoPropertiesEXT {
PhysicalDevicePCIBusInfoPropertiesEXT {
s_type: StructureType::PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT,
p_next: ::std::ptr::null_mut(),
pci_domain: u32::default(),
pci_bus: u32::default(),
pci_device: u32::default(),
pci_function: u32::default(),
}
}
}
impl PhysicalDevicePCIBusInfoPropertiesEXT {
pub fn builder<'a>() -> PhysicalDevicePCIBusInfoPropertiesEXTBuilder<'a> {
PhysicalDevicePCIBusInfoPropertiesEXTBuilder {
inner: PhysicalDevicePCIBusInfoPropertiesEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDevicePCIBusInfoPropertiesEXTBuilder<'a> {
inner: PhysicalDevicePCIBusInfoPropertiesEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDevicePCIBusInfoPropertiesEXTBuilder<'_> {}
unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDevicePCIBusInfoPropertiesEXT {}
impl<'a> ::std::ops::Deref for PhysicalDevicePCIBusInfoPropertiesEXTBuilder<'a> {
type Target = PhysicalDevicePCIBusInfoPropertiesEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDevicePCIBusInfoPropertiesEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDevicePCIBusInfoPropertiesEXTBuilder<'a> {
pub fn pci_domain(
mut self,
pci_domain: u32,
) -> PhysicalDevicePCIBusInfoPropertiesEXTBuilder<'a> {
self.inner.pci_domain = pci_domain;
self
}
pub fn pci_bus(mut self, pci_bus: u32) -> PhysicalDevicePCIBusInfoPropertiesEXTBuilder<'a> {
self.inner.pci_bus = pci_bus;
self
}
pub fn pci_device(
mut self,
pci_device: u32,
) -> PhysicalDevicePCIBusInfoPropertiesEXTBuilder<'a> {
self.inner.pci_device = pci_device;
self
}
pub fn pci_function(
mut self,
pci_function: u32,
) -> PhysicalDevicePCIBusInfoPropertiesEXTBuilder<'a> {
self.inner.pci_function = pci_function;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDevicePCIBusInfoPropertiesEXT {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkImportAndroidHardwareBufferInfoANDROID.html>"]
pub struct ImportAndroidHardwareBufferInfoANDROID {
pub s_type: StructureType,
pub p_next: *const c_void,
pub buffer: *mut AHardwareBuffer,
}
impl ::std::default::Default for ImportAndroidHardwareBufferInfoANDROID {
fn default() -> ImportAndroidHardwareBufferInfoANDROID {
ImportAndroidHardwareBufferInfoANDROID {
s_type: StructureType::IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID,
p_next: ::std::ptr::null(),
buffer: ::std::ptr::null_mut(),
}
}
}
impl ImportAndroidHardwareBufferInfoANDROID {
pub fn builder<'a>() -> ImportAndroidHardwareBufferInfoANDROIDBuilder<'a> {
ImportAndroidHardwareBufferInfoANDROIDBuilder {
inner: ImportAndroidHardwareBufferInfoANDROID::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ImportAndroidHardwareBufferInfoANDROIDBuilder<'a> {
inner: ImportAndroidHardwareBufferInfoANDROID,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsMemoryAllocateInfo for ImportAndroidHardwareBufferInfoANDROIDBuilder<'_> {}
unsafe impl ExtendsMemoryAllocateInfo for ImportAndroidHardwareBufferInfoANDROID {}
impl<'a> ::std::ops::Deref for ImportAndroidHardwareBufferInfoANDROIDBuilder<'a> {
type Target = ImportAndroidHardwareBufferInfoANDROID;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ImportAndroidHardwareBufferInfoANDROIDBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ImportAndroidHardwareBufferInfoANDROIDBuilder<'a> {
pub fn buffer(
mut self,
buffer: *mut AHardwareBuffer,
) -> ImportAndroidHardwareBufferInfoANDROIDBuilder<'a> {
self.inner.buffer = buffer;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ImportAndroidHardwareBufferInfoANDROID {
self.inner
}
}
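// A sketch of importing an `AHardwareBuffer` as device memory (names illustrative):
// the `ExtendsMemoryAllocateInfo` impls above let the import info be chained onto a
// `MemoryAllocateInfo`, whose builder and `allocation_size`/`memory_type_index`
// setters are assumed to be generated earlier in this module.
#[allow(dead_code)]
fn example_import_hardware_buffer_allocation<'a>(
    import_info: &'a mut ImportAndroidHardwareBufferInfoANDROID,
    allocation_size: DeviceSize,
    memory_type_index: u32,
) -> MemoryAllocateInfoBuilder<'a> {
    MemoryAllocateInfo::builder()
        .allocation_size(allocation_size)
        .memory_type_index(memory_type_index)
        .push_next(import_info)
}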
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkAndroidHardwareBufferUsageANDROID.html>"]
pub struct AndroidHardwareBufferUsageANDROID {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub android_hardware_buffer_usage: u64,
}
impl ::std::default::Default for AndroidHardwareBufferUsageANDROID {
fn default() -> AndroidHardwareBufferUsageANDROID {
AndroidHardwareBufferUsageANDROID {
s_type: StructureType::ANDROID_HARDWARE_BUFFER_USAGE_ANDROID,
p_next: ::std::ptr::null_mut(),
android_hardware_buffer_usage: u64::default(),
}
}
}
impl AndroidHardwareBufferUsageANDROID {
pub fn builder<'a>() -> AndroidHardwareBufferUsageANDROIDBuilder<'a> {
AndroidHardwareBufferUsageANDROIDBuilder {
inner: AndroidHardwareBufferUsageANDROID::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct AndroidHardwareBufferUsageANDROIDBuilder<'a> {
inner: AndroidHardwareBufferUsageANDROID,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsImageFormatProperties2 for AndroidHardwareBufferUsageANDROIDBuilder<'_> {}
unsafe impl ExtendsImageFormatProperties2 for AndroidHardwareBufferUsageANDROID {}
impl<'a> ::std::ops::Deref for AndroidHardwareBufferUsageANDROIDBuilder<'a> {
type Target = AndroidHardwareBufferUsageANDROID;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for AndroidHardwareBufferUsageANDROIDBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> AndroidHardwareBufferUsageANDROIDBuilder<'a> {
pub fn android_hardware_buffer_usage(
mut self,
android_hardware_buffer_usage: u64,
) -> AndroidHardwareBufferUsageANDROIDBuilder<'a> {
self.inner.android_hardware_buffer_usage = android_hardware_buffer_usage;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> AndroidHardwareBufferUsageANDROID {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkAndroidHardwareBufferPropertiesANDROID.html>"]
pub struct AndroidHardwareBufferPropertiesANDROID {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub allocation_size: DeviceSize,
pub memory_type_bits: u32,
}
impl ::std::default::Default for AndroidHardwareBufferPropertiesANDROID {
fn default() -> AndroidHardwareBufferPropertiesANDROID {
AndroidHardwareBufferPropertiesANDROID {
s_type: StructureType::ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID,
p_next: ::std::ptr::null_mut(),
allocation_size: DeviceSize::default(),
memory_type_bits: u32::default(),
}
}
}
impl AndroidHardwareBufferPropertiesANDROID {
pub fn builder<'a>() -> AndroidHardwareBufferPropertiesANDROIDBuilder<'a> {
AndroidHardwareBufferPropertiesANDROIDBuilder {
inner: AndroidHardwareBufferPropertiesANDROID::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct AndroidHardwareBufferPropertiesANDROIDBuilder<'a> {
inner: AndroidHardwareBufferPropertiesANDROID,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsAndroidHardwareBufferPropertiesANDROID {}
impl<'a> ::std::ops::Deref for AndroidHardwareBufferPropertiesANDROIDBuilder<'a> {
type Target = AndroidHardwareBufferPropertiesANDROID;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for AndroidHardwareBufferPropertiesANDROIDBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> AndroidHardwareBufferPropertiesANDROIDBuilder<'a> {
pub fn allocation_size(
mut self,
allocation_size: DeviceSize,
) -> AndroidHardwareBufferPropertiesANDROIDBuilder<'a> {
self.inner.allocation_size = allocation_size;
self
}
pub fn memory_type_bits(
mut self,
memory_type_bits: u32,
) -> AndroidHardwareBufferPropertiesANDROIDBuilder<'a> {
self.inner.memory_type_bits = memory_type_bits;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsAndroidHardwareBufferPropertiesANDROID>(
mut self,
next: &'a mut T,
) -> AndroidHardwareBufferPropertiesANDROIDBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> AndroidHardwareBufferPropertiesANDROID {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkMemoryGetAndroidHardwareBufferInfoANDROID.html>"]
pub struct MemoryGetAndroidHardwareBufferInfoANDROID {
pub s_type: StructureType,
pub p_next: *const c_void,
pub memory: DeviceMemory,
}
impl ::std::default::Default for MemoryGetAndroidHardwareBufferInfoANDROID {
fn default() -> MemoryGetAndroidHardwareBufferInfoANDROID {
MemoryGetAndroidHardwareBufferInfoANDROID {
s_type: StructureType::MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID,
p_next: ::std::ptr::null(),
memory: DeviceMemory::default(),
}
}
}
impl MemoryGetAndroidHardwareBufferInfoANDROID {
pub fn builder<'a>() -> MemoryGetAndroidHardwareBufferInfoANDROIDBuilder<'a> {
MemoryGetAndroidHardwareBufferInfoANDROIDBuilder {
inner: MemoryGetAndroidHardwareBufferInfoANDROID::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct MemoryGetAndroidHardwareBufferInfoANDROIDBuilder<'a> {
inner: MemoryGetAndroidHardwareBufferInfoANDROID,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsMemoryGetAndroidHardwareBufferInfoANDROID {}
impl<'a> ::std::ops::Deref for MemoryGetAndroidHardwareBufferInfoANDROIDBuilder<'a> {
type Target = MemoryGetAndroidHardwareBufferInfoANDROID;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for MemoryGetAndroidHardwareBufferInfoANDROIDBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> MemoryGetAndroidHardwareBufferInfoANDROIDBuilder<'a> {
pub fn memory(
mut self,
memory: DeviceMemory,
) -> MemoryGetAndroidHardwareBufferInfoANDROIDBuilder<'a> {
self.inner.memory = memory;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsMemoryGetAndroidHardwareBufferInfoANDROID>(
mut self,
next: &'a mut T,
) -> MemoryGetAndroidHardwareBufferInfoANDROIDBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> MemoryGetAndroidHardwareBufferInfoANDROID {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkAndroidHardwareBufferFormatPropertiesANDROID.html>"]
pub struct AndroidHardwareBufferFormatPropertiesANDROID {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub format: Format,
pub external_format: u64,
pub format_features: FormatFeatureFlags,
pub sampler_ycbcr_conversion_components: ComponentMapping,
pub suggested_ycbcr_model: SamplerYcbcrModelConversion,
pub suggested_ycbcr_range: SamplerYcbcrRange,
pub suggested_x_chroma_offset: ChromaLocation,
pub suggested_y_chroma_offset: ChromaLocation,
}
impl ::std::default::Default for AndroidHardwareBufferFormatPropertiesANDROID {
fn default() -> AndroidHardwareBufferFormatPropertiesANDROID {
AndroidHardwareBufferFormatPropertiesANDROID {
s_type: StructureType::ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID,
p_next: ::std::ptr::null_mut(),
format: Format::default(),
external_format: u64::default(),
format_features: FormatFeatureFlags::default(),
sampler_ycbcr_conversion_components: ComponentMapping::default(),
suggested_ycbcr_model: SamplerYcbcrModelConversion::default(),
suggested_ycbcr_range: SamplerYcbcrRange::default(),
suggested_x_chroma_offset: ChromaLocation::default(),
suggested_y_chroma_offset: ChromaLocation::default(),
}
}
}
impl AndroidHardwareBufferFormatPropertiesANDROID {
pub fn builder<'a>() -> AndroidHardwareBufferFormatPropertiesANDROIDBuilder<'a> {
AndroidHardwareBufferFormatPropertiesANDROIDBuilder {
inner: AndroidHardwareBufferFormatPropertiesANDROID::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct AndroidHardwareBufferFormatPropertiesANDROIDBuilder<'a> {
inner: AndroidHardwareBufferFormatPropertiesANDROID,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsAndroidHardwareBufferPropertiesANDROID
for AndroidHardwareBufferFormatPropertiesANDROIDBuilder<'_>
{
}
unsafe impl ExtendsAndroidHardwareBufferPropertiesANDROID
for AndroidHardwareBufferFormatPropertiesANDROID
{
}
impl<'a> ::std::ops::Deref for AndroidHardwareBufferFormatPropertiesANDROIDBuilder<'a> {
type Target = AndroidHardwareBufferFormatPropertiesANDROID;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for AndroidHardwareBufferFormatPropertiesANDROIDBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> AndroidHardwareBufferFormatPropertiesANDROIDBuilder<'a> {
pub fn format(
mut self,
format: Format,
) -> AndroidHardwareBufferFormatPropertiesANDROIDBuilder<'a> {
self.inner.format = format;
self
}
pub fn external_format(
mut self,
external_format: u64,
) -> AndroidHardwareBufferFormatPropertiesANDROIDBuilder<'a> {
self.inner.external_format = external_format;
self
}
pub fn format_features(
mut self,
format_features: FormatFeatureFlags,
) -> AndroidHardwareBufferFormatPropertiesANDROIDBuilder<'a> {
self.inner.format_features = format_features;
self
}
pub fn sampler_ycbcr_conversion_components(
mut self,
sampler_ycbcr_conversion_components: ComponentMapping,
) -> AndroidHardwareBufferFormatPropertiesANDROIDBuilder<'a> {
self.inner.sampler_ycbcr_conversion_components = sampler_ycbcr_conversion_components;
self
}
pub fn suggested_ycbcr_model(
mut self,
suggested_ycbcr_model: SamplerYcbcrModelConversion,
) -> AndroidHardwareBufferFormatPropertiesANDROIDBuilder<'a> {
self.inner.suggested_ycbcr_model = suggested_ycbcr_model;
self
}
pub fn suggested_ycbcr_range(
mut self,
suggested_ycbcr_range: SamplerYcbcrRange,
) -> AndroidHardwareBufferFormatPropertiesANDROIDBuilder<'a> {
self.inner.suggested_ycbcr_range = suggested_ycbcr_range;
self
}
pub fn suggested_x_chroma_offset(
mut self,
suggested_x_chroma_offset: ChromaLocation,
) -> AndroidHardwareBufferFormatPropertiesANDROIDBuilder<'a> {
self.inner.suggested_x_chroma_offset = suggested_x_chroma_offset;
self
}
pub fn suggested_y_chroma_offset(
mut self,
suggested_y_chroma_offset: ChromaLocation,
) -> AndroidHardwareBufferFormatPropertiesANDROIDBuilder<'a> {
self.inner.suggested_y_chroma_offset = suggested_y_chroma_offset;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> AndroidHardwareBufferFormatPropertiesANDROID {
self.inner
}
}
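// A sketch tying the format properties above back to
// `AndroidHardwareBufferPropertiesANDROID` (the function name is illustrative): the
// `ExtendsAndroidHardwareBufferPropertiesANDROID` impls above allow the format
// properties to be chained onto the base properties struct before the query fills both.
#[allow(dead_code)]
fn example_hardware_buffer_properties_chain<'a>(
    format_props: &'a mut AndroidHardwareBufferFormatPropertiesANDROID,
) -> AndroidHardwareBufferPropertiesANDROIDBuilder<'a> {
    AndroidHardwareBufferPropertiesANDROID::builder().push_next(format_props)
}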
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkCommandBufferInheritanceConditionalRenderingInfoEXT.html>"]
pub struct CommandBufferInheritanceConditionalRenderingInfoEXT {
pub s_type: StructureType,
pub p_next: *const c_void,
pub conditional_rendering_enable: Bool32,
}
impl ::std::default::Default for CommandBufferInheritanceConditionalRenderingInfoEXT {
fn default() -> CommandBufferInheritanceConditionalRenderingInfoEXT {
CommandBufferInheritanceConditionalRenderingInfoEXT {
s_type: StructureType::COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT,
p_next: ::std::ptr::null(),
conditional_rendering_enable: Bool32::default(),
}
}
}
impl CommandBufferInheritanceConditionalRenderingInfoEXT {
pub fn builder<'a>() -> CommandBufferInheritanceConditionalRenderingInfoEXTBuilder<'a> {
CommandBufferInheritanceConditionalRenderingInfoEXTBuilder {
inner: CommandBufferInheritanceConditionalRenderingInfoEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct CommandBufferInheritanceConditionalRenderingInfoEXTBuilder<'a> {
inner: CommandBufferInheritanceConditionalRenderingInfoEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsCommandBufferInheritanceInfo
for CommandBufferInheritanceConditionalRenderingInfoEXTBuilder<'_>
{
}
unsafe impl ExtendsCommandBufferInheritanceInfo
for CommandBufferInheritanceConditionalRenderingInfoEXT
{
}
impl<'a> ::std::ops::Deref for CommandBufferInheritanceConditionalRenderingInfoEXTBuilder<'a> {
type Target = CommandBufferInheritanceConditionalRenderingInfoEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for CommandBufferInheritanceConditionalRenderingInfoEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> CommandBufferInheritanceConditionalRenderingInfoEXTBuilder<'a> {
pub fn conditional_rendering_enable(
mut self,
conditional_rendering_enable: bool,
) -> CommandBufferInheritanceConditionalRenderingInfoEXTBuilder<'a> {
self.inner.conditional_rendering_enable = conditional_rendering_enable.into();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> CommandBufferInheritanceConditionalRenderingInfoEXT {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkExternalFormatANDROID.html>"]
pub struct ExternalFormatANDROID {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub external_format: u64,
}
impl ::std::default::Default for ExternalFormatANDROID {
fn default() -> ExternalFormatANDROID {
ExternalFormatANDROID {
s_type: StructureType::EXTERNAL_FORMAT_ANDROID,
p_next: ::std::ptr::null_mut(),
external_format: u64::default(),
}
}
}
impl ExternalFormatANDROID {
pub fn builder<'a>() -> ExternalFormatANDROIDBuilder<'a> {
ExternalFormatANDROIDBuilder {
inner: ExternalFormatANDROID::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ExternalFormatANDROIDBuilder<'a> {
inner: ExternalFormatANDROID,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsImageCreateInfo for ExternalFormatANDROIDBuilder<'_> {}
unsafe impl ExtendsImageCreateInfo for ExternalFormatANDROID {}
unsafe impl ExtendsSamplerYcbcrConversionCreateInfo for ExternalFormatANDROIDBuilder<'_> {}
unsafe impl ExtendsSamplerYcbcrConversionCreateInfo for ExternalFormatANDROID {}
impl<'a> ::std::ops::Deref for ExternalFormatANDROIDBuilder<'a> {
type Target = ExternalFormatANDROID;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ExternalFormatANDROIDBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ExternalFormatANDROIDBuilder<'a> {
pub fn external_format(mut self, external_format: u64) -> ExternalFormatANDROIDBuilder<'a> {
self.inner.external_format = external_format;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ExternalFormatANDROID {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDevice8BitStorageFeaturesKHR.html>"]
pub struct PhysicalDevice8BitStorageFeaturesKHR {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub storage_buffer8_bit_access: Bool32,
pub uniform_and_storage_buffer8_bit_access: Bool32,
pub storage_push_constant8: Bool32,
}
impl ::std::default::Default for PhysicalDevice8BitStorageFeaturesKHR {
fn default() -> PhysicalDevice8BitStorageFeaturesKHR {
PhysicalDevice8BitStorageFeaturesKHR {
s_type: StructureType::PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR,
p_next: ::std::ptr::null_mut(),
storage_buffer8_bit_access: Bool32::default(),
uniform_and_storage_buffer8_bit_access: Bool32::default(),
storage_push_constant8: Bool32::default(),
}
}
}
impl PhysicalDevice8BitStorageFeaturesKHR {
pub fn builder<'a>() -> PhysicalDevice8BitStorageFeaturesKHRBuilder<'a> {
PhysicalDevice8BitStorageFeaturesKHRBuilder {
inner: PhysicalDevice8BitStorageFeaturesKHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDevice8BitStorageFeaturesKHRBuilder<'a> {
inner: PhysicalDevice8BitStorageFeaturesKHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDevice8BitStorageFeaturesKHRBuilder<'_> {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDevice8BitStorageFeaturesKHR {}
impl<'a> ::std::ops::Deref for PhysicalDevice8BitStorageFeaturesKHRBuilder<'a> {
type Target = PhysicalDevice8BitStorageFeaturesKHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDevice8BitStorageFeaturesKHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDevice8BitStorageFeaturesKHRBuilder<'a> {
pub fn storage_buffer8_bit_access(
mut self,
storage_buffer8_bit_access: bool,
) -> PhysicalDevice8BitStorageFeaturesKHRBuilder<'a> {
self.inner.storage_buffer8_bit_access = storage_buffer8_bit_access.into();
self
}
pub fn uniform_and_storage_buffer8_bit_access(
mut self,
uniform_and_storage_buffer8_bit_access: bool,
) -> PhysicalDevice8BitStorageFeaturesKHRBuilder<'a> {
self.inner.uniform_and_storage_buffer8_bit_access =
uniform_and_storage_buffer8_bit_access.into();
self
}
pub fn storage_push_constant8(
mut self,
storage_push_constant8: bool,
) -> PhysicalDevice8BitStorageFeaturesKHRBuilder<'a> {
self.inner.storage_push_constant8 = storage_push_constant8.into();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDevice8BitStorageFeaturesKHR {
self.inner
}
}
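// Two sketches for the feature struct above (names illustrative). The first shows the
// `bool` setters converting into `Bool32`; the second chains the struct onto device
// creation through the `ExtendsDeviceCreateInfo` impls above, assuming the
// `DeviceCreateInfo` builder generated earlier in this module.
#[allow(dead_code)]
fn example_8bit_storage_features() -> PhysicalDevice8BitStorageFeaturesKHR {
    PhysicalDevice8BitStorageFeaturesKHR::builder()
        .storage_buffer8_bit_access(true)
        .uniform_and_storage_buffer8_bit_access(true)
        .build()
}
#[allow(dead_code)]
fn example_enable_8bit_storage<'a>(
    features: &'a mut PhysicalDevice8BitStorageFeaturesKHR,
) -> DeviceCreateInfoBuilder<'a> {
    DeviceCreateInfo::builder().push_next(features)
}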
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDeviceConditionalRenderingFeaturesEXT.html>"]
pub struct PhysicalDeviceConditionalRenderingFeaturesEXT {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub conditional_rendering: Bool32,
pub inherited_conditional_rendering: Bool32,
}
impl ::std::default::Default for PhysicalDeviceConditionalRenderingFeaturesEXT {
fn default() -> PhysicalDeviceConditionalRenderingFeaturesEXT {
PhysicalDeviceConditionalRenderingFeaturesEXT {
s_type: StructureType::PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT,
p_next: ::std::ptr::null_mut(),
conditional_rendering: Bool32::default(),
inherited_conditional_rendering: Bool32::default(),
}
}
}
impl PhysicalDeviceConditionalRenderingFeaturesEXT {
pub fn builder<'a>() -> PhysicalDeviceConditionalRenderingFeaturesEXTBuilder<'a> {
PhysicalDeviceConditionalRenderingFeaturesEXTBuilder {
inner: PhysicalDeviceConditionalRenderingFeaturesEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDeviceConditionalRenderingFeaturesEXTBuilder<'a> {
inner: PhysicalDeviceConditionalRenderingFeaturesEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceConditionalRenderingFeaturesEXTBuilder<'_> {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceConditionalRenderingFeaturesEXT {}
impl<'a> ::std::ops::Deref for PhysicalDeviceConditionalRenderingFeaturesEXTBuilder<'a> {
type Target = PhysicalDeviceConditionalRenderingFeaturesEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceConditionalRenderingFeaturesEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDeviceConditionalRenderingFeaturesEXTBuilder<'a> {
pub fn conditional_rendering(
mut self,
conditional_rendering: bool,
) -> PhysicalDeviceConditionalRenderingFeaturesEXTBuilder<'a> {
self.inner.conditional_rendering = conditional_rendering.into();
self
}
pub fn inherited_conditional_rendering(
mut self,
inherited_conditional_rendering: bool,
) -> PhysicalDeviceConditionalRenderingFeaturesEXTBuilder<'a> {
self.inner.inherited_conditional_rendering = inherited_conditional_rendering.into();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDeviceConditionalRenderingFeaturesEXT {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDeviceVulkanMemoryModelFeaturesKHR.html>"]
pub struct PhysicalDeviceVulkanMemoryModelFeaturesKHR {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub vulkan_memory_model: Bool32,
pub vulkan_memory_model_device_scope: Bool32,
}
impl ::std::default::Default for PhysicalDeviceVulkanMemoryModelFeaturesKHR {
fn default() -> PhysicalDeviceVulkanMemoryModelFeaturesKHR {
PhysicalDeviceVulkanMemoryModelFeaturesKHR {
s_type: StructureType::PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR,
p_next: ::std::ptr::null_mut(),
vulkan_memory_model: Bool32::default(),
vulkan_memory_model_device_scope: Bool32::default(),
}
}
}
impl PhysicalDeviceVulkanMemoryModelFeaturesKHR {
pub fn builder<'a>() -> PhysicalDeviceVulkanMemoryModelFeaturesKHRBuilder<'a> {
PhysicalDeviceVulkanMemoryModelFeaturesKHRBuilder {
inner: PhysicalDeviceVulkanMemoryModelFeaturesKHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDeviceVulkanMemoryModelFeaturesKHRBuilder<'a> {
inner: PhysicalDeviceVulkanMemoryModelFeaturesKHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceVulkanMemoryModelFeaturesKHRBuilder<'_> {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceVulkanMemoryModelFeaturesKHR {}
impl<'a> ::std::ops::Deref for PhysicalDeviceVulkanMemoryModelFeaturesKHRBuilder<'a> {
type Target = PhysicalDeviceVulkanMemoryModelFeaturesKHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceVulkanMemoryModelFeaturesKHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDeviceVulkanMemoryModelFeaturesKHRBuilder<'a> {
pub fn vulkan_memory_model(
mut self,
vulkan_memory_model: bool,
) -> PhysicalDeviceVulkanMemoryModelFeaturesKHRBuilder<'a> {
self.inner.vulkan_memory_model = vulkan_memory_model.into();
self
}
pub fn vulkan_memory_model_device_scope(
mut self,
vulkan_memory_model_device_scope: bool,
) -> PhysicalDeviceVulkanMemoryModelFeaturesKHRBuilder<'a> {
self.inner.vulkan_memory_model_device_scope = vulkan_memory_model_device_scope.into();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDeviceVulkanMemoryModelFeaturesKHR {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDeviceShaderAtomicInt64FeaturesKHR.html>"]
pub struct PhysicalDeviceShaderAtomicInt64FeaturesKHR {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub shader_buffer_int64_atomics: Bool32,
pub shader_shared_int64_atomics: Bool32,
}
impl ::std::default::Default for PhysicalDeviceShaderAtomicInt64FeaturesKHR {
fn default() -> PhysicalDeviceShaderAtomicInt64FeaturesKHR {
PhysicalDeviceShaderAtomicInt64FeaturesKHR {
s_type: StructureType::PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR,
p_next: ::std::ptr::null_mut(),
shader_buffer_int64_atomics: Bool32::default(),
shader_shared_int64_atomics: Bool32::default(),
}
}
}
impl PhysicalDeviceShaderAtomicInt64FeaturesKHR {
pub fn builder<'a>() -> PhysicalDeviceShaderAtomicInt64FeaturesKHRBuilder<'a> {
PhysicalDeviceShaderAtomicInt64FeaturesKHRBuilder {
inner: PhysicalDeviceShaderAtomicInt64FeaturesKHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDeviceShaderAtomicInt64FeaturesKHRBuilder<'a> {
inner: PhysicalDeviceShaderAtomicInt64FeaturesKHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceShaderAtomicInt64FeaturesKHRBuilder<'_> {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceShaderAtomicInt64FeaturesKHR {}
impl<'a> ::std::ops::Deref for PhysicalDeviceShaderAtomicInt64FeaturesKHRBuilder<'a> {
type Target = PhysicalDeviceShaderAtomicInt64FeaturesKHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceShaderAtomicInt64FeaturesKHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDeviceShaderAtomicInt64FeaturesKHRBuilder<'a> {
pub fn shader_buffer_int64_atomics(
mut self,
shader_buffer_int64_atomics: bool,
) -> PhysicalDeviceShaderAtomicInt64FeaturesKHRBuilder<'a> {
self.inner.shader_buffer_int64_atomics = shader_buffer_int64_atomics.into();
self
}
pub fn shader_shared_int64_atomics(
mut self,
shader_shared_int64_atomics: bool,
) -> PhysicalDeviceShaderAtomicInt64FeaturesKHRBuilder<'a> {
self.inner.shader_shared_int64_atomics = shader_shared_int64_atomics.into();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDeviceShaderAtomicInt64FeaturesKHR {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT.html>"]
pub struct PhysicalDeviceVertexAttributeDivisorFeaturesEXT {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub vertex_attribute_instance_rate_divisor: Bool32,
pub vertex_attribute_instance_rate_zero_divisor: Bool32,
}
impl ::std::default::Default for PhysicalDeviceVertexAttributeDivisorFeaturesEXT {
fn default() -> PhysicalDeviceVertexAttributeDivisorFeaturesEXT {
PhysicalDeviceVertexAttributeDivisorFeaturesEXT {
s_type: StructureType::PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT,
p_next: ::std::ptr::null_mut(),
vertex_attribute_instance_rate_divisor: Bool32::default(),
vertex_attribute_instance_rate_zero_divisor: Bool32::default(),
}
}
}
impl PhysicalDeviceVertexAttributeDivisorFeaturesEXT {
pub fn builder<'a>() -> PhysicalDeviceVertexAttributeDivisorFeaturesEXTBuilder<'a> {
PhysicalDeviceVertexAttributeDivisorFeaturesEXTBuilder {
inner: PhysicalDeviceVertexAttributeDivisorFeaturesEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDeviceVertexAttributeDivisorFeaturesEXTBuilder<'a> {
inner: PhysicalDeviceVertexAttributeDivisorFeaturesEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceVertexAttributeDivisorFeaturesEXTBuilder<'_> {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceVertexAttributeDivisorFeaturesEXT {}
impl<'a> ::std::ops::Deref for PhysicalDeviceVertexAttributeDivisorFeaturesEXTBuilder<'a> {
type Target = PhysicalDeviceVertexAttributeDivisorFeaturesEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceVertexAttributeDivisorFeaturesEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDeviceVertexAttributeDivisorFeaturesEXTBuilder<'a> {
pub fn vertex_attribute_instance_rate_divisor(
mut self,
vertex_attribute_instance_rate_divisor: bool,
) -> PhysicalDeviceVertexAttributeDivisorFeaturesEXTBuilder<'a> {
self.inner.vertex_attribute_instance_rate_divisor =
vertex_attribute_instance_rate_divisor.into();
self
}
pub fn vertex_attribute_instance_rate_zero_divisor(
mut self,
vertex_attribute_instance_rate_zero_divisor: bool,
) -> PhysicalDeviceVertexAttributeDivisorFeaturesEXTBuilder<'a> {
self.inner.vertex_attribute_instance_rate_zero_divisor =
vertex_attribute_instance_rate_zero_divisor.into();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDeviceVertexAttributeDivisorFeaturesEXT {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkQueueFamilyCheckpointPropertiesNV.html>"]
pub struct QueueFamilyCheckpointPropertiesNV {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub checkpoint_execution_stage_mask: PipelineStageFlags,
}
impl ::std::default::Default for QueueFamilyCheckpointPropertiesNV {
fn default() -> QueueFamilyCheckpointPropertiesNV {
QueueFamilyCheckpointPropertiesNV {
s_type: StructureType::QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV,
p_next: ::std::ptr::null_mut(),
checkpoint_execution_stage_mask: PipelineStageFlags::default(),
}
}
}
impl QueueFamilyCheckpointPropertiesNV {
pub fn builder<'a>() -> QueueFamilyCheckpointPropertiesNVBuilder<'a> {
QueueFamilyCheckpointPropertiesNVBuilder {
inner: QueueFamilyCheckpointPropertiesNV::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct QueueFamilyCheckpointPropertiesNVBuilder<'a> {
inner: QueueFamilyCheckpointPropertiesNV,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsQueueFamilyProperties2 for QueueFamilyCheckpointPropertiesNVBuilder<'_> {}
unsafe impl ExtendsQueueFamilyProperties2 for QueueFamilyCheckpointPropertiesNV {}
impl<'a> ::std::ops::Deref for QueueFamilyCheckpointPropertiesNVBuilder<'a> {
type Target = QueueFamilyCheckpointPropertiesNV;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for QueueFamilyCheckpointPropertiesNVBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> QueueFamilyCheckpointPropertiesNVBuilder<'a> {
pub fn checkpoint_execution_stage_mask(
mut self,
checkpoint_execution_stage_mask: PipelineStageFlags,
) -> QueueFamilyCheckpointPropertiesNVBuilder<'a> {
self.inner.checkpoint_execution_stage_mask = checkpoint_execution_stage_mask;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> QueueFamilyCheckpointPropertiesNV {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkCheckpointDataNV.html>"]
pub struct CheckpointDataNV {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub stage: PipelineStageFlags,
pub p_checkpoint_marker: *mut c_void,
}
impl ::std::default::Default for CheckpointDataNV {
fn default() -> CheckpointDataNV {
CheckpointDataNV {
s_type: StructureType::CHECKPOINT_DATA_NV,
p_next: ::std::ptr::null_mut(),
stage: PipelineStageFlags::default(),
p_checkpoint_marker: ::std::ptr::null_mut(),
}
}
}
impl CheckpointDataNV {
pub fn builder<'a>() -> CheckpointDataNVBuilder<'a> {
CheckpointDataNVBuilder {
inner: CheckpointDataNV::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct CheckpointDataNVBuilder<'a> {
inner: CheckpointDataNV,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsCheckpointDataNV {}
impl<'a> ::std::ops::Deref for CheckpointDataNVBuilder<'a> {
type Target = CheckpointDataNV;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for CheckpointDataNVBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> CheckpointDataNVBuilder<'a> {
pub fn stage(mut self, stage: PipelineStageFlags) -> CheckpointDataNVBuilder<'a> {
self.inner.stage = stage;
self
}
pub fn checkpoint_marker(
mut self,
checkpoint_marker: *mut c_void,
) -> CheckpointDataNVBuilder<'a> {
self.inner.p_checkpoint_marker = checkpoint_marker;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsCheckpointDataNV>(
mut self,
next: &'a mut T,
) -> CheckpointDataNVBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> CheckpointDataNV {
self.inner
}
}
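// A sketch for the output-only struct above (the function name is illustrative):
// `CheckpointDataNV` is filled by the driver, so typical usage just reserves
// default-initialized storage, with `s_type` pre-set by `default()`, and hands it to
// the checkpoint query.
#[allow(dead_code)]
fn example_checkpoint_data_storage(count: usize) -> Vec<CheckpointDataNV> {
    vec![CheckpointDataNV::default(); count]
}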
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDeviceDepthStencilResolvePropertiesKHR.html>"]
pub struct PhysicalDeviceDepthStencilResolvePropertiesKHR {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub supported_depth_resolve_modes: ResolveModeFlagsKHR,
pub supported_stencil_resolve_modes: ResolveModeFlagsKHR,
pub independent_resolve_none: Bool32,
pub independent_resolve: Bool32,
}
impl ::std::default::Default for PhysicalDeviceDepthStencilResolvePropertiesKHR {
fn default() -> PhysicalDeviceDepthStencilResolvePropertiesKHR {
PhysicalDeviceDepthStencilResolvePropertiesKHR {
s_type: StructureType::PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR,
p_next: ::std::ptr::null_mut(),
supported_depth_resolve_modes: ResolveModeFlagsKHR::default(),
supported_stencil_resolve_modes: ResolveModeFlagsKHR::default(),
independent_resolve_none: Bool32::default(),
independent_resolve: Bool32::default(),
}
}
}
impl PhysicalDeviceDepthStencilResolvePropertiesKHR {
pub fn builder<'a>() -> PhysicalDeviceDepthStencilResolvePropertiesKHRBuilder<'a> {
PhysicalDeviceDepthStencilResolvePropertiesKHRBuilder {
inner: PhysicalDeviceDepthStencilResolvePropertiesKHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDeviceDepthStencilResolvePropertiesKHRBuilder<'a> {
inner: PhysicalDeviceDepthStencilResolvePropertiesKHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsPhysicalDeviceProperties2
for PhysicalDeviceDepthStencilResolvePropertiesKHRBuilder<'_>
{
}
unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceDepthStencilResolvePropertiesKHR {}
impl<'a> ::std::ops::Deref for PhysicalDeviceDepthStencilResolvePropertiesKHRBuilder<'a> {
type Target = PhysicalDeviceDepthStencilResolvePropertiesKHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceDepthStencilResolvePropertiesKHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDeviceDepthStencilResolvePropertiesKHRBuilder<'a> {
pub fn supported_depth_resolve_modes(
mut self,
supported_depth_resolve_modes: ResolveModeFlagsKHR,
) -> PhysicalDeviceDepthStencilResolvePropertiesKHRBuilder<'a> {
self.inner.supported_depth_resolve_modes = supported_depth_resolve_modes;
self
}
pub fn supported_stencil_resolve_modes(
mut self,
supported_stencil_resolve_modes: ResolveModeFlagsKHR,
) -> PhysicalDeviceDepthStencilResolvePropertiesKHRBuilder<'a> {
self.inner.supported_stencil_resolve_modes = supported_stencil_resolve_modes;
self
}
pub fn independent_resolve_none(
mut self,
independent_resolve_none: bool,
) -> PhysicalDeviceDepthStencilResolvePropertiesKHRBuilder<'a> {
self.inner.independent_resolve_none = independent_resolve_none.into();
self
}
pub fn independent_resolve(
mut self,
independent_resolve: bool,
) -> PhysicalDeviceDepthStencilResolvePropertiesKHRBuilder<'a> {
self.inner.independent_resolve = independent_resolve.into();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDeviceDepthStencilResolvePropertiesKHR {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkSubpassDescriptionDepthStencilResolveKHR.html>"]
pub struct SubpassDescriptionDepthStencilResolveKHR {
pub s_type: StructureType,
pub p_next: *const c_void,
pub depth_resolve_mode: ResolveModeFlagsKHR,
pub stencil_resolve_mode: ResolveModeFlagsKHR,
pub p_depth_stencil_resolve_attachment: *const AttachmentReference2KHR,
}
impl ::std::default::Default for SubpassDescriptionDepthStencilResolveKHR {
fn default() -> SubpassDescriptionDepthStencilResolveKHR {
SubpassDescriptionDepthStencilResolveKHR {
s_type: StructureType::SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR,
p_next: ::std::ptr::null(),
depth_resolve_mode: ResolveModeFlagsKHR::default(),
stencil_resolve_mode: ResolveModeFlagsKHR::default(),
p_depth_stencil_resolve_attachment: ::std::ptr::null(),
}
}
}
impl SubpassDescriptionDepthStencilResolveKHR {
pub fn builder<'a>() -> SubpassDescriptionDepthStencilResolveKHRBuilder<'a> {
SubpassDescriptionDepthStencilResolveKHRBuilder {
inner: SubpassDescriptionDepthStencilResolveKHR::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct SubpassDescriptionDepthStencilResolveKHRBuilder<'a> {
inner: SubpassDescriptionDepthStencilResolveKHR,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsSubpassDescription2KHR for SubpassDescriptionDepthStencilResolveKHRBuilder<'_> {}
unsafe impl ExtendsSubpassDescription2KHR for SubpassDescriptionDepthStencilResolveKHR {}
impl<'a> ::std::ops::Deref for SubpassDescriptionDepthStencilResolveKHRBuilder<'a> {
type Target = SubpassDescriptionDepthStencilResolveKHR;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for SubpassDescriptionDepthStencilResolveKHRBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> SubpassDescriptionDepthStencilResolveKHRBuilder<'a> {
pub fn depth_resolve_mode(
mut self,
depth_resolve_mode: ResolveModeFlagsKHR,
) -> SubpassDescriptionDepthStencilResolveKHRBuilder<'a> {
self.inner.depth_resolve_mode = depth_resolve_mode;
self
}
pub fn stencil_resolve_mode(
mut self,
stencil_resolve_mode: ResolveModeFlagsKHR,
) -> SubpassDescriptionDepthStencilResolveKHRBuilder<'a> {
self.inner.stencil_resolve_mode = stencil_resolve_mode;
self
}
pub fn depth_stencil_resolve_attachment(
mut self,
depth_stencil_resolve_attachment: &'a AttachmentReference2KHR,
) -> SubpassDescriptionDepthStencilResolveKHRBuilder<'a> {
self.inner.p_depth_stencil_resolve_attachment = depth_stencil_resolve_attachment;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> SubpassDescriptionDepthStencilResolveKHR {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkImageViewASTCDecodeModeEXT.html>"]
pub struct ImageViewASTCDecodeModeEXT {
pub s_type: StructureType,
pub p_next: *const c_void,
pub decode_mode: Format,
}
impl ::std::default::Default for ImageViewASTCDecodeModeEXT {
fn default() -> ImageViewASTCDecodeModeEXT {
ImageViewASTCDecodeModeEXT {
s_type: StructureType::IMAGE_VIEW_ASTC_DECODE_MODE_EXT,
p_next: ::std::ptr::null(),
decode_mode: Format::default(),
}
}
}
impl ImageViewASTCDecodeModeEXT {
pub fn builder<'a>() -> ImageViewASTCDecodeModeEXTBuilder<'a> {
ImageViewASTCDecodeModeEXTBuilder {
inner: ImageViewASTCDecodeModeEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ImageViewASTCDecodeModeEXTBuilder<'a> {
inner: ImageViewASTCDecodeModeEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsImageViewCreateInfo for ImageViewASTCDecodeModeEXTBuilder<'_> {}
unsafe impl ExtendsImageViewCreateInfo for ImageViewASTCDecodeModeEXT {}
impl<'a> ::std::ops::Deref for ImageViewASTCDecodeModeEXTBuilder<'a> {
type Target = ImageViewASTCDecodeModeEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ImageViewASTCDecodeModeEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ImageViewASTCDecodeModeEXTBuilder<'a> {
pub fn decode_mode(mut self, decode_mode: Format) -> ImageViewASTCDecodeModeEXTBuilder<'a> {
self.inner.decode_mode = decode_mode;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ImageViewASTCDecodeModeEXT {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDeviceASTCDecodeFeaturesEXT.html>"]
pub struct PhysicalDeviceASTCDecodeFeaturesEXT {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub decode_mode_shared_exponent: Bool32,
}
impl ::std::default::Default for PhysicalDeviceASTCDecodeFeaturesEXT {
fn default() -> PhysicalDeviceASTCDecodeFeaturesEXT {
PhysicalDeviceASTCDecodeFeaturesEXT {
s_type: StructureType::PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT,
p_next: ::std::ptr::null_mut(),
decode_mode_shared_exponent: Bool32::default(),
}
}
}
impl PhysicalDeviceASTCDecodeFeaturesEXT {
pub fn builder<'a>() -> PhysicalDeviceASTCDecodeFeaturesEXTBuilder<'a> {
PhysicalDeviceASTCDecodeFeaturesEXTBuilder {
inner: PhysicalDeviceASTCDecodeFeaturesEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDeviceASTCDecodeFeaturesEXTBuilder<'a> {
inner: PhysicalDeviceASTCDecodeFeaturesEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceASTCDecodeFeaturesEXTBuilder<'_> {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceASTCDecodeFeaturesEXT {}
impl<'a> ::std::ops::Deref for PhysicalDeviceASTCDecodeFeaturesEXTBuilder<'a> {
type Target = PhysicalDeviceASTCDecodeFeaturesEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceASTCDecodeFeaturesEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDeviceASTCDecodeFeaturesEXTBuilder<'a> {
pub fn decode_mode_shared_exponent(
mut self,
decode_mode_shared_exponent: bool,
) -> PhysicalDeviceASTCDecodeFeaturesEXTBuilder<'a> {
self.inner.decode_mode_shared_exponent = decode_mode_shared_exponent.into();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDeviceASTCDecodeFeaturesEXT {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDeviceTransformFeedbackFeaturesEXT.html>"]
pub struct PhysicalDeviceTransformFeedbackFeaturesEXT {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub transform_feedback: Bool32,
pub geometry_streams: Bool32,
}
impl ::std::default::Default for PhysicalDeviceTransformFeedbackFeaturesEXT {
fn default() -> PhysicalDeviceTransformFeedbackFeaturesEXT {
PhysicalDeviceTransformFeedbackFeaturesEXT {
s_type: StructureType::PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT,
p_next: ::std::ptr::null_mut(),
transform_feedback: Bool32::default(),
geometry_streams: Bool32::default(),
}
}
}
impl PhysicalDeviceTransformFeedbackFeaturesEXT {
pub fn builder<'a>() -> PhysicalDeviceTransformFeedbackFeaturesEXTBuilder<'a> {
PhysicalDeviceTransformFeedbackFeaturesEXTBuilder {
inner: PhysicalDeviceTransformFeedbackFeaturesEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDeviceTransformFeedbackFeaturesEXTBuilder<'a> {
inner: PhysicalDeviceTransformFeedbackFeaturesEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceTransformFeedbackFeaturesEXTBuilder<'_> {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceTransformFeedbackFeaturesEXT {}
impl<'a> ::std::ops::Deref for PhysicalDeviceTransformFeedbackFeaturesEXTBuilder<'a> {
type Target = PhysicalDeviceTransformFeedbackFeaturesEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceTransformFeedbackFeaturesEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDeviceTransformFeedbackFeaturesEXTBuilder<'a> {
pub fn transform_feedback(
mut self,
transform_feedback: bool,
) -> PhysicalDeviceTransformFeedbackFeaturesEXTBuilder<'a> {
self.inner.transform_feedback = transform_feedback.into();
self
}
pub fn geometry_streams(
mut self,
geometry_streams: bool,
) -> PhysicalDeviceTransformFeedbackFeaturesEXTBuilder<'a> {
self.inner.geometry_streams = geometry_streams.into();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDeviceTransformFeedbackFeaturesEXT {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDeviceTransformFeedbackPropertiesEXT.html>"]
pub struct PhysicalDeviceTransformFeedbackPropertiesEXT {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub max_transform_feedback_streams: u32,
pub max_transform_feedback_buffers: u32,
pub max_transform_feedback_buffer_size: DeviceSize,
pub max_transform_feedback_stream_data_size: u32,
pub max_transform_feedback_buffer_data_size: u32,
pub max_transform_feedback_buffer_data_stride: u32,
pub transform_feedback_queries: Bool32,
pub transform_feedback_streams_lines_triangles: Bool32,
pub transform_feedback_rasterization_stream_select: Bool32,
pub transform_feedback_draw: Bool32,
}
impl ::std::default::Default for PhysicalDeviceTransformFeedbackPropertiesEXT {
fn default() -> PhysicalDeviceTransformFeedbackPropertiesEXT {
PhysicalDeviceTransformFeedbackPropertiesEXT {
s_type: StructureType::PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT,
p_next: ::std::ptr::null_mut(),
max_transform_feedback_streams: u32::default(),
max_transform_feedback_buffers: u32::default(),
max_transform_feedback_buffer_size: DeviceSize::default(),
max_transform_feedback_stream_data_size: u32::default(),
max_transform_feedback_buffer_data_size: u32::default(),
max_transform_feedback_buffer_data_stride: u32::default(),
transform_feedback_queries: Bool32::default(),
transform_feedback_streams_lines_triangles: Bool32::default(),
transform_feedback_rasterization_stream_select: Bool32::default(),
transform_feedback_draw: Bool32::default(),
}
}
}
impl PhysicalDeviceTransformFeedbackPropertiesEXT {
pub fn builder<'a>() -> PhysicalDeviceTransformFeedbackPropertiesEXTBuilder<'a> {
PhysicalDeviceTransformFeedbackPropertiesEXTBuilder {
inner: PhysicalDeviceTransformFeedbackPropertiesEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDeviceTransformFeedbackPropertiesEXTBuilder<'a> {
inner: PhysicalDeviceTransformFeedbackPropertiesEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsPhysicalDeviceProperties2
for PhysicalDeviceTransformFeedbackPropertiesEXTBuilder<'_>
{
}
unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceTransformFeedbackPropertiesEXT {}
impl<'a> ::std::ops::Deref for PhysicalDeviceTransformFeedbackPropertiesEXTBuilder<'a> {
type Target = PhysicalDeviceTransformFeedbackPropertiesEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceTransformFeedbackPropertiesEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDeviceTransformFeedbackPropertiesEXTBuilder<'a> {
pub fn max_transform_feedback_streams(
mut self,
max_transform_feedback_streams: u32,
) -> PhysicalDeviceTransformFeedbackPropertiesEXTBuilder<'a> {
self.inner.max_transform_feedback_streams = max_transform_feedback_streams;
self
}
pub fn max_transform_feedback_buffers(
mut self,
max_transform_feedback_buffers: u32,
) -> PhysicalDeviceTransformFeedbackPropertiesEXTBuilder<'a> {
self.inner.max_transform_feedback_buffers = max_transform_feedback_buffers;
self
}
pub fn max_transform_feedback_buffer_size(
mut self,
max_transform_feedback_buffer_size: DeviceSize,
) -> PhysicalDeviceTransformFeedbackPropertiesEXTBuilder<'a> {
self.inner.max_transform_feedback_buffer_size = max_transform_feedback_buffer_size;
self
}
pub fn max_transform_feedback_stream_data_size(
mut self,
max_transform_feedback_stream_data_size: u32,
) -> PhysicalDeviceTransformFeedbackPropertiesEXTBuilder<'a> {
self.inner.max_transform_feedback_stream_data_size =
max_transform_feedback_stream_data_size;
self
}
pub fn max_transform_feedback_buffer_data_size(
mut self,
max_transform_feedback_buffer_data_size: u32,
) -> PhysicalDeviceTransformFeedbackPropertiesEXTBuilder<'a> {
self.inner.max_transform_feedback_buffer_data_size =
max_transform_feedback_buffer_data_size;
self
}
pub fn max_transform_feedback_buffer_data_stride(
mut self,
max_transform_feedback_buffer_data_stride: u32,
) -> PhysicalDeviceTransformFeedbackPropertiesEXTBuilder<'a> {
self.inner.max_transform_feedback_buffer_data_stride =
max_transform_feedback_buffer_data_stride;
self
}
pub fn transform_feedback_queries(
mut self,
transform_feedback_queries: bool,
) -> PhysicalDeviceTransformFeedbackPropertiesEXTBuilder<'a> {
self.inner.transform_feedback_queries = transform_feedback_queries.into();
self
}
pub fn transform_feedback_streams_lines_triangles(
mut self,
transform_feedback_streams_lines_triangles: bool,
) -> PhysicalDeviceTransformFeedbackPropertiesEXTBuilder<'a> {
self.inner.transform_feedback_streams_lines_triangles =
transform_feedback_streams_lines_triangles.into();
self
}
pub fn transform_feedback_rasterization_stream_select(
mut self,
transform_feedback_rasterization_stream_select: bool,
) -> PhysicalDeviceTransformFeedbackPropertiesEXTBuilder<'a> {
self.inner.transform_feedback_rasterization_stream_select =
transform_feedback_rasterization_stream_select.into();
self
}
pub fn transform_feedback_draw(
mut self,
transform_feedback_draw: bool,
) -> PhysicalDeviceTransformFeedbackPropertiesEXTBuilder<'a> {
self.inner.transform_feedback_draw = transform_feedback_draw.into();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDeviceTransformFeedbackPropertiesEXT {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPipelineRasterizationStateStreamCreateInfoEXT.html>"]
pub struct PipelineRasterizationStateStreamCreateInfoEXT {
pub s_type: StructureType,
pub p_next: *const c_void,
pub flags: PipelineRasterizationStateStreamCreateFlagsEXT,
pub rasterization_stream: u32,
}
impl ::std::default::Default for PipelineRasterizationStateStreamCreateInfoEXT {
fn default() -> PipelineRasterizationStateStreamCreateInfoEXT {
PipelineRasterizationStateStreamCreateInfoEXT {
s_type: StructureType::PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT,
p_next: ::std::ptr::null(),
flags: PipelineRasterizationStateStreamCreateFlagsEXT::default(),
rasterization_stream: u32::default(),
}
}
}
impl PipelineRasterizationStateStreamCreateInfoEXT {
pub fn builder<'a>() -> PipelineRasterizationStateStreamCreateInfoEXTBuilder<'a> {
PipelineRasterizationStateStreamCreateInfoEXTBuilder {
inner: PipelineRasterizationStateStreamCreateInfoEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PipelineRasterizationStateStreamCreateInfoEXTBuilder<'a> {
inner: PipelineRasterizationStateStreamCreateInfoEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsPipelineRasterizationStateCreateInfo
for PipelineRasterizationStateStreamCreateInfoEXTBuilder<'_>
{
}
unsafe impl ExtendsPipelineRasterizationStateCreateInfo
for PipelineRasterizationStateStreamCreateInfoEXT
{
}
impl<'a> ::std::ops::Deref for PipelineRasterizationStateStreamCreateInfoEXTBuilder<'a> {
type Target = PipelineRasterizationStateStreamCreateInfoEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PipelineRasterizationStateStreamCreateInfoEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PipelineRasterizationStateStreamCreateInfoEXTBuilder<'a> {
pub fn flags(
mut self,
flags: PipelineRasterizationStateStreamCreateFlagsEXT,
) -> PipelineRasterizationStateStreamCreateInfoEXTBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn rasterization_stream(
mut self,
rasterization_stream: u32,
) -> PipelineRasterizationStateStreamCreateInfoEXTBuilder<'a> {
self.inner.rasterization_stream = rasterization_stream;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PipelineRasterizationStateStreamCreateInfoEXT {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV.html>"]
pub struct PhysicalDeviceRepresentativeFragmentTestFeaturesNV {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub representative_fragment_test: Bool32,
}
impl ::std::default::Default for PhysicalDeviceRepresentativeFragmentTestFeaturesNV {
fn default() -> PhysicalDeviceRepresentativeFragmentTestFeaturesNV {
PhysicalDeviceRepresentativeFragmentTestFeaturesNV {
s_type: StructureType::PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV,
p_next: ::std::ptr::null_mut(),
representative_fragment_test: Bool32::default(),
}
}
}
impl PhysicalDeviceRepresentativeFragmentTestFeaturesNV {
pub fn builder<'a>() -> PhysicalDeviceRepresentativeFragmentTestFeaturesNVBuilder<'a> {
PhysicalDeviceRepresentativeFragmentTestFeaturesNVBuilder {
inner: PhysicalDeviceRepresentativeFragmentTestFeaturesNV::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDeviceRepresentativeFragmentTestFeaturesNVBuilder<'a> {
inner: PhysicalDeviceRepresentativeFragmentTestFeaturesNV,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsDeviceCreateInfo
for PhysicalDeviceRepresentativeFragmentTestFeaturesNVBuilder<'_>
{
}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceRepresentativeFragmentTestFeaturesNV {}
impl<'a> ::std::ops::Deref for PhysicalDeviceRepresentativeFragmentTestFeaturesNVBuilder<'a> {
type Target = PhysicalDeviceRepresentativeFragmentTestFeaturesNV;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceRepresentativeFragmentTestFeaturesNVBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDeviceRepresentativeFragmentTestFeaturesNVBuilder<'a> {
pub fn representative_fragment_test(
mut self,
representative_fragment_test: bool,
) -> PhysicalDeviceRepresentativeFragmentTestFeaturesNVBuilder<'a> {
self.inner.representative_fragment_test = representative_fragment_test.into();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDeviceRepresentativeFragmentTestFeaturesNV {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPipelineRepresentativeFragmentTestStateCreateInfoNV.html>"]
pub struct PipelineRepresentativeFragmentTestStateCreateInfoNV {
pub s_type: StructureType,
pub p_next: *const c_void,
pub representative_fragment_test_enable: Bool32,
}
impl ::std::default::Default for PipelineRepresentativeFragmentTestStateCreateInfoNV {
fn default() -> PipelineRepresentativeFragmentTestStateCreateInfoNV {
PipelineRepresentativeFragmentTestStateCreateInfoNV {
s_type: StructureType::PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV,
p_next: ::std::ptr::null(),
representative_fragment_test_enable: Bool32::default(),
}
}
}
impl PipelineRepresentativeFragmentTestStateCreateInfoNV {
pub fn builder<'a>() -> PipelineRepresentativeFragmentTestStateCreateInfoNVBuilder<'a> {
PipelineRepresentativeFragmentTestStateCreateInfoNVBuilder {
inner: PipelineRepresentativeFragmentTestStateCreateInfoNV::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PipelineRepresentativeFragmentTestStateCreateInfoNVBuilder<'a> {
inner: PipelineRepresentativeFragmentTestStateCreateInfoNV,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsGraphicsPipelineCreateInfo
for PipelineRepresentativeFragmentTestStateCreateInfoNVBuilder<'_>
{
}
unsafe impl ExtendsGraphicsPipelineCreateInfo
for PipelineRepresentativeFragmentTestStateCreateInfoNV
{
}
impl<'a> ::std::ops::Deref for PipelineRepresentativeFragmentTestStateCreateInfoNVBuilder<'a> {
type Target = PipelineRepresentativeFragmentTestStateCreateInfoNV;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PipelineRepresentativeFragmentTestStateCreateInfoNVBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PipelineRepresentativeFragmentTestStateCreateInfoNVBuilder<'a> {
pub fn representative_fragment_test_enable(
mut self,
representative_fragment_test_enable: bool,
) -> PipelineRepresentativeFragmentTestStateCreateInfoNVBuilder<'a> {
self.inner.representative_fragment_test_enable = representative_fragment_test_enable.into();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PipelineRepresentativeFragmentTestStateCreateInfoNV {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDeviceExclusiveScissorFeaturesNV.html>"]
pub struct PhysicalDeviceExclusiveScissorFeaturesNV {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub exclusive_scissor: Bool32,
}
impl ::std::default::Default for PhysicalDeviceExclusiveScissorFeaturesNV {
fn default() -> PhysicalDeviceExclusiveScissorFeaturesNV {
PhysicalDeviceExclusiveScissorFeaturesNV {
s_type: StructureType::PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV,
p_next: ::std::ptr::null_mut(),
exclusive_scissor: Bool32::default(),
}
}
}
impl PhysicalDeviceExclusiveScissorFeaturesNV {
pub fn builder<'a>() -> PhysicalDeviceExclusiveScissorFeaturesNVBuilder<'a> {
PhysicalDeviceExclusiveScissorFeaturesNVBuilder {
inner: PhysicalDeviceExclusiveScissorFeaturesNV::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDeviceExclusiveScissorFeaturesNVBuilder<'a> {
inner: PhysicalDeviceExclusiveScissorFeaturesNV,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceExclusiveScissorFeaturesNVBuilder<'_> {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceExclusiveScissorFeaturesNV {}
impl<'a> ::std::ops::Deref for PhysicalDeviceExclusiveScissorFeaturesNVBuilder<'a> {
type Target = PhysicalDeviceExclusiveScissorFeaturesNV;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceExclusiveScissorFeaturesNVBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDeviceExclusiveScissorFeaturesNVBuilder<'a> {
pub fn exclusive_scissor(
mut self,
exclusive_scissor: bool,
) -> PhysicalDeviceExclusiveScissorFeaturesNVBuilder<'a> {
self.inner.exclusive_scissor = exclusive_scissor.into();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDeviceExclusiveScissorFeaturesNV {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPipelineViewportExclusiveScissorStateCreateInfoNV.html>"]
pub struct PipelineViewportExclusiveScissorStateCreateInfoNV {
pub s_type: StructureType,
pub p_next: *const c_void,
pub exclusive_scissor_count: u32,
pub p_exclusive_scissors: *const Rect2D,
}
impl ::std::default::Default for PipelineViewportExclusiveScissorStateCreateInfoNV {
fn default() -> PipelineViewportExclusiveScissorStateCreateInfoNV {
PipelineViewportExclusiveScissorStateCreateInfoNV {
s_type: StructureType::PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV,
p_next: ::std::ptr::null(),
exclusive_scissor_count: u32::default(),
p_exclusive_scissors: ::std::ptr::null(),
}
}
}
impl PipelineViewportExclusiveScissorStateCreateInfoNV {
pub fn builder<'a>() -> PipelineViewportExclusiveScissorStateCreateInfoNVBuilder<'a> {
PipelineViewportExclusiveScissorStateCreateInfoNVBuilder {
inner: PipelineViewportExclusiveScissorStateCreateInfoNV::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PipelineViewportExclusiveScissorStateCreateInfoNVBuilder<'a> {
inner: PipelineViewportExclusiveScissorStateCreateInfoNV,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsPipelineViewportStateCreateInfo
for PipelineViewportExclusiveScissorStateCreateInfoNVBuilder<'_>
{
}
unsafe impl ExtendsPipelineViewportStateCreateInfo
for PipelineViewportExclusiveScissorStateCreateInfoNV
{
}
impl<'a> ::std::ops::Deref for PipelineViewportExclusiveScissorStateCreateInfoNVBuilder<'a> {
type Target = PipelineViewportExclusiveScissorStateCreateInfoNV;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PipelineViewportExclusiveScissorStateCreateInfoNVBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PipelineViewportExclusiveScissorStateCreateInfoNVBuilder<'a> {
pub fn exclusive_scissors(
mut self,
exclusive_scissors: &'a [Rect2D],
) -> PipelineViewportExclusiveScissorStateCreateInfoNVBuilder<'a> {
self.inner.exclusive_scissor_count = exclusive_scissors.len() as _;
self.inner.p_exclusive_scissors = exclusive_scissors.as_ptr();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PipelineViewportExclusiveScissorStateCreateInfoNV {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDeviceCornerSampledImageFeaturesNV.html>"]
pub struct PhysicalDeviceCornerSampledImageFeaturesNV {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub corner_sampled_image: Bool32,
}
impl ::std::default::Default for PhysicalDeviceCornerSampledImageFeaturesNV {
fn default() -> PhysicalDeviceCornerSampledImageFeaturesNV {
PhysicalDeviceCornerSampledImageFeaturesNV {
s_type: StructureType::PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV,
p_next: ::std::ptr::null_mut(),
corner_sampled_image: Bool32::default(),
}
}
}
impl PhysicalDeviceCornerSampledImageFeaturesNV {
pub fn builder<'a>() -> PhysicalDeviceCornerSampledImageFeaturesNVBuilder<'a> {
PhysicalDeviceCornerSampledImageFeaturesNVBuilder {
inner: PhysicalDeviceCornerSampledImageFeaturesNV::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDeviceCornerSampledImageFeaturesNVBuilder<'a> {
inner: PhysicalDeviceCornerSampledImageFeaturesNV,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceCornerSampledImageFeaturesNVBuilder<'_> {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceCornerSampledImageFeaturesNV {}
impl<'a> ::std::ops::Deref for PhysicalDeviceCornerSampledImageFeaturesNVBuilder<'a> {
type Target = PhysicalDeviceCornerSampledImageFeaturesNV;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceCornerSampledImageFeaturesNVBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDeviceCornerSampledImageFeaturesNVBuilder<'a> {
pub fn corner_sampled_image(
mut self,
corner_sampled_image: bool,
) -> PhysicalDeviceCornerSampledImageFeaturesNVBuilder<'a> {
self.inner.corner_sampled_image = corner_sampled_image.into();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDeviceCornerSampledImageFeaturesNV {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDeviceComputeShaderDerivativesFeaturesNV.html>"]
pub struct PhysicalDeviceComputeShaderDerivativesFeaturesNV {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub compute_derivative_group_quads: Bool32,
pub compute_derivative_group_linear: Bool32,
}
impl ::std::default::Default for PhysicalDeviceComputeShaderDerivativesFeaturesNV {
fn default() -> PhysicalDeviceComputeShaderDerivativesFeaturesNV {
PhysicalDeviceComputeShaderDerivativesFeaturesNV {
s_type: StructureType::PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV,
p_next: ::std::ptr::null_mut(),
compute_derivative_group_quads: Bool32::default(),
compute_derivative_group_linear: Bool32::default(),
}
}
}
impl PhysicalDeviceComputeShaderDerivativesFeaturesNV {
pub fn builder<'a>() -> PhysicalDeviceComputeShaderDerivativesFeaturesNVBuilder<'a> {
PhysicalDeviceComputeShaderDerivativesFeaturesNVBuilder {
inner: PhysicalDeviceComputeShaderDerivativesFeaturesNV::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDeviceComputeShaderDerivativesFeaturesNVBuilder<'a> {
inner: PhysicalDeviceComputeShaderDerivativesFeaturesNV,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsDeviceCreateInfo
for PhysicalDeviceComputeShaderDerivativesFeaturesNVBuilder<'_>
{
}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceComputeShaderDerivativesFeaturesNV {}
impl<'a> ::std::ops::Deref for PhysicalDeviceComputeShaderDerivativesFeaturesNVBuilder<'a> {
type Target = PhysicalDeviceComputeShaderDerivativesFeaturesNV;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceComputeShaderDerivativesFeaturesNVBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDeviceComputeShaderDerivativesFeaturesNVBuilder<'a> {
pub fn compute_derivative_group_quads(
mut self,
compute_derivative_group_quads: bool,
) -> PhysicalDeviceComputeShaderDerivativesFeaturesNVBuilder<'a> {
self.inner.compute_derivative_group_quads = compute_derivative_group_quads.into();
self
}
pub fn compute_derivative_group_linear(
mut self,
compute_derivative_group_linear: bool,
) -> PhysicalDeviceComputeShaderDerivativesFeaturesNVBuilder<'a> {
self.inner.compute_derivative_group_linear = compute_derivative_group_linear.into();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDeviceComputeShaderDerivativesFeaturesNV {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV.html>"]
pub struct PhysicalDeviceFragmentShaderBarycentricFeaturesNV {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub fragment_shader_barycentric: Bool32,
}
impl ::std::default::Default for PhysicalDeviceFragmentShaderBarycentricFeaturesNV {
fn default() -> PhysicalDeviceFragmentShaderBarycentricFeaturesNV {
PhysicalDeviceFragmentShaderBarycentricFeaturesNV {
s_type: StructureType::PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV,
p_next: ::std::ptr::null_mut(),
fragment_shader_barycentric: Bool32::default(),
}
}
}
impl PhysicalDeviceFragmentShaderBarycentricFeaturesNV {
pub fn builder<'a>() -> PhysicalDeviceFragmentShaderBarycentricFeaturesNVBuilder<'a> {
PhysicalDeviceFragmentShaderBarycentricFeaturesNVBuilder {
inner: PhysicalDeviceFragmentShaderBarycentricFeaturesNV::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDeviceFragmentShaderBarycentricFeaturesNVBuilder<'a> {
inner: PhysicalDeviceFragmentShaderBarycentricFeaturesNV,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsDeviceCreateInfo
for PhysicalDeviceFragmentShaderBarycentricFeaturesNVBuilder<'_>
{
}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceFragmentShaderBarycentricFeaturesNV {}
impl<'a> ::std::ops::Deref for PhysicalDeviceFragmentShaderBarycentricFeaturesNVBuilder<'a> {
type Target = PhysicalDeviceFragmentShaderBarycentricFeaturesNV;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceFragmentShaderBarycentricFeaturesNVBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDeviceFragmentShaderBarycentricFeaturesNVBuilder<'a> {
pub fn fragment_shader_barycentric(
mut self,
fragment_shader_barycentric: bool,
) -> PhysicalDeviceFragmentShaderBarycentricFeaturesNVBuilder<'a> {
self.inner.fragment_shader_barycentric = fragment_shader_barycentric.into();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDeviceFragmentShaderBarycentricFeaturesNV {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDeviceShaderImageFootprintFeaturesNV.html>"]
pub struct PhysicalDeviceShaderImageFootprintFeaturesNV {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub image_footprint: Bool32,
}
impl ::std::default::Default for PhysicalDeviceShaderImageFootprintFeaturesNV {
fn default() -> PhysicalDeviceShaderImageFootprintFeaturesNV {
PhysicalDeviceShaderImageFootprintFeaturesNV {
s_type: StructureType::PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV,
p_next: ::std::ptr::null_mut(),
image_footprint: Bool32::default(),
}
}
}
impl PhysicalDeviceShaderImageFootprintFeaturesNV {
pub fn builder<'a>() -> PhysicalDeviceShaderImageFootprintFeaturesNVBuilder<'a> {
PhysicalDeviceShaderImageFootprintFeaturesNVBuilder {
inner: PhysicalDeviceShaderImageFootprintFeaturesNV::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDeviceShaderImageFootprintFeaturesNVBuilder<'a> {
inner: PhysicalDeviceShaderImageFootprintFeaturesNV,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceShaderImageFootprintFeaturesNVBuilder<'_> {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceShaderImageFootprintFeaturesNV {}
impl<'a> ::std::ops::Deref for PhysicalDeviceShaderImageFootprintFeaturesNVBuilder<'a> {
type Target = PhysicalDeviceShaderImageFootprintFeaturesNV;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceShaderImageFootprintFeaturesNVBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDeviceShaderImageFootprintFeaturesNVBuilder<'a> {
pub fn image_footprint(
mut self,
image_footprint: bool,
) -> PhysicalDeviceShaderImageFootprintFeaturesNVBuilder<'a> {
self.inner.image_footprint = image_footprint.into();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDeviceShaderImageFootprintFeaturesNV {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkShadingRatePaletteNV.html>"]
pub struct ShadingRatePaletteNV {
pub shading_rate_palette_entry_count: u32,
pub p_shading_rate_palette_entries: *const ShadingRatePaletteEntryNV,
}
impl ::std::default::Default for ShadingRatePaletteNV {
fn default() -> ShadingRatePaletteNV {
ShadingRatePaletteNV {
shading_rate_palette_entry_count: u32::default(),
p_shading_rate_palette_entries: ::std::ptr::null(),
}
}
}
impl ShadingRatePaletteNV {
pub fn builder<'a>() -> ShadingRatePaletteNVBuilder<'a> {
ShadingRatePaletteNVBuilder {
inner: ShadingRatePaletteNV::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ShadingRatePaletteNVBuilder<'a> {
inner: ShadingRatePaletteNV,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for ShadingRatePaletteNVBuilder<'a> {
type Target = ShadingRatePaletteNV;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ShadingRatePaletteNVBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ShadingRatePaletteNVBuilder<'a> {
pub fn shading_rate_palette_entries(
mut self,
shading_rate_palette_entries: &'a [ShadingRatePaletteEntryNV],
) -> ShadingRatePaletteNVBuilder<'a> {
self.inner.shading_rate_palette_entry_count = shading_rate_palette_entries.len() as _;
self.inner.p_shading_rate_palette_entries = shading_rate_palette_entries.as_ptr();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ShadingRatePaletteNV {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPipelineViewportShadingRateImageStateCreateInfoNV.html>"]
pub struct PipelineViewportShadingRateImageStateCreateInfoNV {
pub s_type: StructureType,
pub p_next: *const c_void,
pub shading_rate_image_enable: Bool32,
pub viewport_count: u32,
pub p_shading_rate_palettes: *const ShadingRatePaletteNV,
}
impl ::std::default::Default for PipelineViewportShadingRateImageStateCreateInfoNV {
fn default() -> PipelineViewportShadingRateImageStateCreateInfoNV {
PipelineViewportShadingRateImageStateCreateInfoNV {
s_type: StructureType::PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV,
p_next: ::std::ptr::null(),
shading_rate_image_enable: Bool32::default(),
viewport_count: u32::default(),
p_shading_rate_palettes: ::std::ptr::null(),
}
}
}
impl PipelineViewportShadingRateImageStateCreateInfoNV {
pub fn builder<'a>() -> PipelineViewportShadingRateImageStateCreateInfoNVBuilder<'a> {
PipelineViewportShadingRateImageStateCreateInfoNVBuilder {
inner: PipelineViewportShadingRateImageStateCreateInfoNV::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PipelineViewportShadingRateImageStateCreateInfoNVBuilder<'a> {
inner: PipelineViewportShadingRateImageStateCreateInfoNV,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsPipelineViewportStateCreateInfo
for PipelineViewportShadingRateImageStateCreateInfoNVBuilder<'_>
{
}
unsafe impl ExtendsPipelineViewportStateCreateInfo
for PipelineViewportShadingRateImageStateCreateInfoNV
{
}
impl<'a> ::std::ops::Deref for PipelineViewportShadingRateImageStateCreateInfoNVBuilder<'a> {
type Target = PipelineViewportShadingRateImageStateCreateInfoNV;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PipelineViewportShadingRateImageStateCreateInfoNVBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PipelineViewportShadingRateImageStateCreateInfoNVBuilder<'a> {
pub fn shading_rate_image_enable(
mut self,
shading_rate_image_enable: bool,
) -> PipelineViewportShadingRateImageStateCreateInfoNVBuilder<'a> {
self.inner.shading_rate_image_enable = shading_rate_image_enable.into();
self
}
pub fn shading_rate_palettes(
mut self,
shading_rate_palettes: &'a [ShadingRatePaletteNV],
) -> PipelineViewportShadingRateImageStateCreateInfoNVBuilder<'a> {
self.inner.viewport_count = shading_rate_palettes.len() as _;
self.inner.p_shading_rate_palettes = shading_rate_palettes.as_ptr();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PipelineViewportShadingRateImageStateCreateInfoNV {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDeviceShadingRateImageFeaturesNV.html>"]
pub struct PhysicalDeviceShadingRateImageFeaturesNV {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub shading_rate_image: Bool32,
pub shading_rate_coarse_sample_order: Bool32,
}
impl ::std::default::Default for PhysicalDeviceShadingRateImageFeaturesNV {
fn default() -> PhysicalDeviceShadingRateImageFeaturesNV {
PhysicalDeviceShadingRateImageFeaturesNV {
s_type: StructureType::PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV,
p_next: ::std::ptr::null_mut(),
shading_rate_image: Bool32::default(),
shading_rate_coarse_sample_order: Bool32::default(),
}
}
}
impl PhysicalDeviceShadingRateImageFeaturesNV {
pub fn builder<'a>() -> PhysicalDeviceShadingRateImageFeaturesNVBuilder<'a> {
PhysicalDeviceShadingRateImageFeaturesNVBuilder {
inner: PhysicalDeviceShadingRateImageFeaturesNV::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDeviceShadingRateImageFeaturesNVBuilder<'a> {
inner: PhysicalDeviceShadingRateImageFeaturesNV,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceShadingRateImageFeaturesNVBuilder<'_> {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceShadingRateImageFeaturesNV {}
impl<'a> ::std::ops::Deref for PhysicalDeviceShadingRateImageFeaturesNVBuilder<'a> {
type Target = PhysicalDeviceShadingRateImageFeaturesNV;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceShadingRateImageFeaturesNVBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDeviceShadingRateImageFeaturesNVBuilder<'a> {
pub fn shading_rate_image(
mut self,
shading_rate_image: bool,
) -> PhysicalDeviceShadingRateImageFeaturesNVBuilder<'a> {
self.inner.shading_rate_image = shading_rate_image.into();
self
}
pub fn shading_rate_coarse_sample_order(
mut self,
shading_rate_coarse_sample_order: bool,
) -> PhysicalDeviceShadingRateImageFeaturesNVBuilder<'a> {
self.inner.shading_rate_coarse_sample_order = shading_rate_coarse_sample_order.into();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDeviceShadingRateImageFeaturesNV {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDeviceShadingRateImagePropertiesNV.html>"]
pub struct PhysicalDeviceShadingRateImagePropertiesNV {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub shading_rate_texel_size: Extent2D,
pub shading_rate_palette_size: u32,
pub shading_rate_max_coarse_samples: u32,
}
impl ::std::default::Default for PhysicalDeviceShadingRateImagePropertiesNV {
fn default() -> PhysicalDeviceShadingRateImagePropertiesNV {
PhysicalDeviceShadingRateImagePropertiesNV {
s_type: StructureType::PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV,
p_next: ::std::ptr::null_mut(),
shading_rate_texel_size: Extent2D::default(),
shading_rate_palette_size: u32::default(),
shading_rate_max_coarse_samples: u32::default(),
}
}
}
impl PhysicalDeviceShadingRateImagePropertiesNV {
pub fn builder<'a>() -> PhysicalDeviceShadingRateImagePropertiesNVBuilder<'a> {
PhysicalDeviceShadingRateImagePropertiesNVBuilder {
inner: PhysicalDeviceShadingRateImagePropertiesNV::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDeviceShadingRateImagePropertiesNVBuilder<'a> {
inner: PhysicalDeviceShadingRateImagePropertiesNV,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsPhysicalDeviceProperties2
for PhysicalDeviceShadingRateImagePropertiesNVBuilder<'_>
{
}
unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceShadingRateImagePropertiesNV {}
impl<'a> ::std::ops::Deref for PhysicalDeviceShadingRateImagePropertiesNVBuilder<'a> {
type Target = PhysicalDeviceShadingRateImagePropertiesNV;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceShadingRateImagePropertiesNVBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDeviceShadingRateImagePropertiesNVBuilder<'a> {
pub fn shading_rate_texel_size(
mut self,
shading_rate_texel_size: Extent2D,
) -> PhysicalDeviceShadingRateImagePropertiesNVBuilder<'a> {
self.inner.shading_rate_texel_size = shading_rate_texel_size;
self
}
pub fn shading_rate_palette_size(
mut self,
shading_rate_palette_size: u32,
) -> PhysicalDeviceShadingRateImagePropertiesNVBuilder<'a> {
self.inner.shading_rate_palette_size = shading_rate_palette_size;
self
}
pub fn shading_rate_max_coarse_samples(
mut self,
shading_rate_max_coarse_samples: u32,
) -> PhysicalDeviceShadingRateImagePropertiesNVBuilder<'a> {
self.inner.shading_rate_max_coarse_samples = shading_rate_max_coarse_samples;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDeviceShadingRateImagePropertiesNV {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkCoarseSampleLocationNV.html>"]
pub struct CoarseSampleLocationNV {
pub pixel_x: u32,
pub pixel_y: u32,
pub sample: u32,
}
impl CoarseSampleLocationNV {
pub fn builder<'a>() -> CoarseSampleLocationNVBuilder<'a> {
CoarseSampleLocationNVBuilder {
inner: CoarseSampleLocationNV::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct CoarseSampleLocationNVBuilder<'a> {
inner: CoarseSampleLocationNV,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for CoarseSampleLocationNVBuilder<'a> {
type Target = CoarseSampleLocationNV;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for CoarseSampleLocationNVBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> CoarseSampleLocationNVBuilder<'a> {
pub fn pixel_x(mut self, pixel_x: u32) -> CoarseSampleLocationNVBuilder<'a> {
self.inner.pixel_x = pixel_x;
self
}
pub fn pixel_y(mut self, pixel_y: u32) -> CoarseSampleLocationNVBuilder<'a> {
self.inner.pixel_y = pixel_y;
self
}
pub fn sample(mut self, sample: u32) -> CoarseSampleLocationNVBuilder<'a> {
self.inner.sample = sample;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> CoarseSampleLocationNV {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkCoarseSampleOrderCustomNV.html>"]
pub struct CoarseSampleOrderCustomNV {
pub shading_rate: ShadingRatePaletteEntryNV,
pub sample_count: u32,
pub sample_location_count: u32,
pub p_sample_locations: *const CoarseSampleLocationNV,
}
impl ::std::default::Default for CoarseSampleOrderCustomNV {
fn default() -> CoarseSampleOrderCustomNV {
CoarseSampleOrderCustomNV {
shading_rate: ShadingRatePaletteEntryNV::default(),
sample_count: u32::default(),
sample_location_count: u32::default(),
p_sample_locations: ::std::ptr::null(),
}
}
}
impl CoarseSampleOrderCustomNV {
pub fn builder<'a>() -> CoarseSampleOrderCustomNVBuilder<'a> {
CoarseSampleOrderCustomNVBuilder {
inner: CoarseSampleOrderCustomNV::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct CoarseSampleOrderCustomNVBuilder<'a> {
inner: CoarseSampleOrderCustomNV,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for CoarseSampleOrderCustomNVBuilder<'a> {
type Target = CoarseSampleOrderCustomNV;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for CoarseSampleOrderCustomNVBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> CoarseSampleOrderCustomNVBuilder<'a> {
pub fn shading_rate(
mut self,
shading_rate: ShadingRatePaletteEntryNV,
) -> CoarseSampleOrderCustomNVBuilder<'a> {
self.inner.shading_rate = shading_rate;
self
}
pub fn sample_count(mut self, sample_count: u32) -> CoarseSampleOrderCustomNVBuilder<'a> {
self.inner.sample_count = sample_count;
self
}
pub fn sample_locations(
mut self,
sample_locations: &'a [CoarseSampleLocationNV],
) -> CoarseSampleOrderCustomNVBuilder<'a> {
self.inner.sample_location_count = sample_locations.len() as _;
self.inner.p_sample_locations = sample_locations.as_ptr();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> CoarseSampleOrderCustomNV {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPipelineViewportCoarseSampleOrderStateCreateInfoNV.html>"]
pub struct PipelineViewportCoarseSampleOrderStateCreateInfoNV {
pub s_type: StructureType,
pub p_next: *const c_void,
pub sample_order_type: CoarseSampleOrderTypeNV,
pub custom_sample_order_count: u32,
pub p_custom_sample_orders: *const CoarseSampleOrderCustomNV,
}
impl ::std::default::Default for PipelineViewportCoarseSampleOrderStateCreateInfoNV {
fn default() -> PipelineViewportCoarseSampleOrderStateCreateInfoNV {
PipelineViewportCoarseSampleOrderStateCreateInfoNV {
s_type: StructureType::PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV,
p_next: ::std::ptr::null(),
sample_order_type: CoarseSampleOrderTypeNV::default(),
custom_sample_order_count: u32::default(),
p_custom_sample_orders: ::std::ptr::null(),
}
}
}
impl PipelineViewportCoarseSampleOrderStateCreateInfoNV {
pub fn builder<'a>() -> PipelineViewportCoarseSampleOrderStateCreateInfoNVBuilder<'a> {
PipelineViewportCoarseSampleOrderStateCreateInfoNVBuilder {
inner: PipelineViewportCoarseSampleOrderStateCreateInfoNV::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PipelineViewportCoarseSampleOrderStateCreateInfoNVBuilder<'a> {
inner: PipelineViewportCoarseSampleOrderStateCreateInfoNV,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsPipelineViewportStateCreateInfo
for PipelineViewportCoarseSampleOrderStateCreateInfoNVBuilder<'_>
{
}
unsafe impl ExtendsPipelineViewportStateCreateInfo
for PipelineViewportCoarseSampleOrderStateCreateInfoNV
{
}
impl<'a> ::std::ops::Deref for PipelineViewportCoarseSampleOrderStateCreateInfoNVBuilder<'a> {
type Target = PipelineViewportCoarseSampleOrderStateCreateInfoNV;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PipelineViewportCoarseSampleOrderStateCreateInfoNVBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PipelineViewportCoarseSampleOrderStateCreateInfoNVBuilder<'a> {
pub fn sample_order_type(
mut self,
sample_order_type: CoarseSampleOrderTypeNV,
) -> PipelineViewportCoarseSampleOrderStateCreateInfoNVBuilder<'a> {
self.inner.sample_order_type = sample_order_type;
self
}
pub fn custom_sample_orders(
mut self,
custom_sample_orders: &'a [CoarseSampleOrderCustomNV],
) -> PipelineViewportCoarseSampleOrderStateCreateInfoNVBuilder<'a> {
self.inner.custom_sample_order_count = custom_sample_orders.len() as _;
self.inner.p_custom_sample_orders = custom_sample_orders.as_ptr();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PipelineViewportCoarseSampleOrderStateCreateInfoNV {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDeviceMeshShaderFeaturesNV.html>"]
pub struct PhysicalDeviceMeshShaderFeaturesNV {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub task_shader: Bool32,
pub mesh_shader: Bool32,
}
impl ::std::default::Default for PhysicalDeviceMeshShaderFeaturesNV {
fn default() -> PhysicalDeviceMeshShaderFeaturesNV {
PhysicalDeviceMeshShaderFeaturesNV {
s_type: StructureType::PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV,
p_next: ::std::ptr::null_mut(),
task_shader: Bool32::default(),
mesh_shader: Bool32::default(),
}
}
}
impl PhysicalDeviceMeshShaderFeaturesNV {
pub fn builder<'a>() -> PhysicalDeviceMeshShaderFeaturesNVBuilder<'a> {
PhysicalDeviceMeshShaderFeaturesNVBuilder {
inner: PhysicalDeviceMeshShaderFeaturesNV::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDeviceMeshShaderFeaturesNVBuilder<'a> {
inner: PhysicalDeviceMeshShaderFeaturesNV,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceMeshShaderFeaturesNVBuilder<'_> {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceMeshShaderFeaturesNV {}
impl<'a> ::std::ops::Deref for PhysicalDeviceMeshShaderFeaturesNVBuilder<'a> {
type Target = PhysicalDeviceMeshShaderFeaturesNV;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceMeshShaderFeaturesNVBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDeviceMeshShaderFeaturesNVBuilder<'a> {
pub fn task_shader(
mut self,
task_shader: bool,
) -> PhysicalDeviceMeshShaderFeaturesNVBuilder<'a> {
self.inner.task_shader = task_shader.into();
self
}
pub fn mesh_shader(
mut self,
mesh_shader: bool,
) -> PhysicalDeviceMeshShaderFeaturesNVBuilder<'a> {
self.inner.mesh_shader = mesh_shader.into();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDeviceMeshShaderFeaturesNV {
self.inner
}
}
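// Illustrative sketch, not part of the generated bindings: the setters above convert
// `bool` into `Bool32`, and both the builder and the raw struct carry the
// `ExtendsDeviceCreateInfo` marker, so either can be pushed onto a device create chain.
// The nested helper function exists only for this illustration.
#[allow(dead_code)]
fn _sketch_mesh_shader_features() {
    fn accepts_device_extension<T: ExtendsDeviceCreateInfo>(_ext: &T) {}
    let features = PhysicalDeviceMeshShaderFeaturesNV::builder()
        .task_shader(true)
        .mesh_shader(true);
    accepts_device_extension(&features);
    accepts_device_extension(&*features);
}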
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDeviceMeshShaderPropertiesNV.html>"]
pub struct PhysicalDeviceMeshShaderPropertiesNV {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub max_draw_mesh_tasks_count: u32,
pub max_task_work_group_invocations: u32,
pub max_task_work_group_size: [u32; 3],
pub max_task_total_memory_size: u32,
pub max_task_output_count: u32,
pub max_mesh_work_group_invocations: u32,
pub max_mesh_work_group_size: [u32; 3],
pub max_mesh_total_memory_size: u32,
pub max_mesh_output_vertices: u32,
pub max_mesh_output_primitives: u32,
pub max_mesh_multiview_view_count: u32,
pub mesh_output_per_vertex_granularity: u32,
pub mesh_output_per_primitive_granularity: u32,
}
impl ::std::default::Default for PhysicalDeviceMeshShaderPropertiesNV {
fn default() -> PhysicalDeviceMeshShaderPropertiesNV {
PhysicalDeviceMeshShaderPropertiesNV {
s_type: StructureType::PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV,
p_next: ::std::ptr::null_mut(),
max_draw_mesh_tasks_count: u32::default(),
max_task_work_group_invocations: u32::default(),
max_task_work_group_size: [0; 3],
max_task_work_group_size: [0; 3],
max_task_total_memory_size: u32::default(),
max_task_output_count: u32::default(),
max_mesh_work_group_invocations: u32::default(),
max_mesh_work_group_size: [0; 3],
max_mesh_total_memory_size: u32::default(),
max_mesh_output_vertices: u32::default(),
max_mesh_output_primitives: u32::default(),
max_mesh_multiview_view_count: u32::default(),
mesh_output_per_vertex_granularity: u32::default(),
mesh_output_per_primitive_granularity: u32::default(),
}
}
}
impl PhysicalDeviceMeshShaderPropertiesNV {
pub fn builder<'a>() -> PhysicalDeviceMeshShaderPropertiesNVBuilder<'a> {
PhysicalDeviceMeshShaderPropertiesNVBuilder {
inner: PhysicalDeviceMeshShaderPropertiesNV::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDeviceMeshShaderPropertiesNVBuilder<'a> {
inner: PhysicalDeviceMeshShaderPropertiesNV,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceMeshShaderPropertiesNVBuilder<'_> {}
unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceMeshShaderPropertiesNV {}
impl<'a> ::std::ops::Deref for PhysicalDeviceMeshShaderPropertiesNVBuilder<'a> {
type Target = PhysicalDeviceMeshShaderPropertiesNV;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceMeshShaderPropertiesNVBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDeviceMeshShaderPropertiesNVBuilder<'a> {
pub fn max_draw_mesh_tasks_count(
mut self,
max_draw_mesh_tasks_count: u32,
) -> PhysicalDeviceMeshShaderPropertiesNVBuilder<'a> {
self.inner.max_draw_mesh_tasks_count = max_draw_mesh_tasks_count;
self
}
pub fn max_task_work_group_invocations(
mut self,
max_task_work_group_invocations: u32,
) -> PhysicalDeviceMeshShaderPropertiesNVBuilder<'a> {
self.inner.max_task_work_group_invocations = max_task_work_group_invocations;
self
}
pub fn max_task_work_group_size(
mut self,
max_task_work_group_size: [u32; 3],
) -> PhysicalDeviceMeshShaderPropertiesNVBuilder<'a> {
self.inner.max_task_work_group_size = max_task_work_group_size;
self
}
pub fn max_task_total_memory_size(
mut self,
max_task_total_memory_size: u32,
) -> PhysicalDeviceMeshShaderPropertiesNVBuilder<'a> {
self.inner.max_task_total_memory_size = max_task_total_memory_size;
self
}
pub fn max_task_output_count(
mut self,
max_task_output_count: u32,
) -> PhysicalDeviceMeshShaderPropertiesNVBuilder<'a> {
self.inner.max_task_output_count = max_task_output_count;
self
}
pub fn max_mesh_work_group_invocations(
mut self,
max_mesh_work_group_invocations: u32,
) -> PhysicalDeviceMeshShaderPropertiesNVBuilder<'a> {
self.inner.max_mesh_work_group_invocations = max_mesh_work_group_invocations;
self
}
pub fn max_mesh_work_group_size(
mut self,
max_mesh_work_group_size: [u32; 3],
) -> PhysicalDeviceMeshShaderPropertiesNVBuilder<'a> {
self.inner.max_mesh_work_group_size = max_mesh_work_group_size;
self
}
pub fn max_mesh_total_memory_size(
mut self,
max_mesh_total_memory_size: u32,
) -> PhysicalDeviceMeshShaderPropertiesNVBuilder<'a> {
self.inner.max_mesh_total_memory_size = max_mesh_total_memory_size;
self
}
pub fn max_mesh_output_vertices(
mut self,
max_mesh_output_vertices: u32,
) -> PhysicalDeviceMeshShaderPropertiesNVBuilder<'a> {
self.inner.max_mesh_output_vertices = max_mesh_output_vertices;
self
}
pub fn max_mesh_output_primitives(
mut self,
max_mesh_output_primitives: u32,
) -> PhysicalDeviceMeshShaderPropertiesNVBuilder<'a> {
self.inner.max_mesh_output_primitives = max_mesh_output_primitives;
self
}
pub fn max_mesh_multiview_view_count(
mut self,
max_mesh_multiview_view_count: u32,
) -> PhysicalDeviceMeshShaderPropertiesNVBuilder<'a> {
self.inner.max_mesh_multiview_view_count = max_mesh_multiview_view_count;
self
}
pub fn mesh_output_per_vertex_granularity(
mut self,
mesh_output_per_vertex_granularity: u32,
) -> PhysicalDeviceMeshShaderPropertiesNVBuilder<'a> {
self.inner.mesh_output_per_vertex_granularity = mesh_output_per_vertex_granularity;
self
}
pub fn mesh_output_per_primitive_granularity(
mut self,
mesh_output_per_primitive_granularity: u32,
) -> PhysicalDeviceMeshShaderPropertiesNVBuilder<'a> {
self.inner.mesh_output_per_primitive_granularity = mesh_output_per_primitive_granularity;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDeviceMeshShaderPropertiesNV {
self.inner
}
}
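// Illustrative sketch, not part of the generated bindings: the properties struct is
// filled by the implementation, so a typical pattern is to default-construct it and let
// a properties2-style query write through the `p_next` chain. The nested helper
// function exists only for this illustration.
#[allow(dead_code)]
fn _sketch_mesh_shader_properties() {
    fn accepts_properties2_extension<T: ExtendsPhysicalDeviceProperties2>(_ext: &mut T) {}
    let mut mesh_props = PhysicalDeviceMeshShaderPropertiesNV::default();
    accepts_properties2_extension(&mut mesh_props);
    // After a query, limits such as `max_draw_mesh_tasks_count` can be read directly.
    let _limit = mesh_props.max_draw_mesh_tasks_count;
}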
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDrawMeshTasksIndirectCommandNV.html>"]
pub struct DrawMeshTasksIndirectCommandNV {
pub task_count: u32,
pub first_task: u32,
}
impl DrawMeshTasksIndirectCommandNV {
pub fn builder<'a>() -> DrawMeshTasksIndirectCommandNVBuilder<'a> {
DrawMeshTasksIndirectCommandNVBuilder {
inner: DrawMeshTasksIndirectCommandNV::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct DrawMeshTasksIndirectCommandNVBuilder<'a> {
inner: DrawMeshTasksIndirectCommandNV,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for DrawMeshTasksIndirectCommandNVBuilder<'a> {
type Target = DrawMeshTasksIndirectCommandNV;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for DrawMeshTasksIndirectCommandNVBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> DrawMeshTasksIndirectCommandNVBuilder<'a> {
pub fn task_count(mut self, task_count: u32) -> DrawMeshTasksIndirectCommandNVBuilder<'a> {
self.inner.task_count = task_count;
self
}
pub fn first_task(mut self, first_task: u32) -> DrawMeshTasksIndirectCommandNVBuilder<'a> {
self.inner.first_task = first_task;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> DrawMeshTasksIndirectCommandNV {
self.inner
}
}
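// Illustrative sketch, not part of the generated bindings: the indirect command is a
// plain `#[repr(C)]` record, so values written with the builder can be copied verbatim
// into an indirect buffer. The counts below are placeholders.
#[allow(dead_code)]
fn _sketch_draw_mesh_tasks_indirect_command() {
    let command = DrawMeshTasksIndirectCommandNV::builder()
        .task_count(1)
        .first_task(0)
        .build();
    assert_eq!(command.task_count, 1);
}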
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkRayTracingShaderGroupCreateInfoNV.html>"]
pub struct RayTracingShaderGroupCreateInfoNV {
pub s_type: StructureType,
pub p_next: *const c_void,
pub ty: RayTracingShaderGroupTypeNV,
pub general_shader: u32,
pub closest_hit_shader: u32,
pub any_hit_shader: u32,
pub intersection_shader: u32,
}
impl ::std::default::Default for RayTracingShaderGroupCreateInfoNV {
fn default() -> RayTracingShaderGroupCreateInfoNV {
RayTracingShaderGroupCreateInfoNV {
s_type: StructureType::RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV,
p_next: ::std::ptr::null(),
ty: RayTracingShaderGroupTypeNV::default(),
general_shader: u32::default(),
closest_hit_shader: u32::default(),
any_hit_shader: u32::default(),
intersection_shader: u32::default(),
}
}
}
impl RayTracingShaderGroupCreateInfoNV {
pub fn builder<'a>() -> RayTracingShaderGroupCreateInfoNVBuilder<'a> {
RayTracingShaderGroupCreateInfoNVBuilder {
inner: RayTracingShaderGroupCreateInfoNV::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct RayTracingShaderGroupCreateInfoNVBuilder<'a> {
inner: RayTracingShaderGroupCreateInfoNV,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsRayTracingShaderGroupCreateInfoNV {}
impl<'a> ::std::ops::Deref for RayTracingShaderGroupCreateInfoNVBuilder<'a> {
type Target = RayTracingShaderGroupCreateInfoNV;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for RayTracingShaderGroupCreateInfoNVBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> RayTracingShaderGroupCreateInfoNVBuilder<'a> {
pub fn ty(
mut self,
ty: RayTracingShaderGroupTypeNV,
) -> RayTracingShaderGroupCreateInfoNVBuilder<'a> {
self.inner.ty = ty;
self
}
pub fn general_shader(
mut self,
general_shader: u32,
) -> RayTracingShaderGroupCreateInfoNVBuilder<'a> {
self.inner.general_shader = general_shader;
self
}
pub fn closest_hit_shader(
mut self,
closest_hit_shader: u32,
) -> RayTracingShaderGroupCreateInfoNVBuilder<'a> {
self.inner.closest_hit_shader = closest_hit_shader;
self
}
pub fn any_hit_shader(
mut self,
any_hit_shader: u32,
) -> RayTracingShaderGroupCreateInfoNVBuilder<'a> {
self.inner.any_hit_shader = any_hit_shader;
self
}
pub fn intersection_shader(
mut self,
intersection_shader: u32,
) -> RayTracingShaderGroupCreateInfoNVBuilder<'a> {
self.inner.intersection_shader = intersection_shader;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsRayTracingShaderGroupCreateInfoNV>(
mut self,
next: &'a mut T,
) -> RayTracingShaderGroupCreateInfoNVBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> RayTracingShaderGroupCreateInfoNV {
self.inner
}
}
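// Illustrative sketch, not part of the generated bindings: a shader group refers to
// stages by index into the pipeline's `p_stages` array. The index below is a
// placeholder; unused slots would normally hold the spec's "shader unused" sentinel.
#[allow(dead_code)]
fn _sketch_ray_tracing_shader_group() {
    let group = RayTracingShaderGroupCreateInfoNV::builder()
        .ty(RayTracingShaderGroupTypeNV::default())
        .general_shader(0)
        .build();
    assert_eq!(group.general_shader, 0);
}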
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkRayTracingPipelineCreateInfoNV.html>"]
pub struct RayTracingPipelineCreateInfoNV {
pub s_type: StructureType,
pub p_next: *const c_void,
pub flags: PipelineCreateFlags,
pub stage_count: u32,
pub p_stages: *const PipelineShaderStageCreateInfo,
pub group_count: u32,
pub p_groups: *const RayTracingShaderGroupCreateInfoNV,
pub max_recursion_depth: u32,
pub layout: PipelineLayout,
pub base_pipeline_handle: Pipeline,
pub base_pipeline_index: i32,
}
impl ::std::default::Default for RayTracingPipelineCreateInfoNV {
fn default() -> RayTracingPipelineCreateInfoNV {
RayTracingPipelineCreateInfoNV {
s_type: StructureType::RAY_TRACING_PIPELINE_CREATE_INFO_NV,
p_next: ::std::ptr::null(),
flags: PipelineCreateFlags::default(),
stage_count: u32::default(),
p_stages: ::std::ptr::null(),
group_count: u32::default(),
p_groups: ::std::ptr::null(),
max_recursion_depth: u32::default(),
layout: PipelineLayout::default(),
base_pipeline_handle: Pipeline::default(),
base_pipeline_index: i32::default(),
}
}
}
impl RayTracingPipelineCreateInfoNV {
pub fn builder<'a>() -> RayTracingPipelineCreateInfoNVBuilder<'a> {
RayTracingPipelineCreateInfoNVBuilder {
inner: RayTracingPipelineCreateInfoNV::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct RayTracingPipelineCreateInfoNVBuilder<'a> {
inner: RayTracingPipelineCreateInfoNV,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsRayTracingPipelineCreateInfoNV {}
impl<'a> ::std::ops::Deref for RayTracingPipelineCreateInfoNVBuilder<'a> {
type Target = RayTracingPipelineCreateInfoNV;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for RayTracingPipelineCreateInfoNVBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> RayTracingPipelineCreateInfoNVBuilder<'a> {
pub fn flags(
mut self,
flags: PipelineCreateFlags,
) -> RayTracingPipelineCreateInfoNVBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn stages(
mut self,
stages: &'a [PipelineShaderStageCreateInfo],
) -> RayTracingPipelineCreateInfoNVBuilder<'a> {
self.inner.stage_count = stages.len() as _;
self.inner.p_stages = stages.as_ptr();
self
}
pub fn groups(
mut self,
groups: &'a [RayTracingShaderGroupCreateInfoNV],
) -> RayTracingPipelineCreateInfoNVBuilder<'a> {
self.inner.group_count = groups.len() as _;
self.inner.p_groups = groups.as_ptr();
self
}
pub fn max_recursion_depth(
mut self,
max_recursion_depth: u32,
) -> RayTracingPipelineCreateInfoNVBuilder<'a> {
self.inner.max_recursion_depth = max_recursion_depth;
self
}
pub fn layout(mut self, layout: PipelineLayout) -> RayTracingPipelineCreateInfoNVBuilder<'a> {
self.inner.layout = layout;
self
}
pub fn base_pipeline_handle(
mut self,
base_pipeline_handle: Pipeline,
) -> RayTracingPipelineCreateInfoNVBuilder<'a> {
self.inner.base_pipeline_handle = base_pipeline_handle;
self
}
pub fn base_pipeline_index(
mut self,
base_pipeline_index: i32,
) -> RayTracingPipelineCreateInfoNVBuilder<'a> {
self.inner.base_pipeline_index = base_pipeline_index;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsRayTracingPipelineCreateInfoNV>(
mut self,
next: &'a mut T,
) -> RayTracingPipelineCreateInfoNVBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> RayTracingPipelineCreateInfoNV {
self.inner
}
}
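// Illustrative sketch, not part of the generated bindings: the builder derives
// `stage_count` and `group_count` from the slices it is given. Empty slices and a
// default layout stand in for real pipeline state here.
#[allow(dead_code)]
fn _sketch_ray_tracing_pipeline_create_info() {
    let stages: &[PipelineShaderStageCreateInfo] = &[];
    let groups: &[RayTracingShaderGroupCreateInfoNV] = &[];
    let create_info = RayTracingPipelineCreateInfoNV::builder()
        .stages(stages)
        .groups(groups)
        .max_recursion_depth(1)
        .layout(PipelineLayout::default());
    assert_eq!(create_info.stage_count, 0);
    assert_eq!(create_info.group_count, 0);
}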
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkGeometryTrianglesNV.html>"]
pub struct GeometryTrianglesNV {
pub s_type: StructureType,
pub p_next: *const c_void,
pub vertex_data: Buffer,
pub vertex_offset: DeviceSize,
pub vertex_count: u32,
pub vertex_stride: DeviceSize,
pub vertex_format: Format,
pub index_data: Buffer,
pub index_offset: DeviceSize,
pub index_count: u32,
pub index_type: IndexType,
pub transform_data: Buffer,
pub transform_offset: DeviceSize,
}
impl ::std::default::Default for GeometryTrianglesNV {
fn default() -> GeometryTrianglesNV {
GeometryTrianglesNV {
s_type: StructureType::GEOMETRY_TRIANGLES_NV,
p_next: ::std::ptr::null(),
vertex_data: Buffer::default(),
vertex_offset: DeviceSize::default(),
vertex_count: u32::default(),
vertex_stride: DeviceSize::default(),
vertex_format: Format::default(),
index_data: Buffer::default(),
index_offset: DeviceSize::default(),
index_count: u32::default(),
index_type: IndexType::default(),
transform_data: Buffer::default(),
transform_offset: DeviceSize::default(),
}
}
}
impl GeometryTrianglesNV {
pub fn builder<'a>() -> GeometryTrianglesNVBuilder<'a> {
GeometryTrianglesNVBuilder {
inner: GeometryTrianglesNV::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct GeometryTrianglesNVBuilder<'a> {
inner: GeometryTrianglesNV,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsGeometryTrianglesNV {}
impl<'a> ::std::ops::Deref for GeometryTrianglesNVBuilder<'a> {
type Target = GeometryTrianglesNV;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for GeometryTrianglesNVBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> GeometryTrianglesNVBuilder<'a> {
pub fn vertex_data(mut self, vertex_data: Buffer) -> GeometryTrianglesNVBuilder<'a> {
self.inner.vertex_data = vertex_data;
self
}
pub fn vertex_offset(mut self, vertex_offset: DeviceSize) -> GeometryTrianglesNVBuilder<'a> {
self.inner.vertex_offset = vertex_offset;
self
}
pub fn vertex_count(mut self, vertex_count: u32) -> GeometryTrianglesNVBuilder<'a> {
self.inner.vertex_count = vertex_count;
self
}
pub fn vertex_stride(mut self, vertex_stride: DeviceSize) -> GeometryTrianglesNVBuilder<'a> {
self.inner.vertex_stride = vertex_stride;
self
}
pub fn vertex_format(mut self, vertex_format: Format) -> GeometryTrianglesNVBuilder<'a> {
self.inner.vertex_format = vertex_format;
self
}
pub fn index_data(mut self, index_data: Buffer) -> GeometryTrianglesNVBuilder<'a> {
self.inner.index_data = index_data;
self
}
pub fn index_offset(mut self, index_offset: DeviceSize) -> GeometryTrianglesNVBuilder<'a> {
self.inner.index_offset = index_offset;
self
}
pub fn index_count(mut self, index_count: u32) -> GeometryTrianglesNVBuilder<'a> {
self.inner.index_count = index_count;
self
}
pub fn index_type(mut self, index_type: IndexType) -> GeometryTrianglesNVBuilder<'a> {
self.inner.index_type = index_type;
self
}
pub fn transform_data(mut self, transform_data: Buffer) -> GeometryTrianglesNVBuilder<'a> {
self.inner.transform_data = transform_data;
self
}
pub fn transform_offset(
mut self,
transform_offset: DeviceSize,
) -> GeometryTrianglesNVBuilder<'a> {
self.inner.transform_offset = transform_offset;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsGeometryTrianglesNV>(
mut self,
next: &'a mut T,
) -> GeometryTrianglesNVBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> GeometryTrianglesNV {
self.inner
}
}
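// Illustrative sketch, not part of the generated bindings: triangle geometry refers to
// vertex and index buffers by handle plus offset, stride, and format. Default handles
// and zero sizes are placeholders for real buffers.
#[allow(dead_code)]
fn _sketch_geometry_triangles() {
    let _triangles = GeometryTrianglesNV::builder()
        .vertex_data(Buffer::default())
        .vertex_offset(DeviceSize::default())
        .vertex_count(0)
        .vertex_stride(DeviceSize::default())
        .vertex_format(Format::default())
        .index_type(IndexType::default())
        .build();
}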
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkGeometryAABBNV.html>"]
pub struct GeometryAABBNV {
pub s_type: StructureType,
pub p_next: *const c_void,
pub aabb_data: Buffer,
pub num_aab_bs: u32,
pub stride: u32,
pub offset: DeviceSize,
}
impl ::std::default::Default for GeometryAABBNV {
fn default() -> GeometryAABBNV {
GeometryAABBNV {
s_type: StructureType::GEOMETRY_AABB_NV,
p_next: ::std::ptr::null(),
aabb_data: Buffer::default(),
num_aab_bs: u32::default(),
stride: u32::default(),
offset: DeviceSize::default(),
}
}
}
impl GeometryAABBNV {
pub fn builder<'a>() -> GeometryAABBNVBuilder<'a> {
GeometryAABBNVBuilder {
inner: GeometryAABBNV::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct GeometryAABBNVBuilder<'a> {
inner: GeometryAABBNV,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsGeometryAABBNV {}
impl<'a> ::std::ops::Deref for GeometryAABBNVBuilder<'a> {
type Target = GeometryAABBNV;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for GeometryAABBNVBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> GeometryAABBNVBuilder<'a> {
pub fn aabb_data(mut self, aabb_data: Buffer) -> GeometryAABBNVBuilder<'a> {
self.inner.aabb_data = aabb_data;
self
}
pub fn num_aab_bs(mut self, num_aab_bs: u32) -> GeometryAABBNVBuilder<'a> {
self.inner.num_aab_bs = num_aab_bs;
self
}
pub fn stride(mut self, stride: u32) -> GeometryAABBNVBuilder<'a> {
self.inner.stride = stride;
self
}
pub fn offset(mut self, offset: DeviceSize) -> GeometryAABBNVBuilder<'a> {
self.inner.offset = offset;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsGeometryAABBNV>(
mut self,
next: &'a mut T,
) -> GeometryAABBNVBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> GeometryAABBNV {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkGeometryDataNV.html>"]
pub struct GeometryDataNV {
pub triangles: GeometryTrianglesNV,
pub aabbs: GeometryAABBNV,
}
impl GeometryDataNV {
pub fn builder<'a>() -> GeometryDataNVBuilder<'a> {
GeometryDataNVBuilder {
inner: GeometryDataNV::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct GeometryDataNVBuilder<'a> {
inner: GeometryDataNV,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for GeometryDataNVBuilder<'a> {
type Target = GeometryDataNV;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for GeometryDataNVBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> GeometryDataNVBuilder<'a> {
pub fn triangles(mut self, triangles: GeometryTrianglesNV) -> GeometryDataNVBuilder<'a> {
self.inner.triangles = triangles;
self
}
pub fn aabbs(mut self, aabbs: GeometryAABBNV) -> GeometryDataNVBuilder<'a> {
self.inner.aabbs = aabbs;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> GeometryDataNV {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkGeometryNV.html>"]
pub struct GeometryNV {
pub s_type: StructureType,
pub p_next: *const c_void,
pub geometry_type: GeometryTypeNV,
pub geometry: GeometryDataNV,
pub flags: GeometryFlagsNV,
}
impl ::std::default::Default for GeometryNV {
fn default() -> GeometryNV {
GeometryNV {
s_type: StructureType::GEOMETRY_NV,
p_next: ::std::ptr::null(),
geometry_type: GeometryTypeNV::default(),
geometry: GeometryDataNV::default(),
flags: GeometryFlagsNV::default(),
}
}
}
impl GeometryNV {
pub fn builder<'a>() -> GeometryNVBuilder<'a> {
GeometryNVBuilder {
inner: GeometryNV::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct GeometryNVBuilder<'a> {
inner: GeometryNV,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsGeometryNV {}
impl<'a> ::std::ops::Deref for GeometryNVBuilder<'a> {
type Target = GeometryNV;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for GeometryNVBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> GeometryNVBuilder<'a> {
pub fn geometry_type(mut self, geometry_type: GeometryTypeNV) -> GeometryNVBuilder<'a> {
self.inner.geometry_type = geometry_type;
self
}
pub fn geometry(mut self, geometry: GeometryDataNV) -> GeometryNVBuilder<'a> {
self.inner.geometry = geometry;
self
}
pub fn flags(mut self, flags: GeometryFlagsNV) -> GeometryNVBuilder<'a> {
self.inner.flags = flags;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsGeometryNV>(mut self, next: &'a mut T) -> GeometryNVBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> GeometryNV {
self.inner
}
}
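// Illustrative sketch, not part of the generated bindings: `GeometryDataNV` always
// carries both a triangles and an AABBs member, and `geometry_type` selects which one
// the implementation reads. Defaults stand in for real geometry here.
#[allow(dead_code)]
fn _sketch_geometry_nv() {
    let data = GeometryDataNV::builder()
        .triangles(GeometryTrianglesNV::default())
        .aabbs(GeometryAABBNV::default())
        .build();
    let _geometry = GeometryNV::builder()
        .geometry_type(GeometryTypeNV::default())
        .geometry(data)
        .flags(GeometryFlagsNV::default())
        .build();
}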
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkAccelerationStructureInfoNV.html>"]
pub struct AccelerationStructureInfoNV {
pub s_type: StructureType,
pub p_next: *const c_void,
pub ty: AccelerationStructureTypeNV,
pub flags: BuildAccelerationStructureFlagsNV,
pub instance_count: u32,
pub geometry_count: u32,
pub p_geometries: *const GeometryNV,
}
impl ::std::default::Default for AccelerationStructureInfoNV {
fn default() -> AccelerationStructureInfoNV {
AccelerationStructureInfoNV {
s_type: StructureType::ACCELERATION_STRUCTURE_INFO_NV,
p_next: ::std::ptr::null(),
ty: AccelerationStructureTypeNV::default(),
flags: BuildAccelerationStructureFlagsNV::default(),
instance_count: u32::default(),
geometry_count: u32::default(),
p_geometries: ::std::ptr::null(),
}
}
}
impl AccelerationStructureInfoNV {
pub fn builder<'a>() -> AccelerationStructureInfoNVBuilder<'a> {
AccelerationStructureInfoNVBuilder {
inner: AccelerationStructureInfoNV::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct AccelerationStructureInfoNVBuilder<'a> {
inner: AccelerationStructureInfoNV,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsAccelerationStructureInfoNV {}
impl<'a> ::std::ops::Deref for AccelerationStructureInfoNVBuilder<'a> {
type Target = AccelerationStructureInfoNV;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for AccelerationStructureInfoNVBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> AccelerationStructureInfoNVBuilder<'a> {
pub fn ty(mut self, ty: AccelerationStructureTypeNV) -> AccelerationStructureInfoNVBuilder<'a> {
self.inner.ty = ty;
self
}
pub fn flags(
mut self,
flags: BuildAccelerationStructureFlagsNV,
) -> AccelerationStructureInfoNVBuilder<'a> {
self.inner.flags = flags;
self
}
pub fn instance_count(mut self, instance_count: u32) -> AccelerationStructureInfoNVBuilder<'a> {
self.inner.instance_count = instance_count;
self
}
pub fn geometries(
mut self,
geometries: &'a [GeometryNV],
) -> AccelerationStructureInfoNVBuilder<'a> {
self.inner.geometry_count = geometries.len() as _;
self.inner.p_geometries = geometries.as_ptr();
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsAccelerationStructureInfoNV>(
mut self,
next: &'a mut T,
) -> AccelerationStructureInfoNVBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> AccelerationStructureInfoNV {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkAccelerationStructureCreateInfoNV.html>"]
pub struct AccelerationStructureCreateInfoNV {
pub s_type: StructureType,
pub p_next: *const c_void,
pub compacted_size: DeviceSize,
pub info: AccelerationStructureInfoNV,
}
impl ::std::default::Default for AccelerationStructureCreateInfoNV {
fn default() -> AccelerationStructureCreateInfoNV {
AccelerationStructureCreateInfoNV {
s_type: StructureType::ACCELERATION_STRUCTURE_CREATE_INFO_NV,
p_next: ::std::ptr::null(),
compacted_size: DeviceSize::default(),
info: AccelerationStructureInfoNV::default(),
}
}
}
impl AccelerationStructureCreateInfoNV {
pub fn builder<'a>() -> AccelerationStructureCreateInfoNVBuilder<'a> {
AccelerationStructureCreateInfoNVBuilder {
inner: AccelerationStructureCreateInfoNV::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct AccelerationStructureCreateInfoNVBuilder<'a> {
inner: AccelerationStructureCreateInfoNV,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsAccelerationStructureCreateInfoNV {}
impl<'a> ::std::ops::Deref for AccelerationStructureCreateInfoNVBuilder<'a> {
type Target = AccelerationStructureCreateInfoNV;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for AccelerationStructureCreateInfoNVBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> AccelerationStructureCreateInfoNVBuilder<'a> {
pub fn compacted_size(
mut self,
compacted_size: DeviceSize,
) -> AccelerationStructureCreateInfoNVBuilder<'a> {
self.inner.compacted_size = compacted_size;
self
}
pub fn info(
mut self,
info: AccelerationStructureInfoNV,
) -> AccelerationStructureCreateInfoNVBuilder<'a> {
self.inner.info = info;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsAccelerationStructureCreateInfoNV>(
mut self,
next: &'a mut T,
) -> AccelerationStructureCreateInfoNVBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> AccelerationStructureCreateInfoNV {
self.inner
}
}
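// Illustrative sketch, not part of the generated bindings: the create info embeds an
// `AccelerationStructureInfoNV` whose `geometry_count` is derived from the slice passed
// to `geometries`. The empty slice and default enums are placeholders.
#[allow(dead_code)]
fn _sketch_acceleration_structure_create_info() {
    let geometries: &[GeometryNV] = &[];
    let info = AccelerationStructureInfoNV::builder()
        .ty(AccelerationStructureTypeNV::default())
        .flags(BuildAccelerationStructureFlagsNV::default())
        .geometries(geometries)
        .build();
    let create_info = AccelerationStructureCreateInfoNV::builder()
        .compacted_size(DeviceSize::default())
        .info(info);
    assert_eq!(create_info.info.geometry_count, 0);
}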
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkBindAccelerationStructureMemoryInfoNV.html>"]
pub struct BindAccelerationStructureMemoryInfoNV {
pub s_type: StructureType,
pub p_next: *const c_void,
pub acceleration_structure: AccelerationStructureNV,
pub memory: DeviceMemory,
pub memory_offset: DeviceSize,
pub device_index_count: u32,
pub p_device_indices: *const u32,
}
impl ::std::default::Default for BindAccelerationStructureMemoryInfoNV {
fn default() -> BindAccelerationStructureMemoryInfoNV {
BindAccelerationStructureMemoryInfoNV {
s_type: StructureType::BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV,
p_next: ::std::ptr::null(),
acceleration_structure: AccelerationStructureNV::default(),
memory: DeviceMemory::default(),
memory_offset: DeviceSize::default(),
device_index_count: u32::default(),
p_device_indices: ::std::ptr::null(),
}
}
}
impl BindAccelerationStructureMemoryInfoNV {
pub fn builder<'a>() -> BindAccelerationStructureMemoryInfoNVBuilder<'a> {
BindAccelerationStructureMemoryInfoNVBuilder {
inner: BindAccelerationStructureMemoryInfoNV::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct BindAccelerationStructureMemoryInfoNVBuilder<'a> {
inner: BindAccelerationStructureMemoryInfoNV,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsBindAccelerationStructureMemoryInfoNV {}
impl<'a> ::std::ops::Deref for BindAccelerationStructureMemoryInfoNVBuilder<'a> {
type Target = BindAccelerationStructureMemoryInfoNV;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for BindAccelerationStructureMemoryInfoNVBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> BindAccelerationStructureMemoryInfoNVBuilder<'a> {
pub fn acceleration_structure(
mut self,
acceleration_structure: AccelerationStructureNV,
) -> BindAccelerationStructureMemoryInfoNVBuilder<'a> {
self.inner.acceleration_structure = acceleration_structure;
self
}
pub fn memory(
mut self,
memory: DeviceMemory,
) -> BindAccelerationStructureMemoryInfoNVBuilder<'a> {
self.inner.memory = memory;
self
}
pub fn memory_offset(
mut self,
memory_offset: DeviceSize,
) -> BindAccelerationStructureMemoryInfoNVBuilder<'a> {
self.inner.memory_offset = memory_offset;
self
}
pub fn device_indices(
mut self,
device_indices: &'a [u32],
) -> BindAccelerationStructureMemoryInfoNVBuilder<'a> {
self.inner.device_index_count = device_indices.len() as _;
self.inner.p_device_indices = device_indices.as_ptr();
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsBindAccelerationStructureMemoryInfoNV>(
mut self,
next: &'a mut T,
) -> BindAccelerationStructureMemoryInfoNVBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> BindAccelerationStructureMemoryInfoNV {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkWriteDescriptorSetAccelerationStructureNV.html>"]
pub struct WriteDescriptorSetAccelerationStructureNV {
pub s_type: StructureType,
pub p_next: *const c_void,
pub acceleration_structure_count: u32,
pub p_acceleration_structures: *const AccelerationStructureNV,
}
impl ::std::default::Default for WriteDescriptorSetAccelerationStructureNV {
fn default() -> WriteDescriptorSetAccelerationStructureNV {
WriteDescriptorSetAccelerationStructureNV {
s_type: StructureType::WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV,
p_next: ::std::ptr::null(),
acceleration_structure_count: u32::default(),
p_acceleration_structures: ::std::ptr::null(),
}
}
}
impl WriteDescriptorSetAccelerationStructureNV {
pub fn builder<'a>() -> WriteDescriptorSetAccelerationStructureNVBuilder<'a> {
WriteDescriptorSetAccelerationStructureNVBuilder {
inner: WriteDescriptorSetAccelerationStructureNV::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct WriteDescriptorSetAccelerationStructureNVBuilder<'a> {
inner: WriteDescriptorSetAccelerationStructureNV,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsWriteDescriptorSet for WriteDescriptorSetAccelerationStructureNVBuilder<'_> {}
unsafe impl ExtendsWriteDescriptorSet for WriteDescriptorSetAccelerationStructureNV {}
impl<'a> ::std::ops::Deref for WriteDescriptorSetAccelerationStructureNVBuilder<'a> {
type Target = WriteDescriptorSetAccelerationStructureNV;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for WriteDescriptorSetAccelerationStructureNVBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> WriteDescriptorSetAccelerationStructureNVBuilder<'a> {
pub fn acceleration_structures(
mut self,
acceleration_structures: &'a [AccelerationStructureNV],
) -> WriteDescriptorSetAccelerationStructureNVBuilder<'a> {
self.inner.acceleration_structure_count = acceleration_structures.len() as _;
self.inner.p_acceleration_structures = acceleration_structures.as_ptr();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> WriteDescriptorSetAccelerationStructureNV {
self.inner
}
}
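// Illustrative sketch, not part of the generated bindings: this write extends
// `WriteDescriptorSet` through the `p_next` chain, and the structure count is taken
// from the slice of handles. The default handle is a placeholder, and the nested
// helper function exists only for this illustration.
#[allow(dead_code)]
fn _sketch_write_descriptor_set_acceleration_structure() {
    fn accepts_write_extension<T: ExtendsWriteDescriptorSet>(_ext: &mut T) {}
    let handles = [AccelerationStructureNV::default()];
    let mut write = WriteDescriptorSetAccelerationStructureNV::builder()
        .acceleration_structures(&handles);
    assert_eq!(write.acceleration_structure_count, 1);
    accepts_write_extension(&mut write);
}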
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkAccelerationStructureMemoryRequirementsInfoNV.html>"]
pub struct AccelerationStructureMemoryRequirementsInfoNV {
pub s_type: StructureType,
pub p_next: *const c_void,
pub ty: AccelerationStructureMemoryRequirementsTypeNV,
pub acceleration_structure: AccelerationStructureNV,
}
impl ::std::default::Default for AccelerationStructureMemoryRequirementsInfoNV {
fn default() -> AccelerationStructureMemoryRequirementsInfoNV {
AccelerationStructureMemoryRequirementsInfoNV {
s_type: StructureType::ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV,
p_next: ::std::ptr::null(),
ty: AccelerationStructureMemoryRequirementsTypeNV::default(),
acceleration_structure: AccelerationStructureNV::default(),
}
}
}
impl AccelerationStructureMemoryRequirementsInfoNV {
pub fn builder<'a>() -> AccelerationStructureMemoryRequirementsInfoNVBuilder<'a> {
AccelerationStructureMemoryRequirementsInfoNVBuilder {
inner: AccelerationStructureMemoryRequirementsInfoNV::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct AccelerationStructureMemoryRequirementsInfoNVBuilder<'a> {
inner: AccelerationStructureMemoryRequirementsInfoNV,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsAccelerationStructureMemoryRequirementsInfoNV {}
impl<'a> ::std::ops::Deref for AccelerationStructureMemoryRequirementsInfoNVBuilder<'a> {
type Target = AccelerationStructureMemoryRequirementsInfoNV;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for AccelerationStructureMemoryRequirementsInfoNVBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> AccelerationStructureMemoryRequirementsInfoNVBuilder<'a> {
pub fn ty(
mut self,
ty: AccelerationStructureMemoryRequirementsTypeNV,
) -> AccelerationStructureMemoryRequirementsInfoNVBuilder<'a> {
self.inner.ty = ty;
self
}
pub fn acceleration_structure(
mut self,
acceleration_structure: AccelerationStructureNV,
) -> AccelerationStructureMemoryRequirementsInfoNVBuilder<'a> {
self.inner.acceleration_structure = acceleration_structure;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsAccelerationStructureMemoryRequirementsInfoNV>(
mut self,
next: &'a mut T,
) -> AccelerationStructureMemoryRequirementsInfoNVBuilder<'a> {
unsafe {
let next_ptr = next as *mut T as *mut BaseOutStructure;
let last_next = ptr_chain_iter(next).last().unwrap();
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> AccelerationStructureMemoryRequirementsInfoNV {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDeviceRayTracingPropertiesNV.html>"]
pub struct PhysicalDeviceRayTracingPropertiesNV {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub shader_group_handle_size: u32,
pub max_recursion_depth: u32,
pub max_shader_group_stride: u32,
pub shader_group_base_alignment: u32,
pub max_geometry_count: u64,
pub max_instance_count: u64,
pub max_triangle_count: u64,
pub max_descriptor_set_acceleration_structures: u32,
}
impl ::std::default::Default for PhysicalDeviceRayTracingPropertiesNV {
fn default() -> PhysicalDeviceRayTracingPropertiesNV {
PhysicalDeviceRayTracingPropertiesNV {
s_type: StructureType::PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV,
p_next: ::std::ptr::null_mut(),
shader_group_handle_size: u32::default(),
max_recursion_depth: u32::default(),
max_shader_group_stride: u32::default(),
shader_group_base_alignment: u32::default(),
max_geometry_count: u64::default(),
max_instance_count: u64::default(),
max_triangle_count: u64::default(),
max_descriptor_set_acceleration_structures: u32::default(),
}
}
}
impl PhysicalDeviceRayTracingPropertiesNV {
pub fn builder<'a>() -> PhysicalDeviceRayTracingPropertiesNVBuilder<'a> {
PhysicalDeviceRayTracingPropertiesNVBuilder {
inner: PhysicalDeviceRayTracingPropertiesNV::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDeviceRayTracingPropertiesNVBuilder<'a> {
inner: PhysicalDeviceRayTracingPropertiesNV,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceRayTracingPropertiesNVBuilder<'_> {}
unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceRayTracingPropertiesNV {}
impl<'a> ::std::ops::Deref for PhysicalDeviceRayTracingPropertiesNVBuilder<'a> {
type Target = PhysicalDeviceRayTracingPropertiesNV;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceRayTracingPropertiesNVBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDeviceRayTracingPropertiesNVBuilder<'a> {
pub fn shader_group_handle_size(
mut self,
shader_group_handle_size: u32,
) -> PhysicalDeviceRayTracingPropertiesNVBuilder<'a> {
self.inner.shader_group_handle_size = shader_group_handle_size;
self
}
pub fn max_recursion_depth(
mut self,
max_recursion_depth: u32,
) -> PhysicalDeviceRayTracingPropertiesNVBuilder<'a> {
self.inner.max_recursion_depth = max_recursion_depth;
self
}
pub fn max_shader_group_stride(
mut self,
max_shader_group_stride: u32,
) -> PhysicalDeviceRayTracingPropertiesNVBuilder<'a> {
self.inner.max_shader_group_stride = max_shader_group_stride;
self
}
pub fn shader_group_base_alignment(
mut self,
shader_group_base_alignment: u32,
) -> PhysicalDeviceRayTracingPropertiesNVBuilder<'a> {
self.inner.shader_group_base_alignment = shader_group_base_alignment;
self
}
pub fn max_geometry_count(
mut self,
max_geometry_count: u64,
) -> PhysicalDeviceRayTracingPropertiesNVBuilder<'a> {
self.inner.max_geometry_count = max_geometry_count;
self
}
pub fn max_instance_count(
mut self,
max_instance_count: u64,
) -> PhysicalDeviceRayTracingPropertiesNVBuilder<'a> {
self.inner.max_instance_count = max_instance_count;
self
}
pub fn max_triangle_count(
mut self,
max_triangle_count: u64,
) -> PhysicalDeviceRayTracingPropertiesNVBuilder<'a> {
self.inner.max_triangle_count = max_triangle_count;
self
}
pub fn max_descriptor_set_acceleration_structures(
mut self,
max_descriptor_set_acceleration_structures: u32,
) -> PhysicalDeviceRayTracingPropertiesNVBuilder<'a> {
self.inner.max_descriptor_set_acceleration_structures =
max_descriptor_set_acceleration_structures;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDeviceRayTracingPropertiesNV {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDrmFormatModifierPropertiesListEXT.html>"]
pub struct DrmFormatModifierPropertiesListEXT {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub drm_format_modifier_count: u32,
pub p_drm_format_modifier_properties: *mut DrmFormatModifierPropertiesEXT,
}
impl ::std::default::Default for DrmFormatModifierPropertiesListEXT {
fn default() -> DrmFormatModifierPropertiesListEXT {
DrmFormatModifierPropertiesListEXT {
s_type: StructureType::DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
p_next: ::std::ptr::null_mut(),
drm_format_modifier_count: u32::default(),
p_drm_format_modifier_properties: ::std::ptr::null_mut(),
}
}
}
impl DrmFormatModifierPropertiesListEXT {
pub fn builder<'a>() -> DrmFormatModifierPropertiesListEXTBuilder<'a> {
DrmFormatModifierPropertiesListEXTBuilder {
inner: DrmFormatModifierPropertiesListEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct DrmFormatModifierPropertiesListEXTBuilder<'a> {
inner: DrmFormatModifierPropertiesListEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsFormatProperties2 for DrmFormatModifierPropertiesListEXTBuilder<'_> {}
unsafe impl ExtendsFormatProperties2 for DrmFormatModifierPropertiesListEXT {}
impl<'a> ::std::ops::Deref for DrmFormatModifierPropertiesListEXTBuilder<'a> {
type Target = DrmFormatModifierPropertiesListEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for DrmFormatModifierPropertiesListEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> DrmFormatModifierPropertiesListEXTBuilder<'a> {
pub fn drm_format_modifier_properties(
mut self,
drm_format_modifier_properties: &'a mut [DrmFormatModifierPropertiesEXT],
) -> DrmFormatModifierPropertiesListEXTBuilder<'a> {
self.inner.drm_format_modifier_count = drm_format_modifier_properties.len() as _;
self.inner.p_drm_format_modifier_properties = drm_format_modifier_properties.as_mut_ptr();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> DrmFormatModifierPropertiesListEXT {
self.inner
}
}
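// Illustrative sketch, not part of the generated bindings: the list is an output struct
// chained onto `FormatProperties2`; the caller provides mutable storage and the builder
// records its length in `drm_format_modifier_count`. The nested helper function exists
// only for this illustration.
#[allow(dead_code)]
fn _sketch_drm_format_modifier_properties_list() {
    fn accepts_format_properties2_extension<T: ExtendsFormatProperties2>(_ext: &mut T) {}
    let mut storage = [DrmFormatModifierPropertiesEXT::default(); 4];
    let mut list = DrmFormatModifierPropertiesListEXT::builder()
        .drm_format_modifier_properties(&mut storage);
    assert_eq!(list.drm_format_modifier_count, 4);
    accepts_format_properties2_extension(&mut list);
}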
#[repr(C)]
#[derive(Copy, Clone, Default, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDrmFormatModifierPropertiesEXT.html>"]
pub struct DrmFormatModifierPropertiesEXT {
pub drm_format_modifier: u64,
pub drm_format_modifier_plane_count: u32,
pub drm_format_modifier_tiling_features: FormatFeatureFlags,
}
impl DrmFormatModifierPropertiesEXT {
pub fn builder<'a>() -> DrmFormatModifierPropertiesEXTBuilder<'a> {
DrmFormatModifierPropertiesEXTBuilder {
inner: DrmFormatModifierPropertiesEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct DrmFormatModifierPropertiesEXTBuilder<'a> {
inner: DrmFormatModifierPropertiesEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for DrmFormatModifierPropertiesEXTBuilder<'a> {
type Target = DrmFormatModifierPropertiesEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for DrmFormatModifierPropertiesEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> DrmFormatModifierPropertiesEXTBuilder<'a> {
pub fn drm_format_modifier(
mut self,
drm_format_modifier: u64,
) -> DrmFormatModifierPropertiesEXTBuilder<'a> {
self.inner.drm_format_modifier = drm_format_modifier;
self
}
pub fn drm_format_modifier_plane_count(
mut self,
drm_format_modifier_plane_count: u32,
) -> DrmFormatModifierPropertiesEXTBuilder<'a> {
self.inner.drm_format_modifier_plane_count = drm_format_modifier_plane_count;
self
}
pub fn drm_format_modifier_tiling_features(
mut self,
drm_format_modifier_tiling_features: FormatFeatureFlags,
) -> DrmFormatModifierPropertiesEXTBuilder<'a> {
self.inner.drm_format_modifier_tiling_features = drm_format_modifier_tiling_features;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> DrmFormatModifierPropertiesEXT {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDeviceImageDrmFormatModifierInfoEXT.html>"]
pub struct PhysicalDeviceImageDrmFormatModifierInfoEXT {
pub s_type: StructureType,
pub p_next: *const c_void,
pub drm_format_modifier: u64,
pub sharing_mode: SharingMode,
pub queue_family_index_count: u32,
pub p_queue_family_indices: *const u32,
}
impl ::std::default::Default for PhysicalDeviceImageDrmFormatModifierInfoEXT {
fn default() -> PhysicalDeviceImageDrmFormatModifierInfoEXT {
PhysicalDeviceImageDrmFormatModifierInfoEXT {
s_type: StructureType::PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT,
p_next: ::std::ptr::null(),
drm_format_modifier: u64::default(),
sharing_mode: SharingMode::default(),
queue_family_index_count: u32::default(),
p_queue_family_indices: ::std::ptr::null(),
}
}
}
impl PhysicalDeviceImageDrmFormatModifierInfoEXT {
pub fn builder<'a>() -> PhysicalDeviceImageDrmFormatModifierInfoEXTBuilder<'a> {
PhysicalDeviceImageDrmFormatModifierInfoEXTBuilder {
inner: PhysicalDeviceImageDrmFormatModifierInfoEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDeviceImageDrmFormatModifierInfoEXTBuilder<'a> {
inner: PhysicalDeviceImageDrmFormatModifierInfoEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsPhysicalDeviceImageFormatInfo2
for PhysicalDeviceImageDrmFormatModifierInfoEXTBuilder<'_>
{
}
unsafe impl ExtendsPhysicalDeviceImageFormatInfo2 for PhysicalDeviceImageDrmFormatModifierInfoEXT {}
impl<'a> ::std::ops::Deref for PhysicalDeviceImageDrmFormatModifierInfoEXTBuilder<'a> {
type Target = PhysicalDeviceImageDrmFormatModifierInfoEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceImageDrmFormatModifierInfoEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDeviceImageDrmFormatModifierInfoEXTBuilder<'a> {
pub fn drm_format_modifier(
mut self,
drm_format_modifier: u64,
) -> PhysicalDeviceImageDrmFormatModifierInfoEXTBuilder<'a> {
self.inner.drm_format_modifier = drm_format_modifier;
self
}
pub fn sharing_mode(
mut self,
sharing_mode: SharingMode,
) -> PhysicalDeviceImageDrmFormatModifierInfoEXTBuilder<'a> {
self.inner.sharing_mode = sharing_mode;
self
}
pub fn queue_family_indices(
mut self,
queue_family_indices: &'a [u32],
) -> PhysicalDeviceImageDrmFormatModifierInfoEXTBuilder<'a> {
self.inner.queue_family_index_count = queue_family_indices.len() as _;
self.inner.p_queue_family_indices = queue_family_indices.as_ptr();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDeviceImageDrmFormatModifierInfoEXT {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkImageDrmFormatModifierListCreateInfoEXT.html>"]
pub struct ImageDrmFormatModifierListCreateInfoEXT {
pub s_type: StructureType,
pub p_next: *const c_void,
pub drm_format_modifier_count: u32,
pub p_drm_format_modifiers: *const u64,
}
impl ::std::default::Default for ImageDrmFormatModifierListCreateInfoEXT {
fn default() -> ImageDrmFormatModifierListCreateInfoEXT {
ImageDrmFormatModifierListCreateInfoEXT {
s_type: StructureType::IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT,
p_next: ::std::ptr::null(),
drm_format_modifier_count: u32::default(),
p_drm_format_modifiers: ::std::ptr::null(),
}
}
}
impl ImageDrmFormatModifierListCreateInfoEXT {
pub fn builder<'a>() -> ImageDrmFormatModifierListCreateInfoEXTBuilder<'a> {
ImageDrmFormatModifierListCreateInfoEXTBuilder {
inner: ImageDrmFormatModifierListCreateInfoEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ImageDrmFormatModifierListCreateInfoEXTBuilder<'a> {
inner: ImageDrmFormatModifierListCreateInfoEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsImageCreateInfo for ImageDrmFormatModifierListCreateInfoEXTBuilder<'_> {}
unsafe impl ExtendsImageCreateInfo for ImageDrmFormatModifierListCreateInfoEXT {}
impl<'a> ::std::ops::Deref for ImageDrmFormatModifierListCreateInfoEXTBuilder<'a> {
type Target = ImageDrmFormatModifierListCreateInfoEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ImageDrmFormatModifierListCreateInfoEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ImageDrmFormatModifierListCreateInfoEXTBuilder<'a> {
pub fn drm_format_modifiers(
mut self,
drm_format_modifiers: &'a [u64],
) -> ImageDrmFormatModifierListCreateInfoEXTBuilder<'a> {
self.inner.drm_format_modifier_count = drm_format_modifiers.len() as _;
self.inner.p_drm_format_modifiers = drm_format_modifiers.as_ptr();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ImageDrmFormatModifierListCreateInfoEXT {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkImageDrmFormatModifierExplicitCreateInfoEXT.html>"]
pub struct ImageDrmFormatModifierExplicitCreateInfoEXT {
pub s_type: StructureType,
pub p_next: *const c_void,
pub drm_format_modifier: u64,
pub drm_format_modifier_plane_count: u32,
pub p_plane_layouts: *const SubresourceLayout,
}
impl ::std::default::Default for ImageDrmFormatModifierExplicitCreateInfoEXT {
fn default() -> ImageDrmFormatModifierExplicitCreateInfoEXT {
ImageDrmFormatModifierExplicitCreateInfoEXT {
s_type: StructureType::IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT,
p_next: ::std::ptr::null(),
drm_format_modifier: u64::default(),
drm_format_modifier_plane_count: u32::default(),
p_plane_layouts: ::std::ptr::null(),
}
}
}
impl ImageDrmFormatModifierExplicitCreateInfoEXT {
pub fn builder<'a>() -> ImageDrmFormatModifierExplicitCreateInfoEXTBuilder<'a> {
ImageDrmFormatModifierExplicitCreateInfoEXTBuilder {
inner: ImageDrmFormatModifierExplicitCreateInfoEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ImageDrmFormatModifierExplicitCreateInfoEXTBuilder<'a> {
inner: ImageDrmFormatModifierExplicitCreateInfoEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsImageCreateInfo for ImageDrmFormatModifierExplicitCreateInfoEXTBuilder<'_> {}
unsafe impl ExtendsImageCreateInfo for ImageDrmFormatModifierExplicitCreateInfoEXT {}
impl<'a> ::std::ops::Deref for ImageDrmFormatModifierExplicitCreateInfoEXTBuilder<'a> {
type Target = ImageDrmFormatModifierExplicitCreateInfoEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ImageDrmFormatModifierExplicitCreateInfoEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ImageDrmFormatModifierExplicitCreateInfoEXTBuilder<'a> {
pub fn drm_format_modifier(
mut self,
drm_format_modifier: u64,
) -> ImageDrmFormatModifierExplicitCreateInfoEXTBuilder<'a> {
self.inner.drm_format_modifier = drm_format_modifier;
self
}
pub fn plane_layouts(
mut self,
plane_layouts: &'a [SubresourceLayout],
) -> ImageDrmFormatModifierExplicitCreateInfoEXTBuilder<'a> {
self.inner.drm_format_modifier_plane_count = plane_layouts.len() as _;
self.inner.p_plane_layouts = plane_layouts.as_ptr();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ImageDrmFormatModifierExplicitCreateInfoEXT {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkImageDrmFormatModifierPropertiesEXT.html>"]
pub struct ImageDrmFormatModifierPropertiesEXT {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub drm_format_modifier: u64,
}
impl ::std::default::Default for ImageDrmFormatModifierPropertiesEXT {
fn default() -> ImageDrmFormatModifierPropertiesEXT {
ImageDrmFormatModifierPropertiesEXT {
s_type: StructureType::IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT,
p_next: ::std::ptr::null_mut(),
drm_format_modifier: u64::default(),
}
}
}
impl ImageDrmFormatModifierPropertiesEXT {
pub fn builder<'a>() -> ImageDrmFormatModifierPropertiesEXTBuilder<'a> {
ImageDrmFormatModifierPropertiesEXTBuilder {
inner: ImageDrmFormatModifierPropertiesEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ImageDrmFormatModifierPropertiesEXTBuilder<'a> {
inner: ImageDrmFormatModifierPropertiesEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsImageDrmFormatModifierPropertiesEXT {}
impl<'a> ::std::ops::Deref for ImageDrmFormatModifierPropertiesEXTBuilder<'a> {
type Target = ImageDrmFormatModifierPropertiesEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ImageDrmFormatModifierPropertiesEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ImageDrmFormatModifierPropertiesEXTBuilder<'a> {
pub fn drm_format_modifier(
mut self,
drm_format_modifier: u64,
) -> ImageDrmFormatModifierPropertiesEXTBuilder<'a> {
self.inner.drm_format_modifier = drm_format_modifier;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsImageDrmFormatModifierPropertiesEXT>(
mut self,
next: &'a mut T,
) -> ImageDrmFormatModifierPropertiesEXTBuilder<'a> {
unsafe {
// Reinterpret the pushed struct as a `BaseOutStructure` so its `p_next` can be rewired.
let next_ptr = next as *mut T as *mut BaseOutStructure;
// `next` may itself already head a chain; find its last element.
let last_next = ptr_chain_iter(next).last().unwrap();
// Splice the chains: the tail of the pushed chain now points at the old chain, and the
// root points at the pushed struct (A -> B becomes A -> next .. -> B).
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ImageDrmFormatModifierPropertiesEXT {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkImageStencilUsageCreateInfoEXT.html>"]
pub struct ImageStencilUsageCreateInfoEXT {
pub s_type: StructureType,
pub p_next: *const c_void,
pub stencil_usage: ImageUsageFlags,
}
impl ::std::default::Default for ImageStencilUsageCreateInfoEXT {
fn default() -> ImageStencilUsageCreateInfoEXT {
ImageStencilUsageCreateInfoEXT {
s_type: StructureType::IMAGE_STENCIL_USAGE_CREATE_INFO_EXT,
p_next: ::std::ptr::null(),
stencil_usage: ImageUsageFlags::default(),
}
}
}
impl ImageStencilUsageCreateInfoEXT {
pub fn builder<'a>() -> ImageStencilUsageCreateInfoEXTBuilder<'a> {
ImageStencilUsageCreateInfoEXTBuilder {
inner: ImageStencilUsageCreateInfoEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct ImageStencilUsageCreateInfoEXTBuilder<'a> {
inner: ImageStencilUsageCreateInfoEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsImageCreateInfo for ImageStencilUsageCreateInfoEXTBuilder<'_> {}
unsafe impl ExtendsImageCreateInfo for ImageStencilUsageCreateInfoEXT {}
unsafe impl ExtendsPhysicalDeviceImageFormatInfo2 for ImageStencilUsageCreateInfoEXTBuilder<'_> {}
unsafe impl ExtendsPhysicalDeviceImageFormatInfo2 for ImageStencilUsageCreateInfoEXT {}
impl<'a> ::std::ops::Deref for ImageStencilUsageCreateInfoEXTBuilder<'a> {
type Target = ImageStencilUsageCreateInfoEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for ImageStencilUsageCreateInfoEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> ImageStencilUsageCreateInfoEXTBuilder<'a> {
pub fn stencil_usage(
mut self,
stencil_usage: ImageUsageFlags,
) -> ImageStencilUsageCreateInfoEXTBuilder<'a> {
self.inner.stencil_usage = stencil_usage;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> ImageStencilUsageCreateInfoEXT {
self.inner
}
}
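// A short sketch, illustration only: separate stencil usage is expressed with the same
// `ImageUsageFlags` bitflags used for the other aspects. `ImageUsageFlags` and its
// constants are assumed to be defined elsewhere in this file; the module and test names
// below are not part of the generated API.
#[cfg(test)]
mod image_stencil_usage_example {
    use super::*;
    #[test]
    fn stencil_usage_is_a_bitmask() {
        let stencil_usage_info = ImageStencilUsageCreateInfoEXT::builder()
            .stencil_usage(ImageUsageFlags::DEPTH_STENCIL_ATTACHMENT | ImageUsageFlags::TRANSFER_SRC)
            .build();
        assert!(stencil_usage_info
            .stencil_usage
            .contains(ImageUsageFlags::TRANSFER_SRC));
    }
}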
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDeviceMemoryOverallocationCreateInfoAMD.html>"]
pub struct DeviceMemoryOverallocationCreateInfoAMD {
pub s_type: StructureType,
pub p_next: *const c_void,
pub overallocation_behavior: MemoryOverallocationBehaviorAMD,
}
impl ::std::default::Default for DeviceMemoryOverallocationCreateInfoAMD {
fn default() -> DeviceMemoryOverallocationCreateInfoAMD {
DeviceMemoryOverallocationCreateInfoAMD {
s_type: StructureType::DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD,
p_next: ::std::ptr::null(),
overallocation_behavior: MemoryOverallocationBehaviorAMD::default(),
}
}
}
impl DeviceMemoryOverallocationCreateInfoAMD {
pub fn builder<'a>() -> DeviceMemoryOverallocationCreateInfoAMDBuilder<'a> {
DeviceMemoryOverallocationCreateInfoAMDBuilder {
inner: DeviceMemoryOverallocationCreateInfoAMD::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct DeviceMemoryOverallocationCreateInfoAMDBuilder<'a> {
inner: DeviceMemoryOverallocationCreateInfoAMD,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsDeviceCreateInfo for DeviceMemoryOverallocationCreateInfoAMDBuilder<'_> {}
unsafe impl ExtendsDeviceCreateInfo for DeviceMemoryOverallocationCreateInfoAMD {}
impl<'a> ::std::ops::Deref for DeviceMemoryOverallocationCreateInfoAMDBuilder<'a> {
type Target = DeviceMemoryOverallocationCreateInfoAMD;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for DeviceMemoryOverallocationCreateInfoAMDBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> DeviceMemoryOverallocationCreateInfoAMDBuilder<'a> {
pub fn overallocation_behavior(
mut self,
overallocation_behavior: MemoryOverallocationBehaviorAMD,
) -> DeviceMemoryOverallocationCreateInfoAMDBuilder<'a> {
self.inner.overallocation_behavior = overallocation_behavior;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> DeviceMemoryOverallocationCreateInfoAMD {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDeviceFragmentDensityMapFeaturesEXT.html>"]
pub struct PhysicalDeviceFragmentDensityMapFeaturesEXT {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub fragment_density_map: Bool32,
pub fragment_density_map_dynamic: Bool32,
pub fragment_density_map_non_subsampled_images: Bool32,
}
impl ::std::default::Default for PhysicalDeviceFragmentDensityMapFeaturesEXT {
fn default() -> PhysicalDeviceFragmentDensityMapFeaturesEXT {
PhysicalDeviceFragmentDensityMapFeaturesEXT {
s_type: StructureType::PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT,
p_next: ::std::ptr::null_mut(),
fragment_density_map: Bool32::default(),
fragment_density_map_dynamic: Bool32::default(),
fragment_density_map_non_subsampled_images: Bool32::default(),
}
}
}
impl PhysicalDeviceFragmentDensityMapFeaturesEXT {
pub fn builder<'a>() -> PhysicalDeviceFragmentDensityMapFeaturesEXTBuilder<'a> {
PhysicalDeviceFragmentDensityMapFeaturesEXTBuilder {
inner: PhysicalDeviceFragmentDensityMapFeaturesEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDeviceFragmentDensityMapFeaturesEXTBuilder<'a> {
inner: PhysicalDeviceFragmentDensityMapFeaturesEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceFragmentDensityMapFeaturesEXTBuilder<'_> {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceFragmentDensityMapFeaturesEXT {}
impl<'a> ::std::ops::Deref for PhysicalDeviceFragmentDensityMapFeaturesEXTBuilder<'a> {
type Target = PhysicalDeviceFragmentDensityMapFeaturesEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceFragmentDensityMapFeaturesEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDeviceFragmentDensityMapFeaturesEXTBuilder<'a> {
pub fn fragment_density_map(
mut self,
fragment_density_map: bool,
) -> PhysicalDeviceFragmentDensityMapFeaturesEXTBuilder<'a> {
self.inner.fragment_density_map = fragment_density_map.into();
self
}
pub fn fragment_density_map_dynamic(
mut self,
fragment_density_map_dynamic: bool,
) -> PhysicalDeviceFragmentDensityMapFeaturesEXTBuilder<'a> {
self.inner.fragment_density_map_dynamic = fragment_density_map_dynamic.into();
self
}
pub fn fragment_density_map_non_subsampled_images(
mut self,
fragment_density_map_non_subsampled_images: bool,
) -> PhysicalDeviceFragmentDensityMapFeaturesEXTBuilder<'a> {
self.inner.fragment_density_map_non_subsampled_images =
fragment_density_map_non_subsampled_images.into();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDeviceFragmentDensityMapFeaturesEXT {
self.inner
}
}
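// A minimal sketch of how a feature struct like this is intended to be used, not part of
// the generated bindings: the builder implements `ExtendsDeviceCreateInfo`, so it can be
// pushed onto a `DeviceCreateInfo` p_next chain. This assumes `DeviceCreateInfo`, defined
// elsewhere in this file, exposes the same `push_next` pattern as the root structs here.
#[cfg(test)]
mod fragment_density_map_features_chain_example {
    use super::*;
    #[test]
    fn features_are_prepended_onto_the_device_create_info_chain() {
        let mut fdm_features = PhysicalDeviceFragmentDensityMapFeaturesEXT::builder()
            .fragment_density_map(true);
        let expected =
            &*fdm_features as *const PhysicalDeviceFragmentDensityMapFeaturesEXT as *const c_void;
        let device_create_info = DeviceCreateInfo::builder()
            .push_next(&mut fdm_features)
            .build();
        // The freshly pushed struct becomes the first element of the chain.
        assert_eq!(device_create_info.p_next, expected);
    }
}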
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDeviceFragmentDensityMapPropertiesEXT.html>"]
pub struct PhysicalDeviceFragmentDensityMapPropertiesEXT {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub min_fragment_density_texel_size: Extent2D,
pub max_fragment_density_texel_size: Extent2D,
pub fragment_density_invocations: Bool32,
}
impl ::std::default::Default for PhysicalDeviceFragmentDensityMapPropertiesEXT {
fn default() -> PhysicalDeviceFragmentDensityMapPropertiesEXT {
PhysicalDeviceFragmentDensityMapPropertiesEXT {
s_type: StructureType::PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT,
p_next: ::std::ptr::null_mut(),
min_fragment_density_texel_size: Extent2D::default(),
max_fragment_density_texel_size: Extent2D::default(),
fragment_density_invocations: Bool32::default(),
}
}
}
impl PhysicalDeviceFragmentDensityMapPropertiesEXT {
pub fn builder<'a>() -> PhysicalDeviceFragmentDensityMapPropertiesEXTBuilder<'a> {
PhysicalDeviceFragmentDensityMapPropertiesEXTBuilder {
inner: PhysicalDeviceFragmentDensityMapPropertiesEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDeviceFragmentDensityMapPropertiesEXTBuilder<'a> {
inner: PhysicalDeviceFragmentDensityMapPropertiesEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsPhysicalDeviceProperties2
for PhysicalDeviceFragmentDensityMapPropertiesEXTBuilder<'_>
{
}
unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceFragmentDensityMapPropertiesEXT {}
impl<'a> ::std::ops::Deref for PhysicalDeviceFragmentDensityMapPropertiesEXTBuilder<'a> {
type Target = PhysicalDeviceFragmentDensityMapPropertiesEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceFragmentDensityMapPropertiesEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDeviceFragmentDensityMapPropertiesEXTBuilder<'a> {
pub fn min_fragment_density_texel_size(
mut self,
min_fragment_density_texel_size: Extent2D,
) -> PhysicalDeviceFragmentDensityMapPropertiesEXTBuilder<'a> {
self.inner.min_fragment_density_texel_size = min_fragment_density_texel_size;
self
}
pub fn max_fragment_density_texel_size(
mut self,
max_fragment_density_texel_size: Extent2D,
) -> PhysicalDeviceFragmentDensityMapPropertiesEXTBuilder<'a> {
self.inner.max_fragment_density_texel_size = max_fragment_density_texel_size;
self
}
pub fn fragment_density_invocations(
mut self,
fragment_density_invocations: bool,
) -> PhysicalDeviceFragmentDensityMapPropertiesEXTBuilder<'a> {
self.inner.fragment_density_invocations = fragment_density_invocations.into();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDeviceFragmentDensityMapPropertiesEXT {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkRenderPassFragmentDensityMapCreateInfoEXT.html>"]
pub struct RenderPassFragmentDensityMapCreateInfoEXT {
pub s_type: StructureType,
pub p_next: *const c_void,
pub fragment_density_map_attachment: AttachmentReference,
}
impl ::std::default::Default for RenderPassFragmentDensityMapCreateInfoEXT {
fn default() -> RenderPassFragmentDensityMapCreateInfoEXT {
RenderPassFragmentDensityMapCreateInfoEXT {
s_type: StructureType::RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT,
p_next: ::std::ptr::null(),
fragment_density_map_attachment: AttachmentReference::default(),
}
}
}
impl RenderPassFragmentDensityMapCreateInfoEXT {
pub fn builder<'a>() -> RenderPassFragmentDensityMapCreateInfoEXTBuilder<'a> {
RenderPassFragmentDensityMapCreateInfoEXTBuilder {
inner: RenderPassFragmentDensityMapCreateInfoEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct RenderPassFragmentDensityMapCreateInfoEXTBuilder<'a> {
inner: RenderPassFragmentDensityMapCreateInfoEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsRenderPassCreateInfo for RenderPassFragmentDensityMapCreateInfoEXTBuilder<'_> {}
unsafe impl ExtendsRenderPassCreateInfo for RenderPassFragmentDensityMapCreateInfoEXT {}
impl<'a> ::std::ops::Deref for RenderPassFragmentDensityMapCreateInfoEXTBuilder<'a> {
type Target = RenderPassFragmentDensityMapCreateInfoEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for RenderPassFragmentDensityMapCreateInfoEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> RenderPassFragmentDensityMapCreateInfoEXTBuilder<'a> {
pub fn fragment_density_map_attachment(
mut self,
fragment_density_map_attachment: AttachmentReference,
) -> RenderPassFragmentDensityMapCreateInfoEXTBuilder<'a> {
self.inner.fragment_density_map_attachment = fragment_density_map_attachment;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> RenderPassFragmentDensityMapCreateInfoEXT {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDeviceScalarBlockLayoutFeaturesEXT.html>"]
pub struct PhysicalDeviceScalarBlockLayoutFeaturesEXT {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub scalar_block_layout: Bool32,
}
impl ::std::default::Default for PhysicalDeviceScalarBlockLayoutFeaturesEXT {
fn default() -> PhysicalDeviceScalarBlockLayoutFeaturesEXT {
PhysicalDeviceScalarBlockLayoutFeaturesEXT {
s_type: StructureType::PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT,
p_next: ::std::ptr::null_mut(),
scalar_block_layout: Bool32::default(),
}
}
}
impl PhysicalDeviceScalarBlockLayoutFeaturesEXT {
pub fn builder<'a>() -> PhysicalDeviceScalarBlockLayoutFeaturesEXTBuilder<'a> {
PhysicalDeviceScalarBlockLayoutFeaturesEXTBuilder {
inner: PhysicalDeviceScalarBlockLayoutFeaturesEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDeviceScalarBlockLayoutFeaturesEXTBuilder<'a> {
inner: PhysicalDeviceScalarBlockLayoutFeaturesEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceScalarBlockLayoutFeaturesEXTBuilder<'_> {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceScalarBlockLayoutFeaturesEXT {}
impl<'a> ::std::ops::Deref for PhysicalDeviceScalarBlockLayoutFeaturesEXTBuilder<'a> {
type Target = PhysicalDeviceScalarBlockLayoutFeaturesEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceScalarBlockLayoutFeaturesEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDeviceScalarBlockLayoutFeaturesEXTBuilder<'a> {
pub fn scalar_block_layout(
mut self,
scalar_block_layout: bool,
) -> PhysicalDeviceScalarBlockLayoutFeaturesEXTBuilder<'a> {
self.inner.scalar_block_layout = scalar_block_layout.into();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDeviceScalarBlockLayoutFeaturesEXT {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDeviceMemoryBudgetPropertiesEXT.html>"]
pub struct PhysicalDeviceMemoryBudgetPropertiesEXT {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub heap_budget: [DeviceSize; MAX_MEMORY_HEAPS],
pub heap_usage: [DeviceSize; MAX_MEMORY_HEAPS],
}
impl ::std::default::Default for PhysicalDeviceMemoryBudgetPropertiesEXT {
fn default() -> PhysicalDeviceMemoryBudgetPropertiesEXT {
PhysicalDeviceMemoryBudgetPropertiesEXT {
s_type: StructureType::PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT,
p_next: ::std::ptr::null_mut(),
heap_budget: unsafe { ::std::mem::zeroed() },
heap_usage: unsafe { ::std::mem::zeroed() },
}
}
}
impl PhysicalDeviceMemoryBudgetPropertiesEXT {
pub fn builder<'a>() -> PhysicalDeviceMemoryBudgetPropertiesEXTBuilder<'a> {
PhysicalDeviceMemoryBudgetPropertiesEXTBuilder {
inner: PhysicalDeviceMemoryBudgetPropertiesEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDeviceMemoryBudgetPropertiesEXTBuilder<'a> {
inner: PhysicalDeviceMemoryBudgetPropertiesEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsPhysicalDeviceMemoryProperties2
for PhysicalDeviceMemoryBudgetPropertiesEXTBuilder<'_>
{
}
unsafe impl ExtendsPhysicalDeviceMemoryProperties2 for PhysicalDeviceMemoryBudgetPropertiesEXT {}
impl<'a> ::std::ops::Deref for PhysicalDeviceMemoryBudgetPropertiesEXTBuilder<'a> {
type Target = PhysicalDeviceMemoryBudgetPropertiesEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceMemoryBudgetPropertiesEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDeviceMemoryBudgetPropertiesEXTBuilder<'a> {
pub fn heap_budget(
mut self,
heap_budget: [DeviceSize; MAX_MEMORY_HEAPS],
) -> PhysicalDeviceMemoryBudgetPropertiesEXTBuilder<'a> {
self.inner.heap_budget = heap_budget;
self
}
pub fn heap_usage(
mut self,
heap_usage: [DeviceSize; MAX_MEMORY_HEAPS],
) -> PhysicalDeviceMemoryBudgetPropertiesEXTBuilder<'a> {
self.inner.heap_usage = heap_usage;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDeviceMemoryBudgetPropertiesEXT {
self.inner
}
}
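// A hedged sketch of how this returned-only struct is typically consumed, illustration
// only: a default instance is chained onto `PhysicalDeviceMemoryProperties2` before the
// memory-properties query fills it in. This assumes `PhysicalDeviceMemoryProperties2`,
// defined elsewhere in this file, exposes a `push_next` bounded on
// `ExtendsPhysicalDeviceMemoryProperties2`, matching the pattern of the root structs in
// this section.
#[cfg(test)]
mod memory_budget_chain_example {
    use super::*;
    #[test]
    fn budget_struct_is_prepended_onto_the_memory_properties2_chain() {
        let mut budget = PhysicalDeviceMemoryBudgetPropertiesEXT::builder();
        let expected =
            &*budget as *const PhysicalDeviceMemoryBudgetPropertiesEXT as *mut c_void;
        let memory_properties2 = PhysicalDeviceMemoryProperties2::builder()
            .push_next(&mut budget)
            .build();
        // After the query, `budget.heap_budget` and `budget.heap_usage` would hold
        // per-heap values; here only the chain wiring is checked.
        assert_eq!(memory_properties2.p_next, expected);
    }
}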
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDeviceMemoryPriorityFeaturesEXT.html>"]
pub struct PhysicalDeviceMemoryPriorityFeaturesEXT {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub memory_priority: Bool32,
}
impl ::std::default::Default for PhysicalDeviceMemoryPriorityFeaturesEXT {
fn default() -> PhysicalDeviceMemoryPriorityFeaturesEXT {
PhysicalDeviceMemoryPriorityFeaturesEXT {
s_type: StructureType::PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT,
p_next: ::std::ptr::null_mut(),
memory_priority: Bool32::default(),
}
}
}
impl PhysicalDeviceMemoryPriorityFeaturesEXT {
pub fn builder<'a>() -> PhysicalDeviceMemoryPriorityFeaturesEXTBuilder<'a> {
PhysicalDeviceMemoryPriorityFeaturesEXTBuilder {
inner: PhysicalDeviceMemoryPriorityFeaturesEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDeviceMemoryPriorityFeaturesEXTBuilder<'a> {
inner: PhysicalDeviceMemoryPriorityFeaturesEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceMemoryPriorityFeaturesEXTBuilder<'_> {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceMemoryPriorityFeaturesEXT {}
impl<'a> ::std::ops::Deref for PhysicalDeviceMemoryPriorityFeaturesEXTBuilder<'a> {
type Target = PhysicalDeviceMemoryPriorityFeaturesEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceMemoryPriorityFeaturesEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDeviceMemoryPriorityFeaturesEXTBuilder<'a> {
pub fn memory_priority(
mut self,
memory_priority: bool,
) -> PhysicalDeviceMemoryPriorityFeaturesEXTBuilder<'a> {
self.inner.memory_priority = memory_priority.into();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDeviceMemoryPriorityFeaturesEXT {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkMemoryPriorityAllocateInfoEXT.html>"]
pub struct MemoryPriorityAllocateInfoEXT {
pub s_type: StructureType,
pub p_next: *const c_void,
pub priority: f32,
}
impl ::std::default::Default for MemoryPriorityAllocateInfoEXT {
fn default() -> MemoryPriorityAllocateInfoEXT {
MemoryPriorityAllocateInfoEXT {
s_type: StructureType::MEMORY_PRIORITY_ALLOCATE_INFO_EXT,
p_next: ::std::ptr::null(),
priority: f32::default(),
}
}
}
impl MemoryPriorityAllocateInfoEXT {
pub fn builder<'a>() -> MemoryPriorityAllocateInfoEXTBuilder<'a> {
MemoryPriorityAllocateInfoEXTBuilder {
inner: MemoryPriorityAllocateInfoEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct MemoryPriorityAllocateInfoEXTBuilder<'a> {
inner: MemoryPriorityAllocateInfoEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsMemoryAllocateInfo for MemoryPriorityAllocateInfoEXTBuilder<'_> {}
unsafe impl ExtendsMemoryAllocateInfo for MemoryPriorityAllocateInfoEXT {}
impl<'a> ::std::ops::Deref for MemoryPriorityAllocateInfoEXTBuilder<'a> {
type Target = MemoryPriorityAllocateInfoEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for MemoryPriorityAllocateInfoEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> MemoryPriorityAllocateInfoEXTBuilder<'a> {
pub fn priority(mut self, priority: f32) -> MemoryPriorityAllocateInfoEXTBuilder<'a> {
self.inner.priority = priority;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> MemoryPriorityAllocateInfoEXT {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDeviceBufferAddressFeaturesEXT.html>"]
pub struct PhysicalDeviceBufferAddressFeaturesEXT {
pub s_type: StructureType,
pub p_next: *mut c_void,
pub buffer_device_address: Bool32,
pub buffer_device_address_capture_replay: Bool32,
pub buffer_device_address_multi_device: Bool32,
}
impl ::std::default::Default for PhysicalDeviceBufferAddressFeaturesEXT {
fn default() -> PhysicalDeviceBufferAddressFeaturesEXT {
PhysicalDeviceBufferAddressFeaturesEXT {
s_type: StructureType::PHYSICAL_DEVICE_BUFFER_ADDRESS_FEATURES_EXT,
p_next: ::std::ptr::null_mut(),
buffer_device_address: Bool32::default(),
buffer_device_address_capture_replay: Bool32::default(),
buffer_device_address_multi_device: Bool32::default(),
}
}
}
impl PhysicalDeviceBufferAddressFeaturesEXT {
pub fn builder<'a>() -> PhysicalDeviceBufferAddressFeaturesEXTBuilder<'a> {
PhysicalDeviceBufferAddressFeaturesEXTBuilder {
inner: PhysicalDeviceBufferAddressFeaturesEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct PhysicalDeviceBufferAddressFeaturesEXTBuilder<'a> {
inner: PhysicalDeviceBufferAddressFeaturesEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceBufferAddressFeaturesEXTBuilder<'_> {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceBufferAddressFeaturesEXT {}
impl<'a> ::std::ops::Deref for PhysicalDeviceBufferAddressFeaturesEXTBuilder<'a> {
type Target = PhysicalDeviceBufferAddressFeaturesEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceBufferAddressFeaturesEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> PhysicalDeviceBufferAddressFeaturesEXTBuilder<'a> {
pub fn buffer_device_address(
mut self,
buffer_device_address: bool,
) -> PhysicalDeviceBufferAddressFeaturesEXTBuilder<'a> {
self.inner.buffer_device_address = buffer_device_address.into();
self
}
pub fn buffer_device_address_capture_replay(
mut self,
buffer_device_address_capture_replay: bool,
) -> PhysicalDeviceBufferAddressFeaturesEXTBuilder<'a> {
self.inner.buffer_device_address_capture_replay =
buffer_device_address_capture_replay.into();
self
}
pub fn buffer_device_address_multi_device(
mut self,
buffer_device_address_multi_device: bool,
) -> PhysicalDeviceBufferAddressFeaturesEXTBuilder<'a> {
self.inner.buffer_device_address_multi_device = buffer_device_address_multi_device.into();
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> PhysicalDeviceBufferAddressFeaturesEXT {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkBufferDeviceAddressInfoEXT.html>"]
pub struct BufferDeviceAddressInfoEXT {
pub s_type: StructureType,
pub p_next: *const c_void,
pub buffer: Buffer,
}
impl ::std::default::Default for BufferDeviceAddressInfoEXT {
fn default() -> BufferDeviceAddressInfoEXT {
BufferDeviceAddressInfoEXT {
s_type: StructureType::BUFFER_DEVICE_ADDRESS_INFO_EXT,
p_next: ::std::ptr::null(),
buffer: Buffer::default(),
}
}
}
impl BufferDeviceAddressInfoEXT {
pub fn builder<'a>() -> BufferDeviceAddressInfoEXTBuilder<'a> {
BufferDeviceAddressInfoEXTBuilder {
inner: BufferDeviceAddressInfoEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct BufferDeviceAddressInfoEXTBuilder<'a> {
inner: BufferDeviceAddressInfoEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
pub unsafe trait ExtendsBufferDeviceAddressInfoEXT {}
impl<'a> ::std::ops::Deref for BufferDeviceAddressInfoEXTBuilder<'a> {
type Target = BufferDeviceAddressInfoEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for BufferDeviceAddressInfoEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> BufferDeviceAddressInfoEXTBuilder<'a> {
pub fn buffer(mut self, buffer: Buffer) -> BufferDeviceAddressInfoEXTBuilder<'a> {
self.inner.buffer = buffer;
self
}
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
#[doc = r" method only exists on structs that can be passed to a function directly. Only"]
#[doc = r" valid extension structs can be pushed into the chain."]
#[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
#[doc = r" chain will look like `A -> D -> B -> C`."]
pub fn push_next<T: ExtendsBufferDeviceAddressInfoEXT>(
mut self,
next: &'a mut T,
) -> BufferDeviceAddressInfoEXTBuilder<'a> {
unsafe {
// Reinterpret the pushed struct as a `BaseOutStructure` so its `p_next` can be rewired.
let next_ptr = next as *mut T as *mut BaseOutStructure;
// `next` may itself already head a chain; find its last element.
let last_next = ptr_chain_iter(next).last().unwrap();
// Splice the chains: the tail of the pushed chain now points at the old chain, and the
// root points at the pushed struct (A -> B becomes A -> next .. -> B).
(*last_next).p_next = self.inner.p_next as _;
self.inner.p_next = next_ptr as _;
}
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> BufferDeviceAddressInfoEXT {
self.inner
}
}
#[repr(C)]
#[derive(Copy, Clone, Debug)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkBufferDeviceAddressCreateInfoEXT.html>"]
pub struct BufferDeviceAddressCreateInfoEXT {
pub s_type: StructureType,
pub p_next: *const c_void,
pub device_address: DeviceSize,
}
impl ::std::default::Default for BufferDeviceAddressCreateInfoEXT {
fn default() -> BufferDeviceAddressCreateInfoEXT {
BufferDeviceAddressCreateInfoEXT {
s_type: StructureType::BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT,
p_next: ::std::ptr::null(),
device_address: DeviceSize::default(),
}
}
}
impl BufferDeviceAddressCreateInfoEXT {
pub fn builder<'a>() -> BufferDeviceAddressCreateInfoEXTBuilder<'a> {
BufferDeviceAddressCreateInfoEXTBuilder {
inner: BufferDeviceAddressCreateInfoEXT::default(),
marker: ::std::marker::PhantomData,
}
}
}
#[repr(transparent)]
pub struct BufferDeviceAddressCreateInfoEXTBuilder<'a> {
inner: BufferDeviceAddressCreateInfoEXT,
marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsBufferCreateInfo for BufferDeviceAddressCreateInfoEXTBuilder<'_> {}
unsafe impl ExtendsBufferCreateInfo for BufferDeviceAddressCreateInfoEXT {}
impl<'a> ::std::ops::Deref for BufferDeviceAddressCreateInfoEXTBuilder<'a> {
type Target = BufferDeviceAddressCreateInfoEXT;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<'a> ::std::ops::DerefMut for BufferDeviceAddressCreateInfoEXTBuilder<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
impl<'a> BufferDeviceAddressCreateInfoEXTBuilder<'a> {
pub fn device_address(
mut self,
device_address: DeviceSize,
) -> BufferDeviceAddressCreateInfoEXTBuilder<'a> {
self.inner.device_address = device_address;
self
}
#[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
#[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
#[doc = r" so references to builders can be passed directly to Vulkan functions."]
pub fn build(self) -> BufferDeviceAddressCreateInfoEXT {
self.inner
}
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkImageLayout.html>"]
pub struct ImageLayout(pub(crate) i32);
impl ImageLayout {
pub fn from_raw(x: i32) -> Self {
ImageLayout(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl ImageLayout {
#[doc = "Implicit layout an image is when its contents are undefined due to various reasons (e.g. right after creation)"]
pub const UNDEFINED: Self = ImageLayout(0);
#[doc = "General layout when image can be used for any kind of access"]
pub const GENERAL: Self = ImageLayout(1);
#[doc = "Optimal layout when image is only used for color attachment read/write"]
pub const COLOR_ATTACHMENT_OPTIMAL: Self = ImageLayout(2);
#[doc = "Optimal layout when image is only used for depth/stencil attachment read/write"]
pub const DEPTH_STENCIL_ATTACHMENT_OPTIMAL: Self = ImageLayout(3);
#[doc = "Optimal layout when image is used for read only depth/stencil attachment and shader access"]
pub const DEPTH_STENCIL_READ_ONLY_OPTIMAL: Self = ImageLayout(4);
#[doc = "Optimal layout when image is used for read only shader access"]
pub const SHADER_READ_ONLY_OPTIMAL: Self = ImageLayout(5);
#[doc = "Optimal layout when image is used only as source of transfer operations"]
pub const TRANSFER_SRC_OPTIMAL: Self = ImageLayout(6);
#[doc = "Optimal layout when image is used only as destination of transfer operations"]
pub const TRANSFER_DST_OPTIMAL: Self = ImageLayout(7);
#[doc = "Initial layout used when the data is populated by the CPU"]
pub const PREINITIALIZED: Self = ImageLayout(8);
}
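// A tiny sketch, illustration only: these enum-like wrappers are transparent newtypes
// over their raw `i32` values, so they round-trip losslessly through `as_raw`/`from_raw`
// and compare by value.
#[cfg(test)]
mod image_layout_raw_round_trip_example {
    use super::*;
    #[test]
    fn raw_round_trip() {
        let layout = ImageLayout::COLOR_ATTACHMENT_OPTIMAL;
        assert!(layout.as_raw() == 2);
        assert!(ImageLayout::from_raw(layout.as_raw()) == layout);
    }
}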
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkAttachmentLoadOp.html>"]
pub struct AttachmentLoadOp(pub(crate) i32);
impl AttachmentLoadOp {
pub fn from_raw(x: i32) -> Self {
AttachmentLoadOp(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl AttachmentLoadOp {
pub const LOAD: Self = AttachmentLoadOp(0);
pub const CLEAR: Self = AttachmentLoadOp(1);
pub const DONT_CARE: Self = AttachmentLoadOp(2);
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkAttachmentStoreOp.html>"]
pub struct AttachmentStoreOp(pub(crate) i32);
impl AttachmentStoreOp {
pub fn from_raw(x: i32) -> Self {
AttachmentStoreOp(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl AttachmentStoreOp {
pub const STORE: Self = AttachmentStoreOp(0);
pub const DONT_CARE: Self = AttachmentStoreOp(1);
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkImageType.html>"]
pub struct ImageType(pub(crate) i32);
impl ImageType {
pub fn from_raw(x: i32) -> Self {
ImageType(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl ImageType {
pub const TYPE_1D: Self = ImageType(0);
pub const TYPE_2D: Self = ImageType(1);
pub const TYPE_3D: Self = ImageType(2);
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkImageTiling.html>"]
pub struct ImageTiling(pub(crate) i32);
impl ImageTiling {
pub fn from_raw(x: i32) -> Self {
ImageTiling(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl ImageTiling {
pub const OPTIMAL: Self = ImageTiling(0);
pub const LINEAR: Self = ImageTiling(1);
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkImageViewType.html>"]
pub struct ImageViewType(pub(crate) i32);
impl ImageViewType {
pub fn from_raw(x: i32) -> Self {
ImageViewType(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl ImageViewType {
pub const TYPE_1D: Self = ImageViewType(0);
pub const TYPE_2D: Self = ImageViewType(1);
pub const TYPE_3D: Self = ImageViewType(2);
pub const CUBE: Self = ImageViewType(3);
pub const TYPE_1D_ARRAY: Self = ImageViewType(4);
pub const TYPE_2D_ARRAY: Self = ImageViewType(5);
pub const CUBE_ARRAY: Self = ImageViewType(6);
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkCommandBufferLevel.html>"]
pub struct CommandBufferLevel(pub(crate) i32);
impl CommandBufferLevel {
pub fn from_raw(x: i32) -> Self {
CommandBufferLevel(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl CommandBufferLevel {
pub const PRIMARY: Self = CommandBufferLevel(0);
pub const SECONDARY: Self = CommandBufferLevel(1);
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkComponentSwizzle.html>"]
pub struct ComponentSwizzle(pub(crate) i32);
impl ComponentSwizzle {
pub fn from_raw(x: i32) -> Self {
ComponentSwizzle(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl ComponentSwizzle {
pub const IDENTITY: Self = ComponentSwizzle(0);
pub const ZERO: Self = ComponentSwizzle(1);
pub const ONE: Self = ComponentSwizzle(2);
pub const R: Self = ComponentSwizzle(3);
pub const G: Self = ComponentSwizzle(4);
pub const B: Self = ComponentSwizzle(5);
pub const A: Self = ComponentSwizzle(6);
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDescriptorType.html>"]
pub struct DescriptorType(pub(crate) i32);
impl DescriptorType {
pub fn from_raw(x: i32) -> Self {
DescriptorType(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl DescriptorType {
pub const SAMPLER: Self = DescriptorType(0);
pub const COMBINED_IMAGE_SAMPLER: Self = DescriptorType(1);
pub const SAMPLED_IMAGE: Self = DescriptorType(2);
pub const STORAGE_IMAGE: Self = DescriptorType(3);
pub const UNIFORM_TEXEL_BUFFER: Self = DescriptorType(4);
pub const STORAGE_TEXEL_BUFFER: Self = DescriptorType(5);
pub const UNIFORM_BUFFER: Self = DescriptorType(6);
pub const STORAGE_BUFFER: Self = DescriptorType(7);
pub const UNIFORM_BUFFER_DYNAMIC: Self = DescriptorType(8);
pub const STORAGE_BUFFER_DYNAMIC: Self = DescriptorType(9);
pub const INPUT_ATTACHMENT: Self = DescriptorType(10);
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkQueryType.html>"]
pub struct QueryType(pub(crate) i32);
impl QueryType {
pub fn from_raw(x: i32) -> Self {
QueryType(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl QueryType {
pub const OCCLUSION: Self = QueryType(0);
#[doc = "Optional"]
pub const PIPELINE_STATISTICS: Self = QueryType(1);
pub const TIMESTAMP: Self = QueryType(2);
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkBorderColor.html>"]
pub struct BorderColor(pub(crate) i32);
impl BorderColor {
pub fn from_raw(x: i32) -> Self {
BorderColor(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl BorderColor {
pub const FLOAT_TRANSPARENT_BLACK: Self = BorderColor(0);
pub const INT_TRANSPARENT_BLACK: Self = BorderColor(1);
pub const FLOAT_OPAQUE_BLACK: Self = BorderColor(2);
pub const INT_OPAQUE_BLACK: Self = BorderColor(3);
pub const FLOAT_OPAQUE_WHITE: Self = BorderColor(4);
pub const INT_OPAQUE_WHITE: Self = BorderColor(5);
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPipelineBindPoint.html>"]
pub struct PipelineBindPoint(pub(crate) i32);
impl PipelineBindPoint {
pub fn from_raw(x: i32) -> Self {
PipelineBindPoint(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl PipelineBindPoint {
pub const GRAPHICS: Self = PipelineBindPoint(0);
pub const COMPUTE: Self = PipelineBindPoint(1);
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPipelineCacheHeaderVersion.html>"]
pub struct PipelineCacheHeaderVersion(pub(crate) i32);
impl PipelineCacheHeaderVersion {
pub fn from_raw(x: i32) -> Self {
PipelineCacheHeaderVersion(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl PipelineCacheHeaderVersion {
pub const ONE: Self = PipelineCacheHeaderVersion(1);
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPrimitiveTopology.html>"]
pub struct PrimitiveTopology(pub(crate) i32);
impl PrimitiveTopology {
pub fn from_raw(x: i32) -> Self {
PrimitiveTopology(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl PrimitiveTopology {
pub const POINT_LIST: Self = PrimitiveTopology(0);
pub const LINE_LIST: Self = PrimitiveTopology(1);
pub const LINE_STRIP: Self = PrimitiveTopology(2);
pub const TRIANGLE_LIST: Self = PrimitiveTopology(3);
pub const TRIANGLE_STRIP: Self = PrimitiveTopology(4);
pub const TRIANGLE_FAN: Self = PrimitiveTopology(5);
pub const LINE_LIST_WITH_ADJACENCY: Self = PrimitiveTopology(6);
pub const LINE_STRIP_WITH_ADJACENCY: Self = PrimitiveTopology(7);
pub const TRIANGLE_LIST_WITH_ADJACENCY: Self = PrimitiveTopology(8);
pub const TRIANGLE_STRIP_WITH_ADJACENCY: Self = PrimitiveTopology(9);
pub const PATCH_LIST: Self = PrimitiveTopology(10);
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkSharingMode.html>"]
pub struct SharingMode(pub(crate) i32);
impl SharingMode {
pub fn from_raw(x: i32) -> Self {
SharingMode(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl SharingMode {
pub const EXCLUSIVE: Self = SharingMode(0);
pub const CONCURRENT: Self = SharingMode(1);
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkIndexType.html>"]
pub struct IndexType(pub(crate) i32);
impl IndexType {
pub fn from_raw(x: i32) -> Self {
IndexType(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl IndexType {
pub const UINT16: Self = IndexType(0);
pub const UINT32: Self = IndexType(1);
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkFilter.html>"]
pub struct Filter(pub(crate) i32);
impl Filter {
pub fn from_raw(x: i32) -> Self {
Filter(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl Filter {
pub const NEAREST: Self = Filter(0);
pub const LINEAR: Self = Filter(1);
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkSamplerMipmapMode.html>"]
pub struct SamplerMipmapMode(pub(crate) i32);
impl SamplerMipmapMode {
pub fn from_raw(x: i32) -> Self {
SamplerMipmapMode(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl SamplerMipmapMode {
#[doc = "Choose nearest mip level"]
pub const NEAREST: Self = SamplerMipmapMode(0);
#[doc = "Linear filter between mip levels"]
pub const LINEAR: Self = SamplerMipmapMode(1);
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkSamplerAddressMode.html>"]
pub struct SamplerAddressMode(pub(crate) i32);
impl SamplerAddressMode {
pub fn from_raw(x: i32) -> Self {
SamplerAddressMode(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl SamplerAddressMode {
pub const REPEAT: Self = SamplerAddressMode(0);
pub const MIRRORED_REPEAT: Self = SamplerAddressMode(1);
pub const CLAMP_TO_EDGE: Self = SamplerAddressMode(2);
pub const CLAMP_TO_BORDER: Self = SamplerAddressMode(3);
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkCompareOp.html>"]
pub struct CompareOp(pub(crate) i32);
impl CompareOp {
pub fn from_raw(x: i32) -> Self {
CompareOp(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl CompareOp {
pub const NEVER: Self = CompareOp(0);
pub const LESS: Self = CompareOp(1);
pub const EQUAL: Self = CompareOp(2);
pub const LESS_OR_EQUAL: Self = CompareOp(3);
pub const GREATER: Self = CompareOp(4);
pub const NOT_EQUAL: Self = CompareOp(5);
pub const GREATER_OR_EQUAL: Self = CompareOp(6);
pub const ALWAYS: Self = CompareOp(7);
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPolygonMode.html>"]
pub struct PolygonMode(pub(crate) i32);
impl PolygonMode {
pub fn from_raw(x: i32) -> Self {
PolygonMode(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl PolygonMode {
pub const FILL: Self = PolygonMode(0);
pub const LINE: Self = PolygonMode(1);
pub const POINT: Self = PolygonMode(2);
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkFrontFace.html>"]
pub struct FrontFace(pub(crate) i32);
impl FrontFace {
pub fn from_raw(x: i32) -> Self {
FrontFace(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl FrontFace {
pub const COUNTER_CLOCKWISE: Self = FrontFace(0);
pub const CLOCKWISE: Self = FrontFace(1);
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkBlendFactor.html>"]
pub struct BlendFactor(pub(crate) i32);
impl BlendFactor {
pub fn from_raw(x: i32) -> Self {
BlendFactor(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl BlendFactor {
pub const ZERO: Self = BlendFactor(0);
pub const ONE: Self = BlendFactor(1);
pub const SRC_COLOR: Self = BlendFactor(2);
pub const ONE_MINUS_SRC_COLOR: Self = BlendFactor(3);
pub const DST_COLOR: Self = BlendFactor(4);
pub const ONE_MINUS_DST_COLOR: Self = BlendFactor(5);
pub const SRC_ALPHA: Self = BlendFactor(6);
pub const ONE_MINUS_SRC_ALPHA: Self = BlendFactor(7);
pub const DST_ALPHA: Self = BlendFactor(8);
pub const ONE_MINUS_DST_ALPHA: Self = BlendFactor(9);
pub const CONSTANT_COLOR: Self = BlendFactor(10);
pub const ONE_MINUS_CONSTANT_COLOR: Self = BlendFactor(11);
pub const CONSTANT_ALPHA: Self = BlendFactor(12);
pub const ONE_MINUS_CONSTANT_ALPHA: Self = BlendFactor(13);
pub const SRC_ALPHA_SATURATE: Self = BlendFactor(14);
pub const SRC1_COLOR: Self = BlendFactor(15);
pub const ONE_MINUS_SRC1_COLOR: Self = BlendFactor(16);
pub const SRC1_ALPHA: Self = BlendFactor(17);
pub const ONE_MINUS_SRC1_ALPHA: Self = BlendFactor(18);
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkBlendOp.html>"]
pub struct BlendOp(pub(crate) i32);
impl BlendOp {
pub fn from_raw(x: i32) -> Self {
BlendOp(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl BlendOp {
pub const ADD: Self = BlendOp(0);
pub const SUBTRACT: Self = BlendOp(1);
pub const REVERSE_SUBTRACT: Self = BlendOp(2);
pub const MIN: Self = BlendOp(3);
pub const MAX: Self = BlendOp(4);
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkStencilOp.html>"]
pub struct StencilOp(pub(crate) i32);
impl StencilOp {
pub fn from_raw(x: i32) -> Self {
StencilOp(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl StencilOp {
pub const KEEP: Self = StencilOp(0);
pub const ZERO: Self = StencilOp(1);
pub const REPLACE: Self = StencilOp(2);
pub const INCREMENT_AND_CLAMP: Self = StencilOp(3);
pub const DECREMENT_AND_CLAMP: Self = StencilOp(4);
pub const INVERT: Self = StencilOp(5);
pub const INCREMENT_AND_WRAP: Self = StencilOp(6);
pub const DECREMENT_AND_WRAP: Self = StencilOp(7);
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkLogicOp.html>"]
pub struct LogicOp(pub(crate) i32);
impl LogicOp {
pub fn from_raw(x: i32) -> Self {
LogicOp(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl LogicOp {
pub const CLEAR: Self = LogicOp(0);
pub const AND: Self = LogicOp(1);
pub const AND_REVERSE: Self = LogicOp(2);
pub const COPY: Self = LogicOp(3);
pub const AND_INVERTED: Self = LogicOp(4);
pub const NO_OP: Self = LogicOp(5);
pub const XOR: Self = LogicOp(6);
pub const OR: Self = LogicOp(7);
pub const NOR: Self = LogicOp(8);
pub const EQUIVALENT: Self = LogicOp(9);
pub const INVERT: Self = LogicOp(10);
pub const OR_REVERSE: Self = LogicOp(11);
pub const COPY_INVERTED: Self = LogicOp(12);
pub const OR_INVERTED: Self = LogicOp(13);
pub const NAND: Self = LogicOp(14);
pub const SET: Self = LogicOp(15);
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkInternalAllocationType.html>"]
pub struct InternalAllocationType(pub(crate) i32);
impl InternalAllocationType {
pub fn from_raw(x: i32) -> Self {
InternalAllocationType(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl InternalAllocationType {
pub const EXECUTABLE: Self = InternalAllocationType(0);
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkSystemAllocationScope.html>"]
pub struct SystemAllocationScope(pub(crate) i32);
impl SystemAllocationScope {
pub fn from_raw(x: i32) -> Self {
SystemAllocationScope(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl SystemAllocationScope {
pub const COMMAND: Self = SystemAllocationScope(0);
pub const OBJECT: Self = SystemAllocationScope(1);
pub const CACHE: Self = SystemAllocationScope(2);
pub const DEVICE: Self = SystemAllocationScope(3);
pub const INSTANCE: Self = SystemAllocationScope(4);
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPhysicalDeviceType.html>"]
pub struct PhysicalDeviceType(pub(crate) i32);
impl PhysicalDeviceType {
pub fn from_raw(x: i32) -> Self {
PhysicalDeviceType(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl PhysicalDeviceType {
pub const OTHER: Self = PhysicalDeviceType(0);
pub const INTEGRATED_GPU: Self = PhysicalDeviceType(1);
pub const DISCRETE_GPU: Self = PhysicalDeviceType(2);
pub const VIRTUAL_GPU: Self = PhysicalDeviceType(3);
pub const CPU: Self = PhysicalDeviceType(4);
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkVertexInputRate.html>"]
pub struct VertexInputRate(pub(crate) i32);
impl VertexInputRate {
pub fn from_raw(x: i32) -> Self {
VertexInputRate(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl VertexInputRate {
pub const VERTEX: Self = VertexInputRate(0);
pub const INSTANCE: Self = VertexInputRate(1);
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkFormat.html>"]
pub struct Format(pub(crate) i32);
impl Format {
pub fn from_raw(x: i32) -> Self {
Format(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl Format {
pub const UNDEFINED: Self = Format(0);
pub const R4G4_UNORM_PACK8: Self = Format(1);
pub const R4G4B4A4_UNORM_PACK16: Self = Format(2);
pub const B4G4R4A4_UNORM_PACK16: Self = Format(3);
pub const R5G6B5_UNORM_PACK16: Self = Format(4);
pub const B5G6R5_UNORM_PACK16: Self = Format(5);
pub const R5G5B5A1_UNORM_PACK16: Self = Format(6);
pub const B5G5R5A1_UNORM_PACK16: Self = Format(7);
pub const A1R5G5B5_UNORM_PACK16: Self = Format(8);
pub const R8_UNORM: Self = Format(9);
pub const R8_SNORM: Self = Format(10);
pub const R8_USCALED: Self = Format(11);
pub const R8_SSCALED: Self = Format(12);
pub const R8_UINT: Self = Format(13);
pub const R8_SINT: Self = Format(14);
pub const R8_SRGB: Self = Format(15);
pub const R8G8_UNORM: Self = Format(16);
pub const R8G8_SNORM: Self = Format(17);
pub const R8G8_USCALED: Self = Format(18);
pub const R8G8_SSCALED: Self = Format(19);
pub const R8G8_UINT: Self = Format(20);
pub const R8G8_SINT: Self = Format(21);
pub const R8G8_SRGB: Self = Format(22);
pub const R8G8B8_UNORM: Self = Format(23);
pub const R8G8B8_SNORM: Self = Format(24);
pub const R8G8B8_USCALED: Self = Format(25);
pub const R8G8B8_SSCALED: Self = Format(26);
pub const R8G8B8_UINT: Self = Format(27);
pub const R8G8B8_SINT: Self = Format(28);
pub const R8G8B8_SRGB: Self = Format(29);
pub const B8G8R8_UNORM: Self = Format(30);
pub const B8G8R8_SNORM: Self = Format(31);
pub const B8G8R8_USCALED: Self = Format(32);
pub const B8G8R8_SSCALED: Self = Format(33);
pub const B8G8R8_UINT: Self = Format(34);
pub const B8G8R8_SINT: Self = Format(35);
pub const B8G8R8_SRGB: Self = Format(36);
pub const R8G8B8A8_UNORM: Self = Format(37);
pub const R8G8B8A8_SNORM: Self = Format(38);
pub const R8G8B8A8_USCALED: Self = Format(39);
pub const R8G8B8A8_SSCALED: Self = Format(40);
pub const R8G8B8A8_UINT: Self = Format(41);
pub const R8G8B8A8_SINT: Self = Format(42);
pub const R8G8B8A8_SRGB: Self = Format(43);
pub const B8G8R8A8_UNORM: Self = Format(44);
pub const B8G8R8A8_SNORM: Self = Format(45);
pub const B8G8R8A8_USCALED: Self = Format(46);
pub const B8G8R8A8_SSCALED: Self = Format(47);
pub const B8G8R8A8_UINT: Self = Format(48);
pub const B8G8R8A8_SINT: Self = Format(49);
pub const B8G8R8A8_SRGB: Self = Format(50);
pub const A8B8G8R8_UNORM_PACK32: Self = Format(51);
pub const A8B8G8R8_SNORM_PACK32: Self = Format(52);
pub const A8B8G8R8_USCALED_PACK32: Self = Format(53);
pub const A8B8G8R8_SSCALED_PACK32: Self = Format(54);
pub const A8B8G8R8_UINT_PACK32: Self = Format(55);
pub const A8B8G8R8_SINT_PACK32: Self = Format(56);
pub const A8B8G8R8_SRGB_PACK32: Self = Format(57);
pub const A2R10G10B10_UNORM_PACK32: Self = Format(58);
pub const A2R10G10B10_SNORM_PACK32: Self = Format(59);
pub const A2R10G10B10_USCALED_PACK32: Self = Format(60);
pub const A2R10G10B10_SSCALED_PACK32: Self = Format(61);
pub const A2R10G10B10_UINT_PACK32: Self = Format(62);
pub const A2R10G10B10_SINT_PACK32: Self = Format(63);
pub const A2B10G10R10_UNORM_PACK32: Self = Format(64);
pub const A2B10G10R10_SNORM_PACK32: Self = Format(65);
pub const A2B10G10R10_USCALED_PACK32: Self = Format(66);
pub const A2B10G10R10_SSCALED_PACK32: Self = Format(67);
pub const A2B10G10R10_UINT_PACK32: Self = Format(68);
pub const A2B10G10R10_SINT_PACK32: Self = Format(69);
pub const R16_UNORM: Self = Format(70);
pub const R16_SNORM: Self = Format(71);
pub const R16_USCALED: Self = Format(72);
pub const R16_SSCALED: Self = Format(73);
pub const R16_UINT: Self = Format(74);
pub const R16_SINT: Self = Format(75);
pub const R16_SFLOAT: Self = Format(76);
pub const R16G16_UNORM: Self = Format(77);
pub const R16G16_SNORM: Self = Format(78);
pub const R16G16_USCALED: Self = Format(79);
pub const R16G16_SSCALED: Self = Format(80);
pub const R16G16_UINT: Self = Format(81);
pub const R16G16_SINT: Self = Format(82);
pub const R16G16_SFLOAT: Self = Format(83);
pub const R16G16B16_UNORM: Self = Format(84);
pub const R16G16B16_SNORM: Self = Format(85);
pub const R16G16B16_USCALED: Self = Format(86);
pub const R16G16B16_SSCALED: Self = Format(87);
pub const R16G16B16_UINT: Self = Format(88);
pub const R16G16B16_SINT: Self = Format(89);
pub const R16G16B16_SFLOAT: Self = Format(90);
pub const R16G16B16A16_UNORM: Self = Format(91);
pub const R16G16B16A16_SNORM: Self = Format(92);
pub const R16G16B16A16_USCALED: Self = Format(93);
pub const R16G16B16A16_SSCALED: Self = Format(94);
pub const R16G16B16A16_UINT: Self = Format(95);
pub const R16G16B16A16_SINT: Self = Format(96);
pub const R16G16B16A16_SFLOAT: Self = Format(97);
pub const R32_UINT: Self = Format(98);
pub const R32_SINT: Self = Format(99);
pub const R32_SFLOAT: Self = Format(100);
pub const R32G32_UINT: Self = Format(101);
pub const R32G32_SINT: Self = Format(102);
pub const R32G32_SFLOAT: Self = Format(103);
pub const R32G32B32_UINT: Self = Format(104);
pub const R32G32B32_SINT: Self = Format(105);
pub const R32G32B32_SFLOAT: Self = Format(106);
pub const R32G32B32A32_UINT: Self = Format(107);
pub const R32G32B32A32_SINT: Self = Format(108);
pub const R32G32B32A32_SFLOAT: Self = Format(109);
pub const R64_UINT: Self = Format(110);
pub const R64_SINT: Self = Format(111);
pub const R64_SFLOAT: Self = Format(112);
pub const R64G64_UINT: Self = Format(113);
pub const R64G64_SINT: Self = Format(114);
pub const R64G64_SFLOAT: Self = Format(115);
pub const R64G64B64_UINT: Self = Format(116);
pub const R64G64B64_SINT: Self = Format(117);
pub const R64G64B64_SFLOAT: Self = Format(118);
pub const R64G64B64A64_UINT: Self = Format(119);
pub const R64G64B64A64_SINT: Self = Format(120);
pub const R64G64B64A64_SFLOAT: Self = Format(121);
pub const B10G11R11_UFLOAT_PACK32: Self = Format(122);
pub const E5B9G9R9_UFLOAT_PACK32: Self = Format(123);
pub const D16_UNORM: Self = Format(124);
pub const X8_D24_UNORM_PACK32: Self = Format(125);
pub const D32_SFLOAT: Self = Format(126);
pub const S8_UINT: Self = Format(127);
pub const D16_UNORM_S8_UINT: Self = Format(128);
pub const D24_UNORM_S8_UINT: Self = Format(129);
pub const D32_SFLOAT_S8_UINT: Self = Format(130);
pub const BC1_RGB_UNORM_BLOCK: Self = Format(131);
pub const BC1_RGB_SRGB_BLOCK: Self = Format(132);
pub const BC1_RGBA_UNORM_BLOCK: Self = Format(133);
pub const BC1_RGBA_SRGB_BLOCK: Self = Format(134);
pub const BC2_UNORM_BLOCK: Self = Format(135);
pub const BC2_SRGB_BLOCK: Self = Format(136);
pub const BC3_UNORM_BLOCK: Self = Format(137);
pub const BC3_SRGB_BLOCK: Self = Format(138);
pub const BC4_UNORM_BLOCK: Self = Format(139);
pub const BC4_SNORM_BLOCK: Self = Format(140);
pub const BC5_UNORM_BLOCK: Self = Format(141);
pub const BC5_SNORM_BLOCK: Self = Format(142);
pub const BC6H_UFLOAT_BLOCK: Self = Format(143);
pub const BC6H_SFLOAT_BLOCK: Self = Format(144);
pub const BC7_UNORM_BLOCK: Self = Format(145);
pub const BC7_SRGB_BLOCK: Self = Format(146);
pub const ETC2_R8G8B8_UNORM_BLOCK: Self = Format(147);
pub const ETC2_R8G8B8_SRGB_BLOCK: Self = Format(148);
pub const ETC2_R8G8B8A1_UNORM_BLOCK: Self = Format(149);
pub const ETC2_R8G8B8A1_SRGB_BLOCK: Self = Format(150);
pub const ETC2_R8G8B8A8_UNORM_BLOCK: Self = Format(151);
pub const ETC2_R8G8B8A8_SRGB_BLOCK: Self = Format(152);
pub const EAC_R11_UNORM_BLOCK: Self = Format(153);
pub const EAC_R11_SNORM_BLOCK: Self = Format(154);
pub const EAC_R11G11_UNORM_BLOCK: Self = Format(155);
pub const EAC_R11G11_SNORM_BLOCK: Self = Format(156);
pub const ASTC_4X4_UNORM_BLOCK: Self = Format(157);
pub const ASTC_4X4_SRGB_BLOCK: Self = Format(158);
pub const ASTC_5X4_UNORM_BLOCK: Self = Format(159);
pub const ASTC_5X4_SRGB_BLOCK: Self = Format(160);
pub const ASTC_5X5_UNORM_BLOCK: Self = Format(161);
pub const ASTC_5X5_SRGB_BLOCK: Self = Format(162);
pub const ASTC_6X5_UNORM_BLOCK: Self = Format(163);
pub const ASTC_6X5_SRGB_BLOCK: Self = Format(164);
pub const ASTC_6X6_UNORM_BLOCK: Self = Format(165);
pub const ASTC_6X6_SRGB_BLOCK: Self = Format(166);
pub const ASTC_8X5_UNORM_BLOCK: Self = Format(167);
pub const ASTC_8X5_SRGB_BLOCK: Self = Format(168);
pub const ASTC_8X6_UNORM_BLOCK: Self = Format(169);
pub const ASTC_8X6_SRGB_BLOCK: Self = Format(170);
pub const ASTC_8X8_UNORM_BLOCK: Self = Format(171);
pub const ASTC_8X8_SRGB_BLOCK: Self = Format(172);
pub const ASTC_10X5_UNORM_BLOCK: Self = Format(173);
pub const ASTC_10X5_SRGB_BLOCK: Self = Format(174);
pub const ASTC_10X6_UNORM_BLOCK: Self = Format(175);
pub const ASTC_10X6_SRGB_BLOCK: Self = Format(176);
pub const ASTC_10X8_UNORM_BLOCK: Self = Format(177);
pub const ASTC_10X8_SRGB_BLOCK: Self = Format(178);
pub const ASTC_10X10_UNORM_BLOCK: Self = Format(179);
pub const ASTC_10X10_SRGB_BLOCK: Self = Format(180);
pub const ASTC_12X10_UNORM_BLOCK: Self = Format(181);
pub const ASTC_12X10_SRGB_BLOCK: Self = Format(182);
pub const ASTC_12X12_UNORM_BLOCK: Self = Format(183);
pub const ASTC_12X12_SRGB_BLOCK: Self = Format(184);
}
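// Illustrative sketch, not part of the generated bindings: `Format` is a
// `#[repr(transparent)]` wrapper around `i32`, so registry values received over FFI
// round-trip losslessly through `from_raw`/`as_raw`.
#[cfg(test)]
mod format_example {
    use super::*;

    #[test]
    fn raw_round_trip() {
        // 43 is the registry value of VK_FORMAT_R8G8B8A8_SRGB.
        let f = Format::from_raw(43);
        assert!(f == Format::R8G8B8A8_SRGB);
        assert_eq!(f.as_raw(), 43);
    }
}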
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkStructureType.html>"]
pub struct StructureType(pub(crate) i32);
impl StructureType {
pub fn from_raw(x: i32) -> Self {
StructureType(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl StructureType {
pub const APPLICATION_INFO: Self = StructureType(0);
pub const INSTANCE_CREATE_INFO: Self = StructureType(1);
pub const DEVICE_QUEUE_CREATE_INFO: Self = StructureType(2);
pub const DEVICE_CREATE_INFO: Self = StructureType(3);
pub const SUBMIT_INFO: Self = StructureType(4);
pub const MEMORY_ALLOCATE_INFO: Self = StructureType(5);
pub const MAPPED_MEMORY_RANGE: Self = StructureType(6);
pub const BIND_SPARSE_INFO: Self = StructureType(7);
pub const FENCE_CREATE_INFO: Self = StructureType(8);
pub const SEMAPHORE_CREATE_INFO: Self = StructureType(9);
pub const EVENT_CREATE_INFO: Self = StructureType(10);
pub const QUERY_POOL_CREATE_INFO: Self = StructureType(11);
pub const BUFFER_CREATE_INFO: Self = StructureType(12);
pub const BUFFER_VIEW_CREATE_INFO: Self = StructureType(13);
pub const IMAGE_CREATE_INFO: Self = StructureType(14);
pub const IMAGE_VIEW_CREATE_INFO: Self = StructureType(15);
pub const SHADER_MODULE_CREATE_INFO: Self = StructureType(16);
pub const PIPELINE_CACHE_CREATE_INFO: Self = StructureType(17);
pub const PIPELINE_SHADER_STAGE_CREATE_INFO: Self = StructureType(18);
pub const PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO: Self = StructureType(19);
pub const PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO: Self = StructureType(20);
pub const PIPELINE_TESSELLATION_STATE_CREATE_INFO: Self = StructureType(21);
pub const PIPELINE_VIEWPORT_STATE_CREATE_INFO: Self = StructureType(22);
pub const PIPELINE_RASTERIZATION_STATE_CREATE_INFO: Self = StructureType(23);
pub const PIPELINE_MULTISAMPLE_STATE_CREATE_INFO: Self = StructureType(24);
pub const PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO: Self = StructureType(25);
pub const PIPELINE_COLOR_BLEND_STATE_CREATE_INFO: Self = StructureType(26);
pub const PIPELINE_DYNAMIC_STATE_CREATE_INFO: Self = StructureType(27);
pub const GRAPHICS_PIPELINE_CREATE_INFO: Self = StructureType(28);
pub const COMPUTE_PIPELINE_CREATE_INFO: Self = StructureType(29);
pub const PIPELINE_LAYOUT_CREATE_INFO: Self = StructureType(30);
pub const SAMPLER_CREATE_INFO: Self = StructureType(31);
pub const DESCRIPTOR_SET_LAYOUT_CREATE_INFO: Self = StructureType(32);
pub const DESCRIPTOR_POOL_CREATE_INFO: Self = StructureType(33);
pub const DESCRIPTOR_SET_ALLOCATE_INFO: Self = StructureType(34);
pub const WRITE_DESCRIPTOR_SET: Self = StructureType(35);
pub const COPY_DESCRIPTOR_SET: Self = StructureType(36);
pub const FRAMEBUFFER_CREATE_INFO: Self = StructureType(37);
pub const RENDER_PASS_CREATE_INFO: Self = StructureType(38);
pub const COMMAND_POOL_CREATE_INFO: Self = StructureType(39);
pub const COMMAND_BUFFER_ALLOCATE_INFO: Self = StructureType(40);
pub const COMMAND_BUFFER_INHERITANCE_INFO: Self = StructureType(41);
pub const COMMAND_BUFFER_BEGIN_INFO: Self = StructureType(42);
pub const RENDER_PASS_BEGIN_INFO: Self = StructureType(43);
pub const BUFFER_MEMORY_BARRIER: Self = StructureType(44);
pub const IMAGE_MEMORY_BARRIER: Self = StructureType(45);
pub const MEMORY_BARRIER: Self = StructureType(46);
#[doc = "Reserved for internal use by the loader, layers, and ICDs"]
pub const LOADER_INSTANCE_CREATE_INFO: Self = StructureType(47);
#[doc = "Reserved for internal use by the loader, layers, and ICDs"]
pub const LOADER_DEVICE_CREATE_INFO: Self = StructureType(48);
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkSubpassContents.html>"]
pub struct SubpassContents(pub(crate) i32);
impl SubpassContents {
pub fn from_raw(x: i32) -> Self {
SubpassContents(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl SubpassContents {
pub const INLINE: Self = SubpassContents(0);
pub const SECONDARY_COMMAND_BUFFERS: Self = SubpassContents(1);
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkResult.html>"]
pub struct Result(pub(crate) i32);
impl Result {
pub fn from_raw(x: i32) -> Self {
Result(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl Result {
#[doc = "Command completed successfully"]
pub const SUCCESS: Self = Result(0);
#[doc = "A fence or query has not yet completed"]
pub const NOT_READY: Self = Result(1);
#[doc = "A wait operation has not completed in the specified time"]
pub const TIMEOUT: Self = Result(2);
#[doc = "An event is signaled"]
pub const EVENT_SET: Self = Result(3);
#[doc = "An event is unsignaled"]
pub const EVENT_RESET: Self = Result(4);
#[doc = "A return array was too small for the result"]
pub const INCOMPLETE: Self = Result(5);
#[doc = "A host memory allocation has failed"]
pub const ERROR_OUT_OF_HOST_MEMORY: Self = Result(-1);
#[doc = "A device memory allocation has failed"]
pub const ERROR_OUT_OF_DEVICE_MEMORY: Self = Result(-2);
#[doc = "Initialization of a object has failed"]
pub const ERROR_INITIALIZATION_FAILED: Self = Result(-3);
#[doc = "The logical device has been lost. See <<devsandqueues-lost-device>>"]
pub const ERROR_DEVICE_LOST: Self = Result(-4);
#[doc = "Mapping of a memory object has failed"]
pub const ERROR_MEMORY_MAP_FAILED: Self = Result(-5);
#[doc = "Layer specified does not exist"]
pub const ERROR_LAYER_NOT_PRESENT: Self = Result(-6);
#[doc = "Extension specified does not exist"]
pub const ERROR_EXTENSION_NOT_PRESENT: Self = Result(-7);
#[doc = "Requested feature is not available on this device"]
pub const ERROR_FEATURE_NOT_PRESENT: Self = Result(-8);
#[doc = "Unable to find a Vulkan driver"]
pub const ERROR_INCOMPATIBLE_DRIVER: Self = Result(-9);
#[doc = "Too many objects of the type have already been created"]
pub const ERROR_TOO_MANY_OBJECTS: Self = Result(-10);
#[doc = "Requested format is not supported on this device"]
pub const ERROR_FORMAT_NOT_SUPPORTED: Self = Result(-11);
#[doc = "A requested pool allocation has failed due to fragmentation of the pool\'s memory"]
pub const ERROR_FRAGMENTED_POOL: Self = Result(-12);
}
impl ::std::error::Error for Result {
fn description(&self) -> &str {
let name = match *self {
Result::SUCCESS => Some("Command completed successfully"),
Result::NOT_READY => Some("A fence or query has not yet completed"),
Result::TIMEOUT => Some("A wait operation has not completed in the specified time"),
Result::EVENT_SET => Some("An event is signaled"),
Result::EVENT_RESET => Some("An event is unsignaled"),
Result::INCOMPLETE => Some("A return array was too small for the result"),
Result::ERROR_OUT_OF_HOST_MEMORY => Some("A host memory allocation has failed"),
Result::ERROR_OUT_OF_DEVICE_MEMORY => Some("A device memory allocation has failed"),
Result::ERROR_INITIALIZATION_FAILED => Some("Initialization of a object has failed"),
Result::ERROR_DEVICE_LOST => {
Some("The logical device has been lost. See <<devsandqueues-lost-device>>")
}
Result::ERROR_MEMORY_MAP_FAILED => Some("Mapping of a memory object has failed"),
Result::ERROR_LAYER_NOT_PRESENT => Some("Layer specified does not exist"),
Result::ERROR_EXTENSION_NOT_PRESENT => Some("Extension specified does not exist"),
Result::ERROR_FEATURE_NOT_PRESENT => {
Some("Requested feature is not available on this device")
}
Result::ERROR_INCOMPATIBLE_DRIVER => Some("Unable to find a Vulkan driver"),
Result::ERROR_TOO_MANY_OBJECTS => {
Some("Too many objects of the type have already been created")
}
Result::ERROR_FORMAT_NOT_SUPPORTED => {
Some("Requested format is not supported on this device")
}
Result::ERROR_FRAGMENTED_POOL => Some(
"A requested pool allocation has failed due to fragmentation of the pool's memory",
),
_ => None,
};
name.unwrap_or("unknown error")
}
}
impl fmt::Display for Result {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Result::SUCCESS => Some("Command completed successfully"),
Result::NOT_READY => Some("A fence or query has not yet completed"),
Result::TIMEOUT => Some("A wait operation has not completed in the specified time"),
Result::EVENT_SET => Some("An event is signaled"),
Result::EVENT_RESET => Some("An event is unsignaled"),
Result::INCOMPLETE => Some("A return array was too small for the result"),
Result::ERROR_OUT_OF_HOST_MEMORY => Some("A host memory allocation has failed"),
Result::ERROR_OUT_OF_DEVICE_MEMORY => Some("A device memory allocation has failed"),
Result::ERROR_INITIALIZATION_FAILED => Some("Initialization of a object has failed"),
Result::ERROR_DEVICE_LOST => {
Some("The logical device has been lost. See <<devsandqueues-lost-device>>")
}
Result::ERROR_MEMORY_MAP_FAILED => Some("Mapping of a memory object has failed"),
Result::ERROR_LAYER_NOT_PRESENT => Some("Layer specified does not exist"),
Result::ERROR_EXTENSION_NOT_PRESENT => Some("Extension specified does not exist"),
Result::ERROR_FEATURE_NOT_PRESENT => {
Some("Requested feature is not available on this device")
}
Result::ERROR_INCOMPATIBLE_DRIVER => Some("Unable to find a Vulkan driver"),
Result::ERROR_TOO_MANY_OBJECTS => {
Some("Too many objects of the type have already been created")
}
Result::ERROR_FORMAT_NOT_SUPPORTED => {
Some("Requested format is not supported on this device")
}
Result::ERROR_FRAGMENTED_POOL => Some(
"A requested pool allocation has failed due to fragmentation of the pool's memory",
),
_ => None,
};
if let Some(x) = name {
fmt.write_str(x)
} else {
self.0.fmt(fmt)
}
}
}
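// Illustrative sketch, not part of the generated bindings: raw `VkResult` codes can be
// wrapped with `Result::from_raw` and inspected through the `Error`/`Display` impls above;
// unknown codes fall back to printing the raw integer.
#[cfg(test)]
mod result_example {
    use super::*;

    #[test]
    fn error_codes_format_as_text() {
        // -4 is the registry value of VK_ERROR_DEVICE_LOST.
        let r = Result::from_raw(-4);
        assert!(r == Result::ERROR_DEVICE_LOST);
        assert!(r.to_string().contains("logical device has been lost"));
        // A code without a known constant is displayed as its numeric value.
        assert_eq!(Result::from_raw(-9999).to_string(), "-9999");
    }
}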
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDynamicState.html>"]
pub struct DynamicState(pub(crate) i32);
impl DynamicState {
pub fn from_raw(x: i32) -> Self {
DynamicState(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl DynamicState {
pub const VIEWPORT: Self = DynamicState(0);
pub const SCISSOR: Self = DynamicState(1);
pub const LINE_WIDTH: Self = DynamicState(2);
pub const DEPTH_BIAS: Self = DynamicState(3);
pub const BLEND_CONSTANTS: Self = DynamicState(4);
pub const DEPTH_BOUNDS: Self = DynamicState(5);
pub const STENCIL_COMPARE_MASK: Self = DynamicState(6);
pub const STENCIL_WRITE_MASK: Self = DynamicState(7);
pub const STENCIL_REFERENCE: Self = DynamicState(8);
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDescriptorUpdateTemplateType.html>"]
pub struct DescriptorUpdateTemplateType(pub(crate) i32);
impl DescriptorUpdateTemplateType {
pub fn from_raw(x: i32) -> Self {
DescriptorUpdateTemplateType(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl DescriptorUpdateTemplateType {
#[doc = "Create descriptor update template for descriptor set updates"]
pub const DESCRIPTOR_SET: Self = DescriptorUpdateTemplateType(0);
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkObjectType.html>"]
pub struct ObjectType(pub(crate) i32);
impl ObjectType {
pub fn from_raw(x: i32) -> Self {
ObjectType(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl ObjectType {
pub const UNKNOWN: Self = ObjectType(0);
#[doc = "VkInstance"]
pub const INSTANCE: Self = ObjectType(1);
#[doc = "VkPhysicalDevice"]
pub const PHYSICAL_DEVICE: Self = ObjectType(2);
#[doc = "VkDevice"]
pub const DEVICE: Self = ObjectType(3);
#[doc = "VkQueue"]
pub const QUEUE: Self = ObjectType(4);
#[doc = "VkSemaphore"]
pub const SEMAPHORE: Self = ObjectType(5);
#[doc = "VkCommandBuffer"]
pub const COMMAND_BUFFER: Self = ObjectType(6);
#[doc = "VkFence"]
pub const FENCE: Self = ObjectType(7);
#[doc = "VkDeviceMemory"]
pub const DEVICE_MEMORY: Self = ObjectType(8);
#[doc = "VkBuffer"]
pub const BUFFER: Self = ObjectType(9);
#[doc = "VkImage"]
pub const IMAGE: Self = ObjectType(10);
#[doc = "VkEvent"]
pub const EVENT: Self = ObjectType(11);
#[doc = "VkQueryPool"]
pub const QUERY_POOL: Self = ObjectType(12);
#[doc = "VkBufferView"]
pub const BUFFER_VIEW: Self = ObjectType(13);
#[doc = "VkImageView"]
pub const IMAGE_VIEW: Self = ObjectType(14);
#[doc = "VkShaderModule"]
pub const SHADER_MODULE: Self = ObjectType(15);
#[doc = "VkPipelineCache"]
pub const PIPELINE_CACHE: Self = ObjectType(16);
#[doc = "VkPipelineLayout"]
pub const PIPELINE_LAYOUT: Self = ObjectType(17);
#[doc = "VkRenderPass"]
pub const RENDER_PASS: Self = ObjectType(18);
#[doc = "VkPipeline"]
pub const PIPELINE: Self = ObjectType(19);
#[doc = "VkDescriptorSetLayout"]
pub const DESCRIPTOR_SET_LAYOUT: Self = ObjectType(20);
#[doc = "VkSampler"]
pub const SAMPLER: Self = ObjectType(21);
#[doc = "VkDescriptorPool"]
pub const DESCRIPTOR_POOL: Self = ObjectType(22);
#[doc = "VkDescriptorSet"]
pub const DESCRIPTOR_SET: Self = ObjectType(23);
#[doc = "VkFramebuffer"]
pub const FRAMEBUFFER: Self = ObjectType(24);
#[doc = "VkCommandPool"]
pub const COMMAND_POOL: Self = ObjectType(25);
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPresentModeKHR.html>"]
pub struct PresentModeKHR(pub(crate) i32);
impl PresentModeKHR {
pub fn from_raw(x: i32) -> Self {
PresentModeKHR(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl PresentModeKHR {
pub const IMMEDIATE: Self = PresentModeKHR(0);
pub const MAILBOX: Self = PresentModeKHR(1);
pub const FIFO: Self = PresentModeKHR(2);
pub const FIFO_RELAXED: Self = PresentModeKHR(3);
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkColorSpaceKHR.html>"]
pub struct ColorSpaceKHR(pub(crate) i32);
impl ColorSpaceKHR {
pub fn from_raw(x: i32) -> Self {
ColorSpaceKHR(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl ColorSpaceKHR {
pub const SRGB_NONLINEAR: Self = ColorSpaceKHR(0);
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkTimeDomainEXT.html>"]
pub struct TimeDomainEXT(pub(crate) i32);
impl TimeDomainEXT {
pub fn from_raw(x: i32) -> Self {
TimeDomainEXT(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl TimeDomainEXT {
pub const DEVICE: Self = TimeDomainEXT(0);
pub const CLOCK_MONOTONIC: Self = TimeDomainEXT(1);
pub const CLOCK_MONOTONIC_RAW: Self = TimeDomainEXT(2);
pub const QUERY_PERFORMANCE_COUNTER: Self = TimeDomainEXT(3);
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDebugReportObjectTypeEXT.html>"]
pub struct DebugReportObjectTypeEXT(pub(crate) i32);
impl DebugReportObjectTypeEXT {
pub fn from_raw(x: i32) -> Self {
DebugReportObjectTypeEXT(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl DebugReportObjectTypeEXT {
pub const UNKNOWN: Self = DebugReportObjectTypeEXT(0);
pub const INSTANCE: Self = DebugReportObjectTypeEXT(1);
pub const PHYSICAL_DEVICE: Self = DebugReportObjectTypeEXT(2);
pub const DEVICE: Self = DebugReportObjectTypeEXT(3);
pub const QUEUE: Self = DebugReportObjectTypeEXT(4);
pub const SEMAPHORE: Self = DebugReportObjectTypeEXT(5);
pub const COMMAND_BUFFER: Self = DebugReportObjectTypeEXT(6);
pub const FENCE: Self = DebugReportObjectTypeEXT(7);
pub const DEVICE_MEMORY: Self = DebugReportObjectTypeEXT(8);
pub const BUFFER: Self = DebugReportObjectTypeEXT(9);
pub const IMAGE: Self = DebugReportObjectTypeEXT(10);
pub const EVENT: Self = DebugReportObjectTypeEXT(11);
pub const QUERY_POOL: Self = DebugReportObjectTypeEXT(12);
pub const BUFFER_VIEW: Self = DebugReportObjectTypeEXT(13);
pub const IMAGE_VIEW: Self = DebugReportObjectTypeEXT(14);
pub const SHADER_MODULE: Self = DebugReportObjectTypeEXT(15);
pub const PIPELINE_CACHE: Self = DebugReportObjectTypeEXT(16);
pub const PIPELINE_LAYOUT: Self = DebugReportObjectTypeEXT(17);
pub const RENDER_PASS: Self = DebugReportObjectTypeEXT(18);
pub const PIPELINE: Self = DebugReportObjectTypeEXT(19);
pub const DESCRIPTOR_SET_LAYOUT: Self = DebugReportObjectTypeEXT(20);
pub const SAMPLER: Self = DebugReportObjectTypeEXT(21);
pub const DESCRIPTOR_POOL: Self = DebugReportObjectTypeEXT(22);
pub const DESCRIPTOR_SET: Self = DebugReportObjectTypeEXT(23);
pub const FRAMEBUFFER: Self = DebugReportObjectTypeEXT(24);
pub const COMMAND_POOL: Self = DebugReportObjectTypeEXT(25);
pub const SURFACE_KHR: Self = DebugReportObjectTypeEXT(26);
pub const SWAPCHAIN_KHR: Self = DebugReportObjectTypeEXT(27);
pub const DEBUG_REPORT_CALLBACK: Self = DebugReportObjectTypeEXT(28);
pub const DISPLAY_KHR: Self = DebugReportObjectTypeEXT(29);
pub const DISPLAY_MODE_KHR: Self = DebugReportObjectTypeEXT(30);
pub const OBJECT_TABLE_NVX: Self = DebugReportObjectTypeEXT(31);
pub const INDIRECT_COMMANDS_LAYOUT_NVX: Self = DebugReportObjectTypeEXT(32);
pub const VALIDATION_CACHE: Self = DebugReportObjectTypeEXT(33);
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkRasterizationOrderAMD.html>"]
pub struct RasterizationOrderAMD(pub(crate) i32);
impl RasterizationOrderAMD {
pub fn from_raw(x: i32) -> Self {
RasterizationOrderAMD(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl RasterizationOrderAMD {
pub const STRICT: Self = RasterizationOrderAMD(0);
pub const RELAXED: Self = RasterizationOrderAMD(1);
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkValidationCheckEXT.html>"]
pub struct ValidationCheckEXT(pub(crate) i32);
impl ValidationCheckEXT {
pub fn from_raw(x: i32) -> Self {
ValidationCheckEXT(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl ValidationCheckEXT {
pub const ALL: Self = ValidationCheckEXT(0);
pub const SHADERS: Self = ValidationCheckEXT(1);
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkValidationFeatureEnableEXT.html>"]
pub struct ValidationFeatureEnableEXT(pub(crate) i32);
impl ValidationFeatureEnableEXT {
pub fn from_raw(x: i32) -> Self {
ValidationFeatureEnableEXT(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl ValidationFeatureEnableEXT {
pub const GPU_ASSISTED: Self = ValidationFeatureEnableEXT(0);
pub const GPU_ASSISTED_RESERVE_BINDING_SLOT: Self = ValidationFeatureEnableEXT(1);
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkValidationFeatureDisableEXT.html>"]
pub struct ValidationFeatureDisableEXT(pub(crate) i32);
impl ValidationFeatureDisableEXT {
pub fn from_raw(x: i32) -> Self {
ValidationFeatureDisableEXT(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl ValidationFeatureDisableEXT {
pub const ALL: Self = ValidationFeatureDisableEXT(0);
pub const SHADERS: Self = ValidationFeatureDisableEXT(1);
pub const THREAD_SAFETY: Self = ValidationFeatureDisableEXT(2);
pub const API_PARAMETERS: Self = ValidationFeatureDisableEXT(3);
pub const OBJECT_LIFETIMES: Self = ValidationFeatureDisableEXT(4);
pub const CORE_CHECKS: Self = ValidationFeatureDisableEXT(5);
pub const UNIQUE_HANDLES: Self = ValidationFeatureDisableEXT(6);
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkIndirectCommandsTokenTypeNVX.html>"]
pub struct IndirectCommandsTokenTypeNVX(pub(crate) i32);
impl IndirectCommandsTokenTypeNVX {
pub fn from_raw(x: i32) -> Self {
IndirectCommandsTokenTypeNVX(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl IndirectCommandsTokenTypeNVX {
pub const PIPELINE: Self = IndirectCommandsTokenTypeNVX(0);
pub const DESCRIPTOR_SET: Self = IndirectCommandsTokenTypeNVX(1);
pub const INDEX_BUFFER: Self = IndirectCommandsTokenTypeNVX(2);
pub const VERTEX_BUFFER: Self = IndirectCommandsTokenTypeNVX(3);
pub const PUSH_CONSTANT: Self = IndirectCommandsTokenTypeNVX(4);
pub const DRAW_INDEXED: Self = IndirectCommandsTokenTypeNVX(5);
pub const DRAW: Self = IndirectCommandsTokenTypeNVX(6);
pub const DISPATCH: Self = IndirectCommandsTokenTypeNVX(7);
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkObjectEntryTypeNVX.html>"]
pub struct ObjectEntryTypeNVX(pub(crate) i32);
impl ObjectEntryTypeNVX {
pub fn from_raw(x: i32) -> Self {
ObjectEntryTypeNVX(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl ObjectEntryTypeNVX {
pub const DESCRIPTOR_SET: Self = ObjectEntryTypeNVX(0);
pub const PIPELINE: Self = ObjectEntryTypeNVX(1);
pub const INDEX_BUFFER: Self = ObjectEntryTypeNVX(2);
pub const VERTEX_BUFFER: Self = ObjectEntryTypeNVX(3);
pub const PUSH_CONSTANT: Self = ObjectEntryTypeNVX(4);
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDisplayPowerStateEXT.html>"]
pub struct DisplayPowerStateEXT(pub(crate) i32);
impl DisplayPowerStateEXT {
pub fn from_raw(x: i32) -> Self {
DisplayPowerStateEXT(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl DisplayPowerStateEXT {
pub const OFF: Self = DisplayPowerStateEXT(0);
pub const SUSPEND: Self = DisplayPowerStateEXT(1);
pub const ON: Self = DisplayPowerStateEXT(2);
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDeviceEventTypeEXT.html>"]
pub struct DeviceEventTypeEXT(pub(crate) i32);
impl DeviceEventTypeEXT {
pub fn from_raw(x: i32) -> Self {
DeviceEventTypeEXT(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl DeviceEventTypeEXT {
pub const DISPLAY_HOTPLUG: Self = DeviceEventTypeEXT(0);
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDisplayEventTypeEXT.html>"]
pub struct DisplayEventTypeEXT(pub(crate) i32);
impl DisplayEventTypeEXT {
pub fn from_raw(x: i32) -> Self {
DisplayEventTypeEXT(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl DisplayEventTypeEXT {
pub const FIRST_PIXEL_OUT: Self = DisplayEventTypeEXT(0);
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkViewportCoordinateSwizzleNV.html>"]
pub struct ViewportCoordinateSwizzleNV(pub(crate) i32);
impl ViewportCoordinateSwizzleNV {
pub fn from_raw(x: i32) -> Self {
ViewportCoordinateSwizzleNV(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl ViewportCoordinateSwizzleNV {
pub const POSITIVE_X: Self = ViewportCoordinateSwizzleNV(0);
pub const NEGATIVE_X: Self = ViewportCoordinateSwizzleNV(1);
pub const POSITIVE_Y: Self = ViewportCoordinateSwizzleNV(2);
pub const NEGATIVE_Y: Self = ViewportCoordinateSwizzleNV(3);
pub const POSITIVE_Z: Self = ViewportCoordinateSwizzleNV(4);
pub const NEGATIVE_Z: Self = ViewportCoordinateSwizzleNV(5);
pub const POSITIVE_W: Self = ViewportCoordinateSwizzleNV(6);
pub const NEGATIVE_W: Self = ViewportCoordinateSwizzleNV(7);
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDiscardRectangleModeEXT.html>"]
pub struct DiscardRectangleModeEXT(pub(crate) i32);
impl DiscardRectangleModeEXT {
pub fn from_raw(x: i32) -> Self {
DiscardRectangleModeEXT(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl DiscardRectangleModeEXT {
pub const INCLUSIVE: Self = DiscardRectangleModeEXT(0);
pub const EXCLUSIVE: Self = DiscardRectangleModeEXT(1);
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPointClippingBehavior.html>"]
pub struct PointClippingBehavior(pub(crate) i32);
impl PointClippingBehavior {
pub fn from_raw(x: i32) -> Self {
PointClippingBehavior(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl PointClippingBehavior {
pub const ALL_CLIP_PLANES: Self = PointClippingBehavior(0);
pub const USER_CLIP_PLANES_ONLY: Self = PointClippingBehavior(1);
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkSamplerReductionModeEXT.html>"]
pub struct SamplerReductionModeEXT(pub(crate) i32);
impl SamplerReductionModeEXT {
pub fn from_raw(x: i32) -> Self {
SamplerReductionModeEXT(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl SamplerReductionModeEXT {
pub const WEIGHTED_AVERAGE: Self = SamplerReductionModeEXT(0);
pub const MIN: Self = SamplerReductionModeEXT(1);
pub const MAX: Self = SamplerReductionModeEXT(2);
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkTessellationDomainOrigin.html>"]
pub struct TessellationDomainOrigin(pub(crate) i32);
impl TessellationDomainOrigin {
pub fn from_raw(x: i32) -> Self {
TessellationDomainOrigin(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl TessellationDomainOrigin {
pub const UPPER_LEFT: Self = TessellationDomainOrigin(0);
pub const LOWER_LEFT: Self = TessellationDomainOrigin(1);
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkSamplerYcbcrModelConversion.html>"]
pub struct SamplerYcbcrModelConversion(pub(crate) i32);
impl SamplerYcbcrModelConversion {
pub fn from_raw(x: i32) -> Self {
SamplerYcbcrModelConversion(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl SamplerYcbcrModelConversion {
pub const RGB_IDENTITY: Self = SamplerYcbcrModelConversion(0);
#[doc = "just range expansion"]
pub const YCBCR_IDENTITY: Self = SamplerYcbcrModelConversion(1);
#[doc = "aka HD YUV"]
pub const YCBCR_709: Self = SamplerYcbcrModelConversion(2);
#[doc = "aka SD YUV"]
pub const YCBCR_601: Self = SamplerYcbcrModelConversion(3);
#[doc = "aka UHD YUV"]
pub const YCBCR_2020: Self = SamplerYcbcrModelConversion(4);
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkSamplerYcbcrRange.html>"]
pub struct SamplerYcbcrRange(pub(crate) i32);
impl SamplerYcbcrRange {
pub fn from_raw(x: i32) -> Self {
SamplerYcbcrRange(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl SamplerYcbcrRange {
#[doc = "Luma 0..1 maps to 0..255, chroma -0.5..0.5 to 1..255 (clamped)"]
pub const ITU_FULL: Self = SamplerYcbcrRange(0);
#[doc = "Luma 0..1 maps to 16..235, chroma -0.5..0.5 to 16..240"]
pub const ITU_NARROW: Self = SamplerYcbcrRange(1);
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkChromaLocation.html>"]
pub struct ChromaLocation(pub(crate) i32);
impl ChromaLocation {
pub fn from_raw(x: i32) -> Self {
ChromaLocation(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl ChromaLocation {
pub const COSITED_EVEN: Self = ChromaLocation(0);
pub const MIDPOINT: Self = ChromaLocation(1);
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkBlendOverlapEXT.html>"]
pub struct BlendOverlapEXT(pub(crate) i32);
impl BlendOverlapEXT {
pub fn from_raw(x: i32) -> Self {
BlendOverlapEXT(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl BlendOverlapEXT {
pub const UNCORRELATED: Self = BlendOverlapEXT(0);
pub const DISJOINT: Self = BlendOverlapEXT(1);
pub const CONJOINT: Self = BlendOverlapEXT(2);
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkCoverageModulationModeNV.html>"]
pub struct CoverageModulationModeNV(pub(crate) i32);
impl CoverageModulationModeNV {
pub fn from_raw(x: i32) -> Self {
CoverageModulationModeNV(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl CoverageModulationModeNV {
pub const NONE: Self = CoverageModulationModeNV(0);
pub const RGB: Self = CoverageModulationModeNV(1);
pub const ALPHA: Self = CoverageModulationModeNV(2);
pub const RGBA: Self = CoverageModulationModeNV(3);
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkValidationCacheHeaderVersionEXT.html>"]
pub struct ValidationCacheHeaderVersionEXT(pub(crate) i32);
impl ValidationCacheHeaderVersionEXT {
pub fn from_raw(x: i32) -> Self {
ValidationCacheHeaderVersionEXT(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl ValidationCacheHeaderVersionEXT {
pub const ONE: Self = ValidationCacheHeaderVersionEXT(1);
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkShaderInfoTypeAMD.html>"]
pub struct ShaderInfoTypeAMD(pub(crate) i32);
impl ShaderInfoTypeAMD {
pub fn from_raw(x: i32) -> Self {
ShaderInfoTypeAMD(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl ShaderInfoTypeAMD {
pub const STATISTICS: Self = ShaderInfoTypeAMD(0);
pub const BINARY: Self = ShaderInfoTypeAMD(1);
pub const DISASSEMBLY: Self = ShaderInfoTypeAMD(2);
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkQueueGlobalPriorityEXT.html>"]
pub struct QueueGlobalPriorityEXT(pub(crate) i32);
impl QueueGlobalPriorityEXT {
pub fn from_raw(x: i32) -> Self {
QueueGlobalPriorityEXT(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl QueueGlobalPriorityEXT {
pub const LOW: Self = QueueGlobalPriorityEXT(128);
pub const MEDIUM: Self = QueueGlobalPriorityEXT(256);
pub const HIGH: Self = QueueGlobalPriorityEXT(512);
pub const REALTIME: Self = QueueGlobalPriorityEXT(1024);
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkConservativeRasterizationModeEXT.html>"]
pub struct ConservativeRasterizationModeEXT(pub(crate) i32);
impl ConservativeRasterizationModeEXT {
pub fn from_raw(x: i32) -> Self {
ConservativeRasterizationModeEXT(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl ConservativeRasterizationModeEXT {
pub const DISABLED: Self = ConservativeRasterizationModeEXT(0);
pub const OVERESTIMATE: Self = ConservativeRasterizationModeEXT(1);
pub const UNDERESTIMATE: Self = ConservativeRasterizationModeEXT(2);
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkVendorId.html>"]
pub struct VendorId(pub(crate) i32);
impl VendorId {
pub fn from_raw(x: i32) -> Self {
VendorId(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl VendorId {
#[doc = "Vivante vendor ID"]
pub const VIV: Self = VendorId(0x10001);
#[doc = "VeriSilicon vendor ID"]
pub const VSI: Self = VendorId(0x10002);
#[doc = "Kazan Software Renderer"]
pub const KAZAN: Self = VendorId(0x10003);
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDriverIdKHR.html>"]
pub struct DriverIdKHR(pub(crate) i32);
impl DriverIdKHR {
pub fn from_raw(x: i32) -> Self {
DriverIdKHR(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl DriverIdKHR {
#[doc = "Advanced Micro Devices, Inc."]
pub const AMD_PROPRIETARY: Self = DriverIdKHR(1);
#[doc = "Advanced Micro Devices, Inc."]
pub const AMD_OPEN_SOURCE: Self = DriverIdKHR(2);
#[doc = "Mesa open source project"]
pub const MESA_RADV: Self = DriverIdKHR(3);
#[doc = "NVIDIA Corporation"]
pub const NVIDIA_PROPRIETARY: Self = DriverIdKHR(4);
#[doc = "Intel Corporation"]
pub const INTEL_PROPRIETARY_WINDOWS: Self = DriverIdKHR(5);
#[doc = "Intel Corporation"]
pub const INTEL_OPEN_SOURCE_MESA: Self = DriverIdKHR(6);
#[doc = "Imagination Technologies"]
pub const IMAGINATION_PROPRIETARY: Self = DriverIdKHR(7);
#[doc = "Qualcomm Technologies, Inc."]
pub const QUALCOMM_PROPRIETARY: Self = DriverIdKHR(8);
#[doc = "Arm Limited"]
pub const ARM_PROPRIETARY: Self = DriverIdKHR(9);
#[doc = "Google LLC"]
pub const GOOGLE_PASTEL: Self = DriverIdKHR(10);
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkShadingRatePaletteEntryNV.html>"]
pub struct ShadingRatePaletteEntryNV(pub(crate) i32);
impl ShadingRatePaletteEntryNV {
pub fn from_raw(x: i32) -> Self {
ShadingRatePaletteEntryNV(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl ShadingRatePaletteEntryNV {
pub const NO_INVOCATIONS: Self = ShadingRatePaletteEntryNV(0);
pub const TYPE_16_INVOCATIONS_PER_PIXEL: Self = ShadingRatePaletteEntryNV(1);
pub const TYPE_8_INVOCATIONS_PER_PIXEL: Self = ShadingRatePaletteEntryNV(2);
pub const TYPE_4_INVOCATIONS_PER_PIXEL: Self = ShadingRatePaletteEntryNV(3);
pub const TYPE_2_INVOCATIONS_PER_PIXEL: Self = ShadingRatePaletteEntryNV(4);
pub const TYPE_1_INVOCATION_PER_PIXEL: Self = ShadingRatePaletteEntryNV(5);
pub const TYPE_1_INVOCATION_PER_2X1_PIXELS: Self = ShadingRatePaletteEntryNV(6);
pub const TYPE_1_INVOCATION_PER_1X2_PIXELS: Self = ShadingRatePaletteEntryNV(7);
pub const TYPE_1_INVOCATION_PER_2X2_PIXELS: Self = ShadingRatePaletteEntryNV(8);
pub const TYPE_1_INVOCATION_PER_4X2_PIXELS: Self = ShadingRatePaletteEntryNV(9);
pub const TYPE_1_INVOCATION_PER_2X4_PIXELS: Self = ShadingRatePaletteEntryNV(10);
pub const TYPE_1_INVOCATION_PER_4X4_PIXELS: Self = ShadingRatePaletteEntryNV(11);
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkCoarseSampleOrderTypeNV.html>"]
pub struct CoarseSampleOrderTypeNV(pub(crate) i32);
impl CoarseSampleOrderTypeNV {
pub fn from_raw(x: i32) -> Self {
CoarseSampleOrderTypeNV(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl CoarseSampleOrderTypeNV {
pub const DEFAULT: Self = CoarseSampleOrderTypeNV(0);
pub const CUSTOM: Self = CoarseSampleOrderTypeNV(1);
pub const PIXEL_MAJOR: Self = CoarseSampleOrderTypeNV(2);
pub const SAMPLE_MAJOR: Self = CoarseSampleOrderTypeNV(3);
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkCopyAccelerationStructureModeNV.html>"]
pub struct CopyAccelerationStructureModeNV(pub(crate) i32);
impl CopyAccelerationStructureModeNV {
pub fn from_raw(x: i32) -> Self {
CopyAccelerationStructureModeNV(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl CopyAccelerationStructureModeNV {
pub const CLONE: Self = CopyAccelerationStructureModeNV(0);
pub const COMPACT: Self = CopyAccelerationStructureModeNV(1);
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkAccelerationStructureTypeNV.html>"]
pub struct AccelerationStructureTypeNV(pub(crate) i32);
impl AccelerationStructureTypeNV {
pub fn from_raw(x: i32) -> Self {
AccelerationStructureTypeNV(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl AccelerationStructureTypeNV {
pub const TOP_LEVEL: Self = AccelerationStructureTypeNV(0);
pub const BOTTOM_LEVEL: Self = AccelerationStructureTypeNV(1);
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkGeometryTypeNV.html>"]
pub struct GeometryTypeNV(pub(crate) i32);
impl GeometryTypeNV {
pub fn from_raw(x: i32) -> Self {
GeometryTypeNV(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl GeometryTypeNV {
pub const TRIANGLES: Self = GeometryTypeNV(0);
pub const AABBS: Self = GeometryTypeNV(1);
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkAccelerationStructureMemoryRequirementsTypeNV.html>"]
pub struct AccelerationStructureMemoryRequirementsTypeNV(pub(crate) i32);
impl AccelerationStructureMemoryRequirementsTypeNV {
pub fn from_raw(x: i32) -> Self {
AccelerationStructureMemoryRequirementsTypeNV(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl AccelerationStructureMemoryRequirementsTypeNV {
pub const OBJECT: Self = AccelerationStructureMemoryRequirementsTypeNV(0);
pub const BUILD_SCRATCH: Self = AccelerationStructureMemoryRequirementsTypeNV(1);
pub const UPDATE_SCRATCH: Self = AccelerationStructureMemoryRequirementsTypeNV(2);
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkRayTracingShaderGroupTypeNV.html>"]
pub struct RayTracingShaderGroupTypeNV(pub(crate) i32);
impl RayTracingShaderGroupTypeNV {
pub fn from_raw(x: i32) -> Self {
RayTracingShaderGroupTypeNV(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl RayTracingShaderGroupTypeNV {
pub const GENERAL: Self = RayTracingShaderGroupTypeNV(0);
pub const TRIANGLES_HIT_GROUP: Self = RayTracingShaderGroupTypeNV(1);
pub const PROCEDURAL_HIT_GROUP: Self = RayTracingShaderGroupTypeNV(2);
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkMemoryOverallocationBehaviorAMD.html>"]
pub struct MemoryOverallocationBehaviorAMD(pub(crate) i32);
impl MemoryOverallocationBehaviorAMD {
pub fn from_raw(x: i32) -> Self {
MemoryOverallocationBehaviorAMD(x)
}
pub fn as_raw(self) -> i32 {
self.0
}
}
impl MemoryOverallocationBehaviorAMD {
pub const DEFAULT: Self = MemoryOverallocationBehaviorAMD(0);
pub const ALLOWED: Self = MemoryOverallocationBehaviorAMD(1);
pub const DISALLOWED: Self = MemoryOverallocationBehaviorAMD(2);
}
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkCullModeFlagBits.html>"]
pub struct CullModeFlags(pub(crate) Flags);
vk_bitflags_wrapped!(CullModeFlags, 0b11, Flags);
impl CullModeFlags {
pub const NONE: Self = CullModeFlags(0);
pub const FRONT: Self = CullModeFlags(0b1);
pub const BACK: Self = CullModeFlags(0b10);
pub const FRONT_AND_BACK: Self = CullModeFlags(0x00000003);
}
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkQueueFlagBits.html>"]
pub struct QueueFlags(pub(crate) Flags);
vk_bitflags_wrapped!(QueueFlags, 0b1111, Flags);
impl QueueFlags {
#[doc = "Queue supports graphics operations"]
pub const GRAPHICS: Self = QueueFlags(0b1);
#[doc = "Queue supports compute operations"]
pub const COMPUTE: Self = QueueFlags(0b10);
#[doc = "Queue supports transfer operations"]
pub const TRANSFER: Self = QueueFlags(0b100);
#[doc = "Queue supports sparse resource memory management operations"]
pub const SPARSE_BINDING: Self = QueueFlags(0b1000);
}
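// Illustrative sketch, not part of the generated bindings: the helpers emitted by
// `vk_bitflags_wrapped!` (`contains`, `intersects`, the bitwise operators) are how queue
// capabilities reported for a queue family are usually tested.
#[cfg(test)]
mod queue_flags_example {
    use super::*;

    #[test]
    fn capability_checks() {
        // A hypothetical queue family advertising graphics + transfer support.
        let supported = QueueFlags::GRAPHICS | QueueFlags::TRANSFER;
        assert!(supported.contains(QueueFlags::GRAPHICS));
        assert!(supported.intersects(QueueFlags::TRANSFER | QueueFlags::COMPUTE));
        // `contains` requires *all* requested bits, so graphics + compute is rejected.
        assert!(!supported.contains(QueueFlags::GRAPHICS | QueueFlags::COMPUTE));
    }
}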
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkRenderPassCreateFlagBits.html>"]
pub struct RenderPassCreateFlags(pub(crate) Flags);
vk_bitflags_wrapped!(RenderPassCreateFlags, 0b0, Flags);
impl RenderPassCreateFlags {}
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDeviceQueueCreateFlagBits.html>"]
pub struct DeviceQueueCreateFlags(pub(crate) Flags);
vk_bitflags_wrapped!(DeviceQueueCreateFlags, 0b0, Flags);
impl DeviceQueueCreateFlags {}
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkMemoryPropertyFlagBits.html>"]
pub struct MemoryPropertyFlags(pub(crate) Flags);
vk_bitflags_wrapped!(MemoryPropertyFlags, 0b11111, Flags);
impl MemoryPropertyFlags {
#[doc = "If otherwise stated, then allocate memory on device"]
pub const DEVICE_LOCAL: Self = MemoryPropertyFlags(0b1);
#[doc = "Memory is mappable by host"]
pub const HOST_VISIBLE: Self = MemoryPropertyFlags(0b10);
#[doc = "Memory will have i/o coherency. If not set, application may need to use vkFlushMappedMemoryRanges and vkInvalidateMappedMemoryRanges to flush/invalidate host cache"]
pub const HOST_COHERENT: Self = MemoryPropertyFlags(0b100);
#[doc = "Memory will be cached by the host"]
pub const HOST_CACHED: Self = MemoryPropertyFlags(0b1000);
#[doc = "Memory may be allocated by the driver when it is required"]
pub const LAZILY_ALLOCATED: Self = MemoryPropertyFlags(0b10000);
}
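// Illustrative sketch, not part of the generated bindings: host-mappable memory types are
// usually selected by requiring HOST_VISIBLE, often together with HOST_COHERENT so that
// manual flush/invalidate calls can be skipped.
#[cfg(test)]
mod memory_property_example {
    use super::*;

    #[test]
    fn mappable_memory_check() {
        let props = MemoryPropertyFlags::HOST_VISIBLE | MemoryPropertyFlags::HOST_COHERENT;
        let required = MemoryPropertyFlags::HOST_VISIBLE;
        assert!(props.contains(required));
        assert!(!props.contains(MemoryPropertyFlags::DEVICE_LOCAL));
    }
}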
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkMemoryHeapFlagBits.html>"]
pub struct MemoryHeapFlags(pub(crate) Flags);
vk_bitflags_wrapped!(MemoryHeapFlags, 0b1, Flags);
impl MemoryHeapFlags {
#[doc = "If set, heap represents device memory"]
pub const DEVICE_LOCAL: Self = MemoryHeapFlags(0b1);
}
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkAccessFlagBits.html>"]
pub struct AccessFlags(pub(crate) Flags);
vk_bitflags_wrapped!(AccessFlags, 0b11111111111111111, Flags);
impl AccessFlags {
#[doc = "Controls coherency of indirect command reads"]
pub const INDIRECT_COMMAND_READ: Self = AccessFlags(0b1);
#[doc = "Controls coherency of index reads"]
pub const INDEX_READ: Self = AccessFlags(0b10);
#[doc = "Controls coherency of vertex attribute reads"]
pub const VERTEX_ATTRIBUTE_READ: Self = AccessFlags(0b100);
#[doc = "Controls coherency of uniform buffer reads"]
pub const UNIFORM_READ: Self = AccessFlags(0b1000);
#[doc = "Controls coherency of input attachment reads"]
pub const INPUT_ATTACHMENT_READ: Self = AccessFlags(0b10000);
#[doc = "Controls coherency of shader reads"]
pub const SHADER_READ: Self = AccessFlags(0b100000);
#[doc = "Controls coherency of shader writes"]
pub const SHADER_WRITE: Self = AccessFlags(0b1000000);
#[doc = "Controls coherency of color attachment reads"]
pub const COLOR_ATTACHMENT_READ: Self = AccessFlags(0b10000000);
#[doc = "Controls coherency of color attachment writes"]
pub const COLOR_ATTACHMENT_WRITE: Self = AccessFlags(0b100000000);
#[doc = "Controls coherency of depth/stencil attachment reads"]
pub const DEPTH_STENCIL_ATTACHMENT_READ: Self = AccessFlags(0b1000000000);
#[doc = "Controls coherency of depth/stencil attachment writes"]
pub const DEPTH_STENCIL_ATTACHMENT_WRITE: Self = AccessFlags(0b10000000000);
#[doc = "Controls coherency of transfer reads"]
pub const TRANSFER_READ: Self = AccessFlags(0b100000000000);
#[doc = "Controls coherency of transfer writes"]
pub const TRANSFER_WRITE: Self = AccessFlags(0b1000000000000);
#[doc = "Controls coherency of host reads"]
pub const HOST_READ: Self = AccessFlags(0b10000000000000);
#[doc = "Controls coherency of host writes"]
pub const HOST_WRITE: Self = AccessFlags(0b100000000000000);
#[doc = "Controls coherency of memory reads"]
pub const MEMORY_READ: Self = AccessFlags(0b1000000000000000);
#[doc = "Controls coherency of memory writes"]
pub const MEMORY_WRITE: Self = AccessFlags(0b10000000000000000);
}
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkBufferUsageFlagBits.html>"]
pub struct BufferUsageFlags(pub(crate) Flags);
vk_bitflags_wrapped!(BufferUsageFlags, 0b111111111, Flags);
impl BufferUsageFlags {
#[doc = "Can be used as a source of transfer operations"]
pub const TRANSFER_SRC: Self = BufferUsageFlags(0b1);
#[doc = "Can be used as a destination of transfer operations"]
pub const TRANSFER_DST: Self = BufferUsageFlags(0b10);
#[doc = "Can be used as TBO"]
pub const UNIFORM_TEXEL_BUFFER: Self = BufferUsageFlags(0b100);
#[doc = "Can be used as IBO"]
pub const STORAGE_TEXEL_BUFFER: Self = BufferUsageFlags(0b1000);
#[doc = "Can be used as UBO"]
pub const UNIFORM_BUFFER: Self = BufferUsageFlags(0b10000);
#[doc = "Can be used as SSBO"]
pub const STORAGE_BUFFER: Self = BufferUsageFlags(0b100000);
#[doc = "Can be used as source of fixed-function index fetch (index buffer)"]
pub const INDEX_BUFFER: Self = BufferUsageFlags(0b1000000);
#[doc = "Can be used as source of fixed-function vertex fetch (VBO)"]
pub const VERTEX_BUFFER: Self = BufferUsageFlags(0b10000000);
#[doc = "Can be the source of indirect parameters (e.g. indirect buffer, parameter buffer)"]
pub const INDIRECT_BUFFER: Self = BufferUsageFlags(0b100000000);
}
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkBufferCreateFlagBits.html>"]
pub struct BufferCreateFlags(pub(crate) Flags);
vk_bitflags_wrapped!(BufferCreateFlags, 0b111, Flags);
impl BufferCreateFlags {
#[doc = "Buffer should support sparse backing"]
pub const SPARSE_BINDING: Self = BufferCreateFlags(0b1);
#[doc = "Buffer should support sparse backing with partial residency"]
pub const SPARSE_RESIDENCY: Self = BufferCreateFlags(0b10);
#[doc = "Buffer should support constent data access to physical memory ranges mapped into multiple locations of sparse buffers"]
pub const SPARSE_ALIASED: Self = BufferCreateFlags(0b100);
}
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkShaderStageFlagBits.html>"]
pub struct ShaderStageFlags(pub(crate) Flags);
vk_bitflags_wrapped!(ShaderStageFlags, 0b1111111111111111111111111111111, Flags);
impl ShaderStageFlags {
pub const VERTEX: Self = ShaderStageFlags(0b1);
pub const TESSELLATION_CONTROL: Self = ShaderStageFlags(0b10);
pub const TESSELLATION_EVALUATION: Self = ShaderStageFlags(0b100);
pub const GEOMETRY: Self = ShaderStageFlags(0b1000);
pub const FRAGMENT: Self = ShaderStageFlags(0b10000);
pub const COMPUTE: Self = ShaderStageFlags(0b100000);
pub const ALL_GRAPHICS: Self = ShaderStageFlags(0x0000001F);
pub const ALL: Self = ShaderStageFlags(0x7FFFFFFF);
}
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkImageUsageFlagBits.html>"]
pub struct ImageUsageFlags(pub(crate) Flags);
vk_bitflags_wrapped!(ImageUsageFlags, 0b11111111, Flags);
impl ImageUsageFlags {
#[doc = "Can be used as a source of transfer operations"]
pub const TRANSFER_SRC: Self = ImageUsageFlags(0b1);
#[doc = "Can be used as a destination of transfer operations"]
pub const TRANSFER_DST: Self = ImageUsageFlags(0b10);
#[doc = "Can be sampled from (SAMPLED_IMAGE and COMBINED_IMAGE_SAMPLER descriptor types)"]
pub const SAMPLED: Self = ImageUsageFlags(0b100);
#[doc = "Can be used as storage image (STORAGE_IMAGE descriptor type)"]
pub const STORAGE: Self = ImageUsageFlags(0b1000);
#[doc = "Can be used as framebuffer color attachment"]
pub const COLOR_ATTACHMENT: Self = ImageUsageFlags(0b10000);
#[doc = "Can be used as framebuffer depth/stencil attachment"]
pub const DEPTH_STENCIL_ATTACHMENT: Self = ImageUsageFlags(0b100000);
#[doc = "Image data not needed outside of rendering"]
pub const TRANSIENT_ATTACHMENT: Self = ImageUsageFlags(0b1000000);
#[doc = "Can be used as framebuffer input attachment"]
pub const INPUT_ATTACHMENT: Self = ImageUsageFlags(0b10000000);
}
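// Illustrative sketch, not part of the generated bindings: image usage bits are OR-ed
// together before image creation, and `is_empty`/`all` provide cheap sanity checks.
#[cfg(test)]
mod image_usage_example {
    use super::*;

    #[test]
    fn combined_usage() {
        let usage = ImageUsageFlags::COLOR_ATTACHMENT | ImageUsageFlags::SAMPLED;
        assert!(!usage.is_empty());
        assert_eq!(usage.as_raw(), 0b10100);
        assert!(ImageUsageFlags::all().contains(usage));
    }
}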
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkImageCreateFlagBits.html>"]
pub struct ImageCreateFlags(pub(crate) Flags);
vk_bitflags_wrapped!(ImageCreateFlags, 0b11111, Flags);
impl ImageCreateFlags {
#[doc = "Image should support sparse backing"]
pub const SPARSE_BINDING: Self = ImageCreateFlags(0b1);
#[doc = "Image should support sparse backing with partial residency"]
pub const SPARSE_RESIDENCY: Self = ImageCreateFlags(0b10);
#[doc = "Image should support constent data access to physical memory ranges mapped into multiple locations of sparse images"]
pub const SPARSE_ALIASED: Self = ImageCreateFlags(0b100);
#[doc = "Allows image views to have different format than the base image"]
pub const MUTABLE_FORMAT: Self = ImageCreateFlags(0b1000);
#[doc = "Allows creating image views with cube type from the created image"]
pub const CUBE_COMPATIBLE: Self = ImageCreateFlags(0b10000);
}
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkImageViewCreateFlagBits.html>"]
pub struct ImageViewCreateFlags(pub(crate) Flags);
vk_bitflags_wrapped!(ImageViewCreateFlags, 0b0, Flags);
impl ImageViewCreateFlags {}
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkSamplerCreateFlagBits.html>"]
pub struct SamplerCreateFlags(pub(crate) Flags);
vk_bitflags_wrapped!(SamplerCreateFlags, 0b0, Flags);
impl SamplerCreateFlags {}
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPipelineCreateFlagBits.html>"]
pub struct PipelineCreateFlags(pub(crate) Flags);
vk_bitflags_wrapped!(PipelineCreateFlags, 0b111, Flags);
impl PipelineCreateFlags {
pub const DISABLE_OPTIMIZATION: Self = PipelineCreateFlags(0b1);
pub const ALLOW_DERIVATIVES: Self = PipelineCreateFlags(0b10);
pub const DERIVATIVE: Self = PipelineCreateFlags(0b100);
}
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkColorComponentFlagBits.html>"]
pub struct ColorComponentFlags(pub(crate) Flags);
vk_bitflags_wrapped!(ColorComponentFlags, 0b1111, Flags);
impl ColorComponentFlags {
pub const R: Self = ColorComponentFlags(0b1);
pub const G: Self = ColorComponentFlags(0b10);
pub const B: Self = ColorComponentFlags(0b100);
pub const A: Self = ColorComponentFlags(0b1000);
}
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkFenceCreateFlagBits.html>"]
pub struct FenceCreateFlags(pub(crate) Flags);
vk_bitflags_wrapped!(FenceCreateFlags, 0b1, Flags);
impl FenceCreateFlags {
pub const SIGNALED: Self = FenceCreateFlags(0b1);
}
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkFormatFeatureFlagBits.html>"]
pub struct FormatFeatureFlags(pub(crate) Flags);
vk_bitflags_wrapped!(FormatFeatureFlags, 0b1111111111111, Flags);
impl FormatFeatureFlags {
#[doc = "Format can be used for sampled images (SAMPLED_IMAGE and COMBINED_IMAGE_SAMPLER descriptor types)"]
pub const SAMPLED_IMAGE: Self = FormatFeatureFlags(0b1);
#[doc = "Format can be used for storage images (STORAGE_IMAGE descriptor type)"]
pub const STORAGE_IMAGE: Self = FormatFeatureFlags(0b10);
#[doc = "Format supports atomic operations in case it is used for storage images"]
pub const STORAGE_IMAGE_ATOMIC: Self = FormatFeatureFlags(0b100);
#[doc = "Format can be used for uniform texel buffers (TBOs)"]
pub const UNIFORM_TEXEL_BUFFER: Self = FormatFeatureFlags(0b1000);
#[doc = "Format can be used for storage texel buffers (IBOs)"]
pub const STORAGE_TEXEL_BUFFER: Self = FormatFeatureFlags(0b10000);
#[doc = "Format supports atomic operations in case it is used for storage texel buffers"]
pub const STORAGE_TEXEL_BUFFER_ATOMIC: Self = FormatFeatureFlags(0b100000);
#[doc = "Format can be used for vertex buffers (VBOs)"]
pub const VERTEX_BUFFER: Self = FormatFeatureFlags(0b1000000);
#[doc = "Format can be used for color attachment images"]
pub const COLOR_ATTACHMENT: Self = FormatFeatureFlags(0b10000000);
#[doc = "Format supports blending in case it is used for color attachment images"]
pub const COLOR_ATTACHMENT_BLEND: Self = FormatFeatureFlags(0b100000000);
#[doc = "Format can be used for depth/stencil attachment images"]
pub const DEPTH_STENCIL_ATTACHMENT: Self = FormatFeatureFlags(0b1000000000);
#[doc = "Format can be used as the source image of blits with vkCmdBlitImage"]
pub const BLIT_SRC: Self = FormatFeatureFlags(0b10000000000);
#[doc = "Format can be used as the destination image of blits with vkCmdBlitImage"]
pub const BLIT_DST: Self = FormatFeatureFlags(0b100000000000);
#[doc = "Format can be filtered with VK_FILTER_LINEAR when being sampled"]
pub const SAMPLED_IMAGE_FILTER_LINEAR: Self = FormatFeatureFlags(0b1000000000000);
}
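// Illustrative example: `contains`, generated by `vk_bitflags_wrapped!`, tests
// whether a given feature bit is set in a mask. A minimal sketch; the parameter
// is assumed to come from a format-properties query made elsewhere.
#[allow(dead_code)]
fn example_supports_linear_filtering(features: FormatFeatureFlags) -> bool {
    features.contains(FormatFeatureFlags::SAMPLED_IMAGE_FILTER_LINEAR)
}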
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkQueryControlFlagBits.html>"]
pub struct QueryControlFlags(pub(crate) Flags);
vk_bitflags_wrapped!(QueryControlFlags, 0b1, Flags);
impl QueryControlFlags {
#[doc = "Require precise results to be collected by the query"]
pub const PRECISE: Self = QueryControlFlags(0b1);
}
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkQueryResultFlagBits.html>"]
pub struct QueryResultFlags(pub(crate) Flags);
vk_bitflags_wrapped!(QueryResultFlags, 0b1111, Flags);
impl QueryResultFlags {
#[doc = "Results of the queries are written to the destination buffer as 64-bit values"]
pub const TYPE_64: Self = QueryResultFlags(0b1);
#[doc = "Results of the queries are waited on before proceeding with the result copy"]
pub const WAIT: Self = QueryResultFlags(0b10);
#[doc = "Besides the results of the query, the availability of the results is also written"]
pub const WITH_AVAILABILITY: Self = QueryResultFlags(0b100);
#[doc = "Copy the partial results of the query even if the final results are not available"]
pub const PARTIAL: Self = QueryResultFlags(0b1000);
}
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkCommandBufferUsageFlagBits.html>"]
pub struct CommandBufferUsageFlags(pub(crate) Flags);
vk_bitflags_wrapped!(CommandBufferUsageFlags, 0b111, Flags);
impl CommandBufferUsageFlags {
pub const ONE_TIME_SUBMIT: Self = CommandBufferUsageFlags(0b1);
pub const RENDER_PASS_CONTINUE: Self = CommandBufferUsageFlags(0b10);
#[doc = "Command buffer may be submitted/executed more than once simultaneously"]
pub const SIMULTANEOUS_USE: Self = CommandBufferUsageFlags(0b100);
}
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkQueryPipelineStatisticFlagBits.html>"]
pub struct QueryPipelineStatisticFlags(pub(crate) Flags);
vk_bitflags_wrapped!(QueryPipelineStatisticFlags, 0b11111111111, Flags);
impl QueryPipelineStatisticFlags {
#[doc = "Optional"]
pub const INPUT_ASSEMBLY_VERTICES: Self = QueryPipelineStatisticFlags(0b1);
#[doc = "Optional"]
pub const INPUT_ASSEMBLY_PRIMITIVES: Self = QueryPipelineStatisticFlags(0b10);
#[doc = "Optional"]
pub const VERTEX_SHADER_INVOCATIONS: Self = QueryPipelineStatisticFlags(0b100);
#[doc = "Optional"]
pub const GEOMETRY_SHADER_INVOCATIONS: Self = QueryPipelineStatisticFlags(0b1000);
#[doc = "Optional"]
pub const GEOMETRY_SHADER_PRIMITIVES: Self = QueryPipelineStatisticFlags(0b10000);
#[doc = "Optional"]
pub const CLIPPING_INVOCATIONS: Self = QueryPipelineStatisticFlags(0b100000);
#[doc = "Optional"]
pub const CLIPPING_PRIMITIVES: Self = QueryPipelineStatisticFlags(0b1000000);
#[doc = "Optional"]
pub const FRAGMENT_SHADER_INVOCATIONS: Self = QueryPipelineStatisticFlags(0b10000000);
#[doc = "Optional"]
pub const TESSELLATION_CONTROL_SHADER_PATCHES: Self = QueryPipelineStatisticFlags(0b100000000);
#[doc = "Optional"]
pub const TESSELLATION_EVALUATION_SHADER_INVOCATIONS: Self =
QueryPipelineStatisticFlags(0b1000000000);
#[doc = "Optional"]
pub const COMPUTE_SHADER_INVOCATIONS: Self = QueryPipelineStatisticFlags(0b10000000000);
}
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkImageAspectFlagBits.html>"]
pub struct ImageAspectFlags(pub(crate) Flags);
vk_bitflags_wrapped!(ImageAspectFlags, 0b1111, Flags);
impl ImageAspectFlags {
pub const COLOR: Self = ImageAspectFlags(0b1);
pub const DEPTH: Self = ImageAspectFlags(0b10);
pub const STENCIL: Self = ImageAspectFlags(0b100);
pub const METADATA: Self = ImageAspectFlags(0b1000);
}
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkSparseImageFormatFlagBits.html>"]
pub struct SparseImageFormatFlags(pub(crate) Flags);
vk_bitflags_wrapped!(SparseImageFormatFlags, 0b111, Flags);
impl SparseImageFormatFlags {
#[doc = "Image uses a single mip tail region for all array layers"]
pub const SINGLE_MIPTAIL: Self = SparseImageFormatFlags(0b1);
#[doc = "Image requires mip level dimensions to be an integer multiple of the sparse image block dimensions for non-tail mip levels."]
pub const ALIGNED_MIP_SIZE: Self = SparseImageFormatFlags(0b10);
#[doc = "Image uses a non-standard sparse image block dimensions"]
pub const NONSTANDARD_BLOCK_SIZE: Self = SparseImageFormatFlags(0b100);
}
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkSparseMemoryBindFlagBits.html>"]
pub struct SparseMemoryBindFlags(pub(crate) Flags);
vk_bitflags_wrapped!(SparseMemoryBindFlags, 0b1, Flags);
impl SparseMemoryBindFlags {
#[doc = "Operation binds resource metadata to memory"]
pub const METADATA: Self = SparseMemoryBindFlags(0b1);
}
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPipelineStageFlagBits.html>"]
pub struct PipelineStageFlags(pub(crate) Flags);
vk_bitflags_wrapped!(PipelineStageFlags, 0b11111111111111111, Flags);
impl PipelineStageFlags {
#[doc = "Before subsequent commands are processed"]
pub const TOP_OF_PIPE: Self = PipelineStageFlags(0b1);
#[doc = "Draw/DispatchIndirect command fetch"]
pub const DRAW_INDIRECT: Self = PipelineStageFlags(0b10);
#[doc = "Vertex/index fetch"]
pub const VERTEX_INPUT: Self = PipelineStageFlags(0b100);
#[doc = "Vertex shading"]
pub const VERTEX_SHADER: Self = PipelineStageFlags(0b1000);
#[doc = "Tessellation control shading"]
pub const TESSELLATION_CONTROL_SHADER: Self = PipelineStageFlags(0b10000);
#[doc = "Tessellation evaluation shading"]
pub const TESSELLATION_EVALUATION_SHADER: Self = PipelineStageFlags(0b100000);
#[doc = "Geometry shading"]
pub const GEOMETRY_SHADER: Self = PipelineStageFlags(0b1000000);
#[doc = "Fragment shading"]
pub const FRAGMENT_SHADER: Self = PipelineStageFlags(0b10000000);
#[doc = "Early fragment (depth and stencil) tests"]
pub const EARLY_FRAGMENT_TESTS: Self = PipelineStageFlags(0b100000000);
#[doc = "Late fragment (depth and stencil) tests"]
pub const LATE_FRAGMENT_TESTS: Self = PipelineStageFlags(0b1000000000);
#[doc = "Color attachment writes"]
pub const COLOR_ATTACHMENT_OUTPUT: Self = PipelineStageFlags(0b10000000000);
#[doc = "Compute shading"]
pub const COMPUTE_SHADER: Self = PipelineStageFlags(0b100000000000);
#[doc = "Transfer/copy operations"]
pub const TRANSFER: Self = PipelineStageFlags(0b1000000000000);
#[doc = "After previous commands have completed"]
pub const BOTTOM_OF_PIPE: Self = PipelineStageFlags(0b10000000000000);
#[doc = "Indicates host (CPU) is a source/sink of the dependency"]
pub const HOST: Self = PipelineStageFlags(0b100000000000000);
#[doc = "All stages of the graphics pipeline"]
pub const ALL_GRAPHICS: Self = PipelineStageFlags(0b1000000000000000);
#[doc = "All stages supported on the queue"]
pub const ALL_COMMANDS: Self = PipelineStageFlags(0b10000000000000000);
}
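// Illustrative example: stage masks for a pipeline barrier are ordinary bit
// combinations of the constants above. A sketch of source/destination masks that
// make color-attachment writes visible to fragment-shader reads; the particular
// stages are an assumption chosen for illustration only.
#[allow(dead_code)]
fn example_barrier_stage_masks() -> (PipelineStageFlags, PipelineStageFlags) {
    (
        PipelineStageFlags::COLOR_ATTACHMENT_OUTPUT,
        PipelineStageFlags::FRAGMENT_SHADER,
    )
}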
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkCommandPoolCreateFlagBits.html>"]
pub struct CommandPoolCreateFlags(pub(crate) Flags);
vk_bitflags_wrapped!(CommandPoolCreateFlags, 0b11, Flags);
impl CommandPoolCreateFlags {
#[doc = "Command buffers have a short lifetime"]
pub const TRANSIENT: Self = CommandPoolCreateFlags(0b1);
#[doc = "Command buffers may release their memory individually"]
pub const RESET_COMMAND_BUFFER: Self = CommandPoolCreateFlags(0b10);
}
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkCommandPoolResetFlagBits.html>"]
pub struct CommandPoolResetFlags(pub(crate) Flags);
vk_bitflags_wrapped!(CommandPoolResetFlags, 0b1, Flags);
impl CommandPoolResetFlags {
#[doc = "Release resources owned by the pool"]
pub const RELEASE_RESOURCES: Self = CommandPoolResetFlags(0b1);
}
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkCommandBufferResetFlagBits.html>"]
pub struct CommandBufferResetFlags(pub(crate) Flags);
vk_bitflags_wrapped!(CommandBufferResetFlags, 0b1, Flags);
impl CommandBufferResetFlags {
#[doc = "Release resources owned by the buffer"]
pub const RELEASE_RESOURCES: Self = CommandBufferResetFlags(0b1);
}
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkSampleCountFlagBits.html>"]
pub struct SampleCountFlags(pub(crate) Flags);
vk_bitflags_wrapped!(SampleCountFlags, 0b1111111, Flags);
impl SampleCountFlags {
#[doc = "Sample count 1 supported"]
pub const TYPE_1: Self = SampleCountFlags(0b1);
#[doc = "Sample count 2 supported"]
pub const TYPE_2: Self = SampleCountFlags(0b10);
#[doc = "Sample count 4 supported"]
pub const TYPE_4: Self = SampleCountFlags(0b100);
#[doc = "Sample count 8 supported"]
pub const TYPE_8: Self = SampleCountFlags(0b1000);
#[doc = "Sample count 16 supported"]
pub const TYPE_16: Self = SampleCountFlags(0b10000);
#[doc = "Sample count 32 supported"]
pub const TYPE_32: Self = SampleCountFlags(0b100000);
#[doc = "Sample count 64 supported"]
pub const TYPE_64: Self = SampleCountFlags(0b1000000);
}
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkAttachmentDescriptionFlagBits.html>"]
pub struct AttachmentDescriptionFlags(pub(crate) Flags);
vk_bitflags_wrapped!(AttachmentDescriptionFlags, 0b1, Flags);
impl AttachmentDescriptionFlags {
#[doc = "The attachment may alias physical memory of another attachment in the same render pass"]
pub const MAY_ALIAS: Self = AttachmentDescriptionFlags(0b1);
}
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkStencilFaceFlagBits.html>"]
pub struct StencilFaceFlags(pub(crate) Flags);
vk_bitflags_wrapped!(StencilFaceFlags, 0b11, Flags);
impl StencilFaceFlags {
#[doc = "Front face"]
pub const FRONT: Self = StencilFaceFlags(0b1);
#[doc = "Back face"]
pub const BACK: Self = StencilFaceFlags(0b10);
#[doc = "Front and back faces"]
pub const STENCIL_FRONT_AND_BACK: Self = StencilFaceFlags(0x00000003);
}
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDescriptorPoolCreateFlagBits.html>"]
pub struct DescriptorPoolCreateFlags(pub(crate) Flags);
vk_bitflags_wrapped!(DescriptorPoolCreateFlags, 0b1, Flags);
impl DescriptorPoolCreateFlags {
#[doc = "Descriptor sets may be freed individually"]
pub const FREE_DESCRIPTOR_SET: Self = DescriptorPoolCreateFlags(0b1);
}
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDependencyFlagBits.html>"]
pub struct DependencyFlags(pub(crate) Flags);
vk_bitflags_wrapped!(DependencyFlags, 0b1, Flags);
impl DependencyFlags {
#[doc = "Dependency is per pixel region "]
pub const BY_REGION: Self = DependencyFlags(0b1);
}
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDisplayPlaneAlphaFlagBitsKHR.html>"]
pub struct DisplayPlaneAlphaFlagsKHR(pub(crate) Flags);
vk_bitflags_wrapped!(DisplayPlaneAlphaFlagsKHR, 0b1111, Flags);
impl DisplayPlaneAlphaFlagsKHR {
pub const OPAQUE: Self = DisplayPlaneAlphaFlagsKHR(0b1);
pub const GLOBAL: Self = DisplayPlaneAlphaFlagsKHR(0b10);
pub const PER_PIXEL: Self = DisplayPlaneAlphaFlagsKHR(0b100);
pub const PER_PIXEL_PREMULTIPLIED: Self = DisplayPlaneAlphaFlagsKHR(0b1000);
}
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkCompositeAlphaFlagBitsKHR.html>"]
pub struct CompositeAlphaFlagsKHR(pub(crate) Flags);
vk_bitflags_wrapped!(CompositeAlphaFlagsKHR, 0b1111, Flags);
impl CompositeAlphaFlagsKHR {
pub const OPAQUE: Self = CompositeAlphaFlagsKHR(0b1);
pub const PRE_MULTIPLIED: Self = CompositeAlphaFlagsKHR(0b10);
pub const POST_MULTIPLIED: Self = CompositeAlphaFlagsKHR(0b100);
pub const INHERIT: Self = CompositeAlphaFlagsKHR(0b1000);
}
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkSurfaceTransformFlagBitsKHR.html>"]
pub struct SurfaceTransformFlagsKHR(pub(crate) Flags);
vk_bitflags_wrapped!(SurfaceTransformFlagsKHR, 0b111111111, Flags);
impl SurfaceTransformFlagsKHR {
pub const IDENTITY: Self = SurfaceTransformFlagsKHR(0b1);
pub const ROTATE_90: Self = SurfaceTransformFlagsKHR(0b10);
pub const ROTATE_180: Self = SurfaceTransformFlagsKHR(0b100);
pub const ROTATE_270: Self = SurfaceTransformFlagsKHR(0b1000);
pub const HORIZONTAL_MIRROR: Self = SurfaceTransformFlagsKHR(0b10000);
pub const HORIZONTAL_MIRROR_ROTATE_90: Self = SurfaceTransformFlagsKHR(0b100000);
pub const HORIZONTAL_MIRROR_ROTATE_180: Self = SurfaceTransformFlagsKHR(0b1000000);
pub const HORIZONTAL_MIRROR_ROTATE_270: Self = SurfaceTransformFlagsKHR(0b10000000);
pub const INHERIT: Self = SurfaceTransformFlagsKHR(0b100000000);
}
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDebugReportFlagBitsEXT.html>"]
pub struct DebugReportFlagsEXT(pub(crate) Flags);
vk_bitflags_wrapped!(DebugReportFlagsEXT, 0b11111, Flags);
impl DebugReportFlagsEXT {
pub const INFORMATION: Self = DebugReportFlagsEXT(0b1);
pub const WARNING: Self = DebugReportFlagsEXT(0b10);
pub const PERFORMANCE_WARNING: Self = DebugReportFlagsEXT(0b100);
pub const ERROR: Self = DebugReportFlagsEXT(0b1000);
pub const DEBUG: Self = DebugReportFlagsEXT(0b10000);
}
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkExternalMemoryHandleTypeFlagBitsNV.html>"]
pub struct ExternalMemoryHandleTypeFlagsNV(pub(crate) Flags);
vk_bitflags_wrapped!(ExternalMemoryHandleTypeFlagsNV, 0b1111, Flags);
impl ExternalMemoryHandleTypeFlagsNV {
pub const EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_NV: Self =
ExternalMemoryHandleTypeFlagsNV(0b1);
pub const EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_NV: Self =
ExternalMemoryHandleTypeFlagsNV(0b10);
pub const EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_NV: Self =
ExternalMemoryHandleTypeFlagsNV(0b100);
pub const EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_KMT_NV: Self =
ExternalMemoryHandleTypeFlagsNV(0b1000);
}
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkExternalMemoryFeatureFlagBitsNV.html>"]
pub struct ExternalMemoryFeatureFlagsNV(pub(crate) Flags);
vk_bitflags_wrapped!(ExternalMemoryFeatureFlagsNV, 0b111, Flags);
impl ExternalMemoryFeatureFlagsNV {
pub const EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_NV: Self = ExternalMemoryFeatureFlagsNV(0b1);
pub const EXTERNAL_MEMORY_FEATURE_EXPORTABLE_NV: Self = ExternalMemoryFeatureFlagsNV(0b10);
pub const EXTERNAL_MEMORY_FEATURE_IMPORTABLE_NV: Self = ExternalMemoryFeatureFlagsNV(0b100);
}
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkSubgroupFeatureFlagBits.html>"]
pub struct SubgroupFeatureFlags(pub(crate) Flags);
vk_bitflags_wrapped!(SubgroupFeatureFlags, 0b11111111, Flags);
impl SubgroupFeatureFlags {
#[doc = "Basic subgroup operations"]
pub const BASIC: Self = SubgroupFeatureFlags(0b1);
#[doc = "Vote subgroup operations"]
pub const VOTE: Self = SubgroupFeatureFlags(0b10);
#[doc = "Arithmetic subgroup operations"]
pub const ARITHMETIC: Self = SubgroupFeatureFlags(0b100);
#[doc = "Ballot subgroup operations"]
pub const BALLOT: Self = SubgroupFeatureFlags(0b1000);
#[doc = "Shuffle subgroup operations"]
pub const SHUFFLE: Self = SubgroupFeatureFlags(0b10000);
#[doc = "Shuffle relative subgroup operations"]
pub const SHUFFLE_RELATIVE: Self = SubgroupFeatureFlags(0b100000);
#[doc = "Clustered subgroup operations"]
pub const CLUSTERED: Self = SubgroupFeatureFlags(0b1000000);
#[doc = "Quad subgroup operations"]
pub const QUAD: Self = SubgroupFeatureFlags(0b10000000);
}
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkIndirectCommandsLayoutUsageFlagBitsNVX.html>"]
pub struct IndirectCommandsLayoutUsageFlagsNVX(pub(crate) Flags);
vk_bitflags_wrapped!(IndirectCommandsLayoutUsageFlagsNVX, 0b1111, Flags);
impl IndirectCommandsLayoutUsageFlagsNVX {
pub const UNORDERED_SEQUENCES: Self = IndirectCommandsLayoutUsageFlagsNVX(0b1);
pub const SPARSE_SEQUENCES: Self = IndirectCommandsLayoutUsageFlagsNVX(0b10);
pub const EMPTY_EXECUTIONS: Self = IndirectCommandsLayoutUsageFlagsNVX(0b100);
pub const INDEXED_SEQUENCES: Self = IndirectCommandsLayoutUsageFlagsNVX(0b1000);
}
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkObjectEntryUsageFlagBitsNVX.html>"]
pub struct ObjectEntryUsageFlagsNVX(pub(crate) Flags);
vk_bitflags_wrapped!(ObjectEntryUsageFlagsNVX, 0b11, Flags);
impl ObjectEntryUsageFlagsNVX {
pub const GRAPHICS: Self = ObjectEntryUsageFlagsNVX(0b1);
pub const COMPUTE: Self = ObjectEntryUsageFlagsNVX(0b10);
}
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDescriptorSetLayoutCreateFlagBits.html>"]
pub struct DescriptorSetLayoutCreateFlags(pub(crate) Flags);
vk_bitflags_wrapped!(DescriptorSetLayoutCreateFlags, 0b0, Flags);
impl DescriptorSetLayoutCreateFlags {}
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkExternalMemoryHandleTypeFlagBits.html>"]
pub struct ExternalMemoryHandleTypeFlags(pub(crate) Flags);
vk_bitflags_wrapped!(ExternalMemoryHandleTypeFlags, 0b1111111, Flags);
impl ExternalMemoryHandleTypeFlags {
pub const EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD: Self = ExternalMemoryHandleTypeFlags(0b1);
pub const EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32: Self = ExternalMemoryHandleTypeFlags(0b10);
pub const EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT: Self =
ExternalMemoryHandleTypeFlags(0b100);
pub const EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE: Self =
ExternalMemoryHandleTypeFlags(0b1000);
pub const EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT: Self =
ExternalMemoryHandleTypeFlags(0b10000);
pub const EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP: Self =
ExternalMemoryHandleTypeFlags(0b100000);
pub const EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE: Self =
ExternalMemoryHandleTypeFlags(0b1000000);
}
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkExternalMemoryFeatureFlagBits.html>"]
pub struct ExternalMemoryFeatureFlags(pub(crate) Flags);
vk_bitflags_wrapped!(ExternalMemoryFeatureFlags, 0b111, Flags);
impl ExternalMemoryFeatureFlags {
pub const EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY: Self = ExternalMemoryFeatureFlags(0b1);
pub const EXTERNAL_MEMORY_FEATURE_EXPORTABLE: Self = ExternalMemoryFeatureFlags(0b10);
pub const EXTERNAL_MEMORY_FEATURE_IMPORTABLE: Self = ExternalMemoryFeatureFlags(0b100);
}
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkExternalSemaphoreHandleTypeFlagBits.html>"]
pub struct ExternalSemaphoreHandleTypeFlags(pub(crate) Flags);
vk_bitflags_wrapped!(ExternalSemaphoreHandleTypeFlags, 0b11111, Flags);
impl ExternalSemaphoreHandleTypeFlags {
pub const EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD: Self =
ExternalSemaphoreHandleTypeFlags(0b1);
pub const EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32: Self =
ExternalSemaphoreHandleTypeFlags(0b10);
pub const EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT: Self =
ExternalSemaphoreHandleTypeFlags(0b100);
pub const EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE: Self =
ExternalSemaphoreHandleTypeFlags(0b1000);
pub const EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD: Self =
ExternalSemaphoreHandleTypeFlags(0b10000);
}
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkExternalSemaphoreFeatureFlagBits.html>"]
pub struct ExternalSemaphoreFeatureFlags(pub(crate) Flags);
vk_bitflags_wrapped!(ExternalSemaphoreFeatureFlags, 0b11, Flags);
impl ExternalSemaphoreFeatureFlags {
pub const EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE: Self = ExternalSemaphoreFeatureFlags(0b1);
pub const EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE: Self = ExternalSemaphoreFeatureFlags(0b10);
}
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkSemaphoreImportFlagBits.html>"]
pub struct SemaphoreImportFlags(pub(crate) Flags);
vk_bitflags_wrapped!(SemaphoreImportFlags, 0b1, Flags);
impl SemaphoreImportFlags {
pub const TEMPORARY: Self = SemaphoreImportFlags(0b1);
}
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkExternalFenceHandleTypeFlagBits.html>"]
pub struct ExternalFenceHandleTypeFlags(pub(crate) Flags);
vk_bitflags_wrapped!(ExternalFenceHandleTypeFlags, 0b1111, Flags);
impl ExternalFenceHandleTypeFlags {
pub const EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD: Self = ExternalFenceHandleTypeFlags(0b1);
pub const EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32: Self = ExternalFenceHandleTypeFlags(0b10);
pub const EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT: Self =
ExternalFenceHandleTypeFlags(0b100);
pub const EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD: Self = ExternalFenceHandleTypeFlags(0b1000);
}
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkExternalFenceFeatureFlagBits.html>"]
pub struct ExternalFenceFeatureFlags(pub(crate) Flags);
vk_bitflags_wrapped!(ExternalFenceFeatureFlags, 0b11, Flags);
impl ExternalFenceFeatureFlags {
pub const EXTERNAL_FENCE_FEATURE_EXPORTABLE: Self = ExternalFenceFeatureFlags(0b1);
pub const EXTERNAL_FENCE_FEATURE_IMPORTABLE: Self = ExternalFenceFeatureFlags(0b10);
}
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkFenceImportFlagBits.html>"]
pub struct FenceImportFlags(pub(crate) Flags);
vk_bitflags_wrapped!(FenceImportFlags, 0b1, Flags);
impl FenceImportFlags {
pub const TEMPORARY: Self = FenceImportFlags(0b1);
}
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkSurfaceCounterFlagBitsEXT.html>"]
pub struct SurfaceCounterFlagsEXT(pub(crate) Flags);
vk_bitflags_wrapped!(SurfaceCounterFlagsEXT, 0b1, Flags);
impl SurfaceCounterFlagsEXT {
pub const VBLANK: Self = SurfaceCounterFlagsEXT(0b1);
}
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkPeerMemoryFeatureFlagBits.html>"]
pub struct PeerMemoryFeatureFlags(pub(crate) Flags);
vk_bitflags_wrapped!(PeerMemoryFeatureFlags, 0b1111, Flags);
impl PeerMemoryFeatureFlags {
#[doc = "Can read with vkCmdCopy commands"]
pub const COPY_SRC: Self = PeerMemoryFeatureFlags(0b1);
#[doc = "Can write with vkCmdCopy commands"]
pub const COPY_DST: Self = PeerMemoryFeatureFlags(0b10);
#[doc = "Can read with any access type/command"]
pub const GENERIC_SRC: Self = PeerMemoryFeatureFlags(0b100);
#[doc = "Can write with and access type/command"]
pub const GENERIC_DST: Self = PeerMemoryFeatureFlags(0b1000);
}
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkMemoryAllocateFlagBits.html>"]
pub struct MemoryAllocateFlags(pub(crate) Flags);
vk_bitflags_wrapped!(MemoryAllocateFlags, 0b1, Flags);
impl MemoryAllocateFlags {
#[doc = "Force allocation on specific devices"]
pub const DEVICE_MASK: Self = MemoryAllocateFlags(0b1);
}
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDeviceGroupPresentModeFlagBitsKHR.html>"]
pub struct DeviceGroupPresentModeFlagsKHR(pub(crate) Flags);
vk_bitflags_wrapped!(DeviceGroupPresentModeFlagsKHR, 0b1111, Flags);
impl DeviceGroupPresentModeFlagsKHR {
#[doc = "Present from local memory"]
pub const LOCAL: Self = DeviceGroupPresentModeFlagsKHR(0b1);
#[doc = "Present from remote memory"]
pub const REMOTE: Self = DeviceGroupPresentModeFlagsKHR(0b10);
#[doc = "Present sum of local and/or remote memory"]
pub const SUM: Self = DeviceGroupPresentModeFlagsKHR(0b100);
#[doc = "Each physical device presents from local memory"]
pub const LOCAL_MULTI_DEVICE: Self = DeviceGroupPresentModeFlagsKHR(0b1000);
}
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkSwapchainCreateFlagBitsKHR.html>"]
pub struct SwapchainCreateFlagsKHR(pub(crate) Flags);
vk_bitflags_wrapped!(SwapchainCreateFlagsKHR, 0b0, Flags);
impl SwapchainCreateFlagsKHR {}
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkSubpassDescriptionFlagBits.html>"]
pub struct SubpassDescriptionFlags(pub(crate) Flags);
vk_bitflags_wrapped!(SubpassDescriptionFlags, 0b0, Flags);
impl SubpassDescriptionFlags {}
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDebugUtilsMessageSeverityFlagBitsEXT.html>"]
pub struct DebugUtilsMessageSeverityFlagsEXT(pub(crate) Flags);
vk_bitflags_wrapped!(DebugUtilsMessageSeverityFlagsEXT, 0b1000100010001, Flags);
impl DebugUtilsMessageSeverityFlagsEXT {
pub const VERBOSE: Self = DebugUtilsMessageSeverityFlagsEXT(0b1);
pub const INFO: Self = DebugUtilsMessageSeverityFlagsEXT(0b10000);
pub const WARNING: Self = DebugUtilsMessageSeverityFlagsEXT(0b100000000);
pub const ERROR: Self = DebugUtilsMessageSeverityFlagsEXT(0b1000000000000);
}
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDebugUtilsMessageTypeFlagBitsEXT.html>"]
pub struct DebugUtilsMessageTypeFlagsEXT(pub(crate) Flags);
vk_bitflags_wrapped!(DebugUtilsMessageTypeFlagsEXT, 0b111, Flags);
impl DebugUtilsMessageTypeFlagsEXT {
pub const GENERAL: Self = DebugUtilsMessageTypeFlagsEXT(0b1);
pub const VALIDATION: Self = DebugUtilsMessageTypeFlagsEXT(0b10);
pub const PERFORMANCE: Self = DebugUtilsMessageTypeFlagsEXT(0b100);
}
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkDescriptorBindingFlagBitsEXT.html>"]
pub struct DescriptorBindingFlagsEXT(pub(crate) Flags);
vk_bitflags_wrapped!(DescriptorBindingFlagsEXT, 0b1111, Flags);
impl DescriptorBindingFlagsEXT {
pub const UPDATE_AFTER_BIND: Self = DescriptorBindingFlagsEXT(0b1);
pub const UPDATE_UNUSED_WHILE_PENDING: Self = DescriptorBindingFlagsEXT(0b10);
pub const PARTIALLY_BOUND: Self = DescriptorBindingFlagsEXT(0b100);
pub const VARIABLE_DESCRIPTOR_COUNT: Self = DescriptorBindingFlagsEXT(0b1000);
}
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkConditionalRenderingFlagBitsEXT.html>"]
pub struct ConditionalRenderingFlagsEXT(pub(crate) Flags);
vk_bitflags_wrapped!(ConditionalRenderingFlagsEXT, 0b1, Flags);
impl ConditionalRenderingFlagsEXT {
pub const INVERTED: Self = ConditionalRenderingFlagsEXT(0b1);
}
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkResolveModeFlagBitsKHR.html>"]
pub struct ResolveModeFlagsKHR(pub(crate) Flags);
vk_bitflags_wrapped!(ResolveModeFlagsKHR, 0b1111, Flags);
impl ResolveModeFlagsKHR {
pub const NONE: Self = ResolveModeFlagsKHR(0);
pub const SAMPLE_ZERO: Self = ResolveModeFlagsKHR(0b1);
pub const AVERAGE: Self = ResolveModeFlagsKHR(0b10);
pub const MIN: Self = ResolveModeFlagsKHR(0b100);
pub const MAX: Self = ResolveModeFlagsKHR(0b1000);
}
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkGeometryInstanceFlagBitsNV.html>"]
pub struct GeometryInstanceFlagsNV(pub(crate) Flags);
vk_bitflags_wrapped!(GeometryInstanceFlagsNV, 0b1111, Flags);
impl GeometryInstanceFlagsNV {
pub const TRIANGLE_CULL_DISABLE: Self = GeometryInstanceFlagsNV(0b1);
pub const TRIANGLE_FRONT_COUNTERCLOCKWISE: Self = GeometryInstanceFlagsNV(0b10);
pub const FORCE_OPAQUE: Self = GeometryInstanceFlagsNV(0b100);
pub const FORCE_NO_OPAQUE: Self = GeometryInstanceFlagsNV(0b1000);
}
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkGeometryFlagBitsNV.html>"]
pub struct GeometryFlagsNV(pub(crate) Flags);
vk_bitflags_wrapped!(GeometryFlagsNV, 0b11, Flags);
impl GeometryFlagsNV {
pub const OPAQUE: Self = GeometryFlagsNV(0b1);
pub const NO_DUPLICATE_ANY_HIT_INVOCATION: Self = GeometryFlagsNV(0b10);
}
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkBuildAccelerationStructureFlagBitsNV.html>"]
pub struct BuildAccelerationStructureFlagsNV(pub(crate) Flags);
vk_bitflags_wrapped!(BuildAccelerationStructureFlagsNV, 0b11111, Flags);
impl BuildAccelerationStructureFlagsNV {
pub const ALLOW_UPDATE: Self = BuildAccelerationStructureFlagsNV(0b1);
pub const ALLOW_COMPACTION: Self = BuildAccelerationStructureFlagsNV(0b10);
pub const PREFER_FAST_TRACE: Self = BuildAccelerationStructureFlagsNV(0b100);
pub const PREFER_FAST_BUILD: Self = BuildAccelerationStructureFlagsNV(0b1000);
pub const LOW_MEMORY: Self = BuildAccelerationStructureFlagsNV(0b10000);
}
pub const MAX_PHYSICAL_DEVICE_NAME_SIZE: usize = 256;
pub const UUID_SIZE: usize = 16;
pub const LUID_SIZE: usize = 8;
pub const MAX_EXTENSION_NAME_SIZE: usize = 256;
pub const MAX_DESCRIPTION_SIZE: usize = 256;
pub const MAX_MEMORY_TYPES: usize = 32;
pub const MAX_MEMORY_HEAPS: usize = 16;
pub const LOD_CLAMP_NONE: f32 = 1000.00;
pub const REMAINING_MIP_LEVELS: u32 = !0;
pub const REMAINING_ARRAY_LAYERS: u32 = !0;
pub const WHOLE_SIZE: u64 = !0;
pub const ATTACHMENT_UNUSED: u32 = !0;
pub const TRUE: Bool32 = 1;
pub const FALSE: Bool32 = 0;
pub const QUEUE_FAMILY_IGNORED: u32 = !0;
pub const QUEUE_FAMILY_EXTERNAL: u32 = !0 - 1;
pub const QUEUE_FAMILY_FOREIGN_EXT: u32 = !0 - 2;
pub const SUBPASS_EXTERNAL: u32 = !0;
pub const MAX_DEVICE_GROUP_SIZE: usize = 32;
pub const MAX_DRIVER_NAME_SIZE_KHR: usize = 256;
pub const MAX_DRIVER_INFO_SIZE_KHR: usize = 256;
pub const SHADER_UNUSED_NV: u32 = !0;
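// Illustrative note: the `!0` sentinels above are the all-ones bit pattern, i.e.
// `u32::MAX` (or `u64::MAX` for `WHOLE_SIZE`), matching the corresponding `VK_*`
// special values in the C headers. A minimal sketch of the equivalence:
#[allow(dead_code)]
fn example_sentinel_constants() {
    debug_assert_eq!(QUEUE_FAMILY_IGNORED, ::std::u32::MAX);
    debug_assert_eq!(QUEUE_FAMILY_EXTERNAL, ::std::u32::MAX - 1);
    debug_assert_eq!(WHOLE_SIZE, ::std::u64::MAX);
}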
impl KhrSurfaceFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_KHR_surface\0").expect("Wrong extension string")
}
}
#[allow(non_camel_case_types)]
pub type PFN_vkDestroySurfaceKHR = extern "system" fn(
instance: Instance,
surface: SurfaceKHR,
p_allocator: *const AllocationCallbacks,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkGetPhysicalDeviceSurfaceSupportKHR = extern "system" fn(
physical_device: PhysicalDevice,
queue_family_index: u32,
surface: SurfaceKHR,
p_supported: *mut Bool32,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR = extern "system" fn(
physical_device: PhysicalDevice,
surface: SurfaceKHR,
p_surface_capabilities: *mut SurfaceCapabilitiesKHR,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkGetPhysicalDeviceSurfaceFormatsKHR = extern "system" fn(
physical_device: PhysicalDevice,
surface: SurfaceKHR,
p_surface_format_count: *mut u32,
p_surface_formats: *mut SurfaceFormatKHR,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkGetPhysicalDeviceSurfacePresentModesKHR = extern "system" fn(
physical_device: PhysicalDevice,
surface: SurfaceKHR,
p_present_mode_count: *mut u32,
p_present_modes: *mut PresentModeKHR,
) -> Result;
pub struct KhrSurfaceFn {
pub destroy_surface_khr: extern "system" fn(
instance: Instance,
surface: SurfaceKHR,
p_allocator: *const AllocationCallbacks,
) -> c_void,
pub get_physical_device_surface_support_khr: extern "system" fn(
physical_device: PhysicalDevice,
queue_family_index: u32,
surface: SurfaceKHR,
p_supported: *mut Bool32,
) -> Result,
pub get_physical_device_surface_capabilities_khr: extern "system" fn(
physical_device: PhysicalDevice,
surface: SurfaceKHR,
p_surface_capabilities: *mut SurfaceCapabilitiesKHR,
) -> Result,
pub get_physical_device_surface_formats_khr: extern "system" fn(
physical_device: PhysicalDevice,
surface: SurfaceKHR,
p_surface_format_count: *mut u32,
p_surface_formats: *mut SurfaceFormatKHR,
) -> Result,
pub get_physical_device_surface_present_modes_khr: extern "system" fn(
physical_device: PhysicalDevice,
surface: SurfaceKHR,
p_present_mode_count: *mut u32,
p_present_modes: *mut PresentModeKHR,
) -> Result,
}
unsafe impl Send for KhrSurfaceFn {}
unsafe impl Sync for KhrSurfaceFn {}
impl ::std::clone::Clone for KhrSurfaceFn {
fn clone(&self) -> Self {
KhrSurfaceFn {
destroy_surface_khr: self.destroy_surface_khr,
get_physical_device_surface_support_khr: self.get_physical_device_surface_support_khr,
get_physical_device_surface_capabilities_khr: self
.get_physical_device_surface_capabilities_khr,
get_physical_device_surface_formats_khr: self.get_physical_device_surface_formats_khr,
get_physical_device_surface_present_modes_khr: self
.get_physical_device_surface_present_modes_khr,
}
}
}
impl KhrSurfaceFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
KhrSurfaceFn {
destroy_surface_khr: unsafe {
extern "system" fn destroy_surface_khr(
_instance: Instance,
_surface: SurfaceKHR,
_p_allocator: *const AllocationCallbacks,
) -> c_void {
panic!(concat!("Unable to load ", stringify!(destroy_surface_khr)))
}
let raw_name = stringify!(vkDestroySurfaceKHR);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
destroy_surface_khr
} else {
::std::mem::transmute(val)
}
},
get_physical_device_surface_support_khr: unsafe {
extern "system" fn get_physical_device_surface_support_khr(
_physical_device: PhysicalDevice,
_queue_family_index: u32,
_surface: SurfaceKHR,
_p_supported: *mut Bool32,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(get_physical_device_surface_support_khr)
))
}
let raw_name = stringify!(vkGetPhysicalDeviceSurfaceSupportKHR);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_physical_device_surface_support_khr
} else {
::std::mem::transmute(val)
}
},
get_physical_device_surface_capabilities_khr: unsafe {
extern "system" fn get_physical_device_surface_capabilities_khr(
_physical_device: PhysicalDevice,
_surface: SurfaceKHR,
_p_surface_capabilities: *mut SurfaceCapabilitiesKHR,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(get_physical_device_surface_capabilities_khr)
))
}
let raw_name = stringify!(vkGetPhysicalDeviceSurfaceCapabilitiesKHR);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_physical_device_surface_capabilities_khr
} else {
::std::mem::transmute(val)
}
},
get_physical_device_surface_formats_khr: unsafe {
extern "system" fn get_physical_device_surface_formats_khr(
_physical_device: PhysicalDevice,
_surface: SurfaceKHR,
_p_surface_format_count: *mut u32,
_p_surface_formats: *mut SurfaceFormatKHR,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(get_physical_device_surface_formats_khr)
))
}
let raw_name = stringify!(vkGetPhysicalDeviceSurfaceFormatsKHR);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_physical_device_surface_formats_khr
} else {
::std::mem::transmute(val)
}
},
get_physical_device_surface_present_modes_khr: unsafe {
extern "system" fn get_physical_device_surface_present_modes_khr(
_physical_device: PhysicalDevice,
_surface: SurfaceKHR,
_p_present_mode_count: *mut u32,
_p_present_modes: *mut PresentModeKHR,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(get_physical_device_surface_present_modes_khr)
))
}
let raw_name = stringify!(vkGetPhysicalDeviceSurfacePresentModesKHR);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_physical_device_surface_present_modes_khr
} else {
::std::mem::transmute(val)
}
},
}
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkDestroySurfaceKHR.html>"]
pub unsafe fn destroy_surface_khr(
&self,
instance: Instance,
surface: SurfaceKHR,
p_allocator: *const AllocationCallbacks,
) -> c_void {
(self.destroy_surface_khr)(instance, surface, p_allocator)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetPhysicalDeviceSurfaceSupportKHR.html>"]
pub unsafe fn get_physical_device_surface_support_khr(
&self,
physical_device: PhysicalDevice,
queue_family_index: u32,
surface: SurfaceKHR,
p_supported: *mut Bool32,
) -> Result {
(self.get_physical_device_surface_support_khr)(
physical_device,
queue_family_index,
surface,
p_supported,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetPhysicalDeviceSurfaceCapabilitiesKHR.html>"]
pub unsafe fn get_physical_device_surface_capabilities_khr(
&self,
physical_device: PhysicalDevice,
surface: SurfaceKHR,
p_surface_capabilities: *mut SurfaceCapabilitiesKHR,
) -> Result {
(self.get_physical_device_surface_capabilities_khr)(
physical_device,
surface,
p_surface_capabilities,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetPhysicalDeviceSurfaceFormatsKHR.html>"]
pub unsafe fn get_physical_device_surface_formats_khr(
&self,
physical_device: PhysicalDevice,
surface: SurfaceKHR,
p_surface_format_count: *mut u32,
p_surface_formats: *mut SurfaceFormatKHR,
) -> Result {
(self.get_physical_device_surface_formats_khr)(
physical_device,
surface,
p_surface_format_count,
p_surface_formats,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetPhysicalDeviceSurfacePresentModesKHR.html>"]
pub unsafe fn get_physical_device_surface_present_modes_khr(
&self,
physical_device: PhysicalDevice,
surface: SurfaceKHR,
p_present_mode_count: *mut u32,
p_present_modes: *mut PresentModeKHR,
) -> Result {
(self.get_physical_device_surface_present_modes_khr)(
physical_device,
surface,
p_present_mode_count,
p_present_modes,
)
}
}
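// Illustrative example (a sketch, not part of the generated bindings): `load`
// expects a closure that resolves a C function name to a function pointer,
// typically backed by `vkGetInstanceProcAddr`. The `lookup` argument below is a
// hypothetical stand-in for such a loader; names it cannot resolve fall back to
// the panicking stubs defined inside `load`, so a missing extension only fails
// when one of its entry points is actually called.
#[allow(dead_code)]
fn example_load_khr_surface<F>(mut lookup: F) -> KhrSurfaceFn
where
    F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
    KhrSurfaceFn::load(|name| lookup(name))
}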
#[doc = "Generated from \'VK_KHR_surface\'"]
impl Result {
pub const ERROR_SURFACE_LOST_KHR: Self = Result(-1000000000);
}
#[doc = "Generated from \'VK_KHR_surface\'"]
impl Result {
pub const ERROR_NATIVE_WINDOW_IN_USE_KHR: Self = Result(-1000000001);
}
#[doc = "Generated from \'VK_KHR_surface\'"]
impl ObjectType {
pub const SURFACE_KHR: Self = ObjectType(1000000000);
}
impl KhrSwapchainFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_KHR_swapchain\0")
.expect("Wrong extension string")
}
}
#[allow(non_camel_case_types)]
pub type PFN_vkCreateSwapchainKHR = extern "system" fn(
device: Device,
p_create_info: *const SwapchainCreateInfoKHR,
p_allocator: *const AllocationCallbacks,
p_swapchain: *mut SwapchainKHR,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkDestroySwapchainKHR = extern "system" fn(
device: Device,
swapchain: SwapchainKHR,
p_allocator: *const AllocationCallbacks,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkGetSwapchainImagesKHR = extern "system" fn(
device: Device,
swapchain: SwapchainKHR,
p_swapchain_image_count: *mut u32,
p_swapchain_images: *mut Image,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkAcquireNextImageKHR = extern "system" fn(
device: Device,
swapchain: SwapchainKHR,
timeout: u64,
semaphore: Semaphore,
fence: Fence,
p_image_index: *mut u32,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkQueuePresentKHR =
extern "system" fn(queue: Queue, p_present_info: *const PresentInfoKHR) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkGetDeviceGroupPresentCapabilitiesKHR = extern "system" fn(
device: Device,
p_device_group_present_capabilities: *mut DeviceGroupPresentCapabilitiesKHR,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkGetDeviceGroupSurfacePresentModesKHR = extern "system" fn(
device: Device,
surface: SurfaceKHR,
p_modes: *mut DeviceGroupPresentModeFlagsKHR,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkGetPhysicalDevicePresentRectanglesKHR = extern "system" fn(
physical_device: PhysicalDevice,
surface: SurfaceKHR,
p_rect_count: *mut u32,
p_rects: *mut Rect2D,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkAcquireNextImage2KHR = extern "system" fn(
device: Device,
p_acquire_info: *const AcquireNextImageInfoKHR,
p_image_index: *mut u32,
) -> Result;
pub struct KhrSwapchainFn {
pub create_swapchain_khr: extern "system" fn(
device: Device,
p_create_info: *const SwapchainCreateInfoKHR,
p_allocator: *const AllocationCallbacks,
p_swapchain: *mut SwapchainKHR,
) -> Result,
pub destroy_swapchain_khr: extern "system" fn(
device: Device,
swapchain: SwapchainKHR,
p_allocator: *const AllocationCallbacks,
) -> c_void,
pub get_swapchain_images_khr: extern "system" fn(
device: Device,
swapchain: SwapchainKHR,
p_swapchain_image_count: *mut u32,
p_swapchain_images: *mut Image,
) -> Result,
pub acquire_next_image_khr: extern "system" fn(
device: Device,
swapchain: SwapchainKHR,
timeout: u64,
semaphore: Semaphore,
fence: Fence,
p_image_index: *mut u32,
) -> Result,
pub queue_present_khr:
extern "system" fn(queue: Queue, p_present_info: *const PresentInfoKHR) -> Result,
pub get_device_group_present_capabilities_khr: extern "system" fn(
device: Device,
p_device_group_present_capabilities: *mut DeviceGroupPresentCapabilitiesKHR,
) -> Result,
pub get_device_group_surface_present_modes_khr: extern "system" fn(
device: Device,
surface: SurfaceKHR,
p_modes: *mut DeviceGroupPresentModeFlagsKHR,
) -> Result,
pub get_physical_device_present_rectangles_khr: extern "system" fn(
physical_device: PhysicalDevice,
surface: SurfaceKHR,
p_rect_count: *mut u32,
p_rects: *mut Rect2D,
) -> Result,
pub acquire_next_image2_khr: extern "system" fn(
device: Device,
p_acquire_info: *const AcquireNextImageInfoKHR,
p_image_index: *mut u32,
) -> Result,
}
unsafe impl Send for KhrSwapchainFn {}
unsafe impl Sync for KhrSwapchainFn {}
impl ::std::clone::Clone for KhrSwapchainFn {
fn clone(&self) -> Self {
KhrSwapchainFn {
create_swapchain_khr: self.create_swapchain_khr,
destroy_swapchain_khr: self.destroy_swapchain_khr,
get_swapchain_images_khr: self.get_swapchain_images_khr,
acquire_next_image_khr: self.acquire_next_image_khr,
queue_present_khr: self.queue_present_khr,
get_device_group_present_capabilities_khr: self
.get_device_group_present_capabilities_khr,
get_device_group_surface_present_modes_khr: self
.get_device_group_surface_present_modes_khr,
get_physical_device_present_rectangles_khr: self
.get_physical_device_present_rectangles_khr,
acquire_next_image2_khr: self.acquire_next_image2_khr,
}
}
}
impl KhrSwapchainFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
KhrSwapchainFn {
create_swapchain_khr: unsafe {
extern "system" fn create_swapchain_khr(
_device: Device,
_p_create_info: *const SwapchainCreateInfoKHR,
_p_allocator: *const AllocationCallbacks,
_p_swapchain: *mut SwapchainKHR,
) -> Result {
panic!(concat!("Unable to load ", stringify!(create_swapchain_khr)))
}
let raw_name = stringify!(vkCreateSwapchainKHR);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
create_swapchain_khr
} else {
::std::mem::transmute(val)
}
},
destroy_swapchain_khr: unsafe {
extern "system" fn destroy_swapchain_khr(
_device: Device,
_swapchain: SwapchainKHR,
_p_allocator: *const AllocationCallbacks,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(destroy_swapchain_khr)
))
}
let raw_name = stringify!(vkDestroySwapchainKHR);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
destroy_swapchain_khr
} else {
::std::mem::transmute(val)
}
},
get_swapchain_images_khr: unsafe {
extern "system" fn get_swapchain_images_khr(
_device: Device,
_swapchain: SwapchainKHR,
_p_swapchain_image_count: *mut u32,
_p_swapchain_images: *mut Image,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(get_swapchain_images_khr)
))
}
let raw_name = stringify!(vkGetSwapchainImagesKHR);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_swapchain_images_khr
} else {
::std::mem::transmute(val)
}
},
acquire_next_image_khr: unsafe {
extern "system" fn acquire_next_image_khr(
_device: Device,
_swapchain: SwapchainKHR,
_timeout: u64,
_semaphore: Semaphore,
_fence: Fence,
_p_image_index: *mut u32,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(acquire_next_image_khr)
))
}
let raw_name = stringify!(vkAcquireNextImageKHR);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
acquire_next_image_khr
} else {
::std::mem::transmute(val)
}
},
queue_present_khr: unsafe {
extern "system" fn queue_present_khr(
_queue: Queue,
_p_present_info: *const PresentInfoKHR,
) -> Result {
panic!(concat!("Unable to load ", stringify!(queue_present_khr)))
}
let raw_name = stringify!(vkQueuePresentKHR);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
queue_present_khr
} else {
::std::mem::transmute(val)
}
},
get_device_group_present_capabilities_khr: unsafe {
extern "system" fn get_device_group_present_capabilities_khr(
_device: Device,
_p_device_group_present_capabilities: *mut DeviceGroupPresentCapabilitiesKHR,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(get_device_group_present_capabilities_khr)
))
}
let raw_name = stringify!(vkGetDeviceGroupPresentCapabilitiesKHR);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_device_group_present_capabilities_khr
} else {
::std::mem::transmute(val)
}
},
get_device_group_surface_present_modes_khr: unsafe {
extern "system" fn get_device_group_surface_present_modes_khr(
_device: Device,
_surface: SurfaceKHR,
_p_modes: *mut DeviceGroupPresentModeFlagsKHR,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(get_device_group_surface_present_modes_khr)
))
}
let raw_name = stringify!(vkGetDeviceGroupSurfacePresentModesKHR);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_device_group_surface_present_modes_khr
} else {
::std::mem::transmute(val)
}
},
get_physical_device_present_rectangles_khr: unsafe {
extern "system" fn get_physical_device_present_rectangles_khr(
_physical_device: PhysicalDevice,
_surface: SurfaceKHR,
_p_rect_count: *mut u32,
_p_rects: *mut Rect2D,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(get_physical_device_present_rectangles_khr)
))
}
let raw_name = stringify!(vkGetPhysicalDevicePresentRectanglesKHR);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_physical_device_present_rectangles_khr
} else {
::std::mem::transmute(val)
}
},
acquire_next_image2_khr: unsafe {
extern "system" fn acquire_next_image2_khr(
_device: Device,
_p_acquire_info: *const AcquireNextImageInfoKHR,
_p_image_index: *mut u32,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(acquire_next_image2_khr)
))
}
let raw_name = stringify!(vkAcquireNextImage2KHR);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
acquire_next_image2_khr
} else {
::std::mem::transmute(val)
}
},
}
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCreateSwapchainKHR.html>"]
pub unsafe fn create_swapchain_khr(
&self,
device: Device,
p_create_info: *const SwapchainCreateInfoKHR,
p_allocator: *const AllocationCallbacks,
p_swapchain: *mut SwapchainKHR,
) -> Result {
(self.create_swapchain_khr)(device, p_create_info, p_allocator, p_swapchain)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkDestroySwapchainKHR.html>"]
pub unsafe fn destroy_swapchain_khr(
&self,
device: Device,
swapchain: SwapchainKHR,
p_allocator: *const AllocationCallbacks,
) -> c_void {
(self.destroy_swapchain_khr)(device, swapchain, p_allocator)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetSwapchainImagesKHR.html>"]
pub unsafe fn get_swapchain_images_khr(
&self,
device: Device,
swapchain: SwapchainKHR,
p_swapchain_image_count: *mut u32,
p_swapchain_images: *mut Image,
) -> Result {
(self.get_swapchain_images_khr)(
device,
swapchain,
p_swapchain_image_count,
p_swapchain_images,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkAcquireNextImageKHR.html>"]
pub unsafe fn acquire_next_image_khr(
&self,
device: Device,
swapchain: SwapchainKHR,
timeout: u64,
semaphore: Semaphore,
fence: Fence,
p_image_index: *mut u32,
) -> Result {
(self.acquire_next_image_khr)(device, swapchain, timeout, semaphore, fence, p_image_index)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkQueuePresentKHR.html>"]
pub unsafe fn queue_present_khr(
&self,
queue: Queue,
p_present_info: *const PresentInfoKHR,
) -> Result {
(self.queue_present_khr)(queue, p_present_info)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetDeviceGroupPresentCapabilitiesKHR.html>"]
pub unsafe fn get_device_group_present_capabilities_khr(
&self,
device: Device,
p_device_group_present_capabilities: *mut DeviceGroupPresentCapabilitiesKHR,
) -> Result {
(self.get_device_group_present_capabilities_khr)(
device,
p_device_group_present_capabilities,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetDeviceGroupSurfacePresentModesKHR.html>"]
pub unsafe fn get_device_group_surface_present_modes_khr(
&self,
device: Device,
surface: SurfaceKHR,
p_modes: *mut DeviceGroupPresentModeFlagsKHR,
) -> Result {
(self.get_device_group_surface_present_modes_khr)(device, surface, p_modes)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetPhysicalDevicePresentRectanglesKHR.html>"]
pub unsafe fn get_physical_device_present_rectangles_khr(
&self,
physical_device: PhysicalDevice,
surface: SurfaceKHR,
p_rect_count: *mut u32,
p_rects: *mut Rect2D,
) -> Result {
(self.get_physical_device_present_rectangles_khr)(
physical_device,
surface,
p_rect_count,
p_rects,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkAcquireNextImage2KHR.html>"]
pub unsafe fn acquire_next_image2_khr(
&self,
device: Device,
p_acquire_info: *const AcquireNextImageInfoKHR,
p_image_index: *mut u32,
) -> Result {
(self.acquire_next_image2_khr)(device, p_acquire_info, p_image_index)
}
}
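// Illustrative only: a minimal sketch of how a table like `KhrSwapchainFn` might be
// loaded and used, assuming a `device` handle and some `get_device_proc_addr` loader
// obtained elsewhere (both hypothetical here). `load` falls back to the panicking
// stubs above for any symbol the loader cannot resolve.
//
//     let swapchain_fn = KhrSwapchainFn::load(|name| unsafe {
//         ::std::mem::transmute(get_device_proc_addr(device, name.as_ptr()))
//     });
//     // later, e.g.: swapchain_fn.queue_present_khr(queue, &present_info);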
#[doc = "Generated from \'VK_KHR_swapchain\'"]
impl StructureType {
pub const SWAPCHAIN_CREATE_INFO_KHR: Self = StructureType(1000001000);
}
#[doc = "Generated from \'VK_KHR_swapchain\'"]
impl StructureType {
pub const PRESENT_INFO_KHR: Self = StructureType(1000001001);
}
#[doc = "Generated from \'VK_KHR_swapchain\'"]
impl ImageLayout {
pub const PRESENT_SRC_KHR: Self = ImageLayout(1000001002);
}
#[doc = "Generated from \'VK_KHR_swapchain\'"]
impl Result {
pub const SUBOPTIMAL_KHR: Self = Result(1000001003);
}
#[doc = "Generated from \'VK_KHR_swapchain\'"]
impl Result {
pub const ERROR_OUT_OF_DATE_KHR: Self = Result(-1000001004);
}
#[doc = "Generated from \'VK_KHR_swapchain\'"]
impl ObjectType {
pub const SWAPCHAIN_KHR: Self = ObjectType(1000001000);
}
#[doc = "Generated from \'VK_KHR_swapchain\'"]
impl StructureType {
pub const DEVICE_GROUP_PRESENT_CAPABILITIES_KHR: Self = StructureType(1000060007);
}
#[doc = "Generated from \'VK_KHR_swapchain\'"]
impl StructureType {
pub const IMAGE_SWAPCHAIN_CREATE_INFO_KHR: Self = StructureType(1000060008);
}
#[doc = "Generated from \'VK_KHR_swapchain\'"]
impl StructureType {
pub const BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR: Self = StructureType(1000060009);
}
#[doc = "Generated from \'VK_KHR_swapchain\'"]
impl StructureType {
pub const ACQUIRE_NEXT_IMAGE_INFO_KHR: Self = StructureType(1000060010);
}
#[doc = "Generated from \'VK_KHR_swapchain\'"]
impl StructureType {
pub const DEVICE_GROUP_PRESENT_INFO_KHR: Self = StructureType(1000060011);
}
#[doc = "Generated from \'VK_KHR_swapchain\'"]
impl StructureType {
pub const DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR: Self = StructureType(1000060012);
}
#[doc = "Generated from \'VK_KHR_swapchain\'"]
impl SwapchainCreateFlagsKHR {
pub const SPLIT_INSTANCE_BIND_REGIONS: Self = SwapchainCreateFlagsKHR(0b1);
}
#[doc = "Generated from \'VK_KHR_swapchain\'"]
impl SwapchainCreateFlagsKHR {
pub const PROTECTED: Self = SwapchainCreateFlagsKHR(0b10);
}
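// Note on the constants above: extension-provided enum values follow the Vulkan
// registry rule `1000000000 + (extension_number - 1) * 1000 + offset`, negated for
// error codes. VK_KHR_swapchain is extension number 2, so Result::SUBOPTIMAL_KHR at
// offset 3 is 1000000000 + 1 * 1000 + 3 = 1000001003 and Result::ERROR_OUT_OF_DATE_KHR
// at offset 4 is -1000001004. The 1000060xxx values come from the VK_KHR_device_group
// interaction (extension number 61).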
impl KhrDisplayFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_KHR_display\0").expect("Wrong extension string")
}
}
#[allow(non_camel_case_types)]
pub type PFN_vkGetPhysicalDeviceDisplayPropertiesKHR = extern "system" fn(
physical_device: PhysicalDevice,
p_property_count: *mut u32,
p_properties: *mut DisplayPropertiesKHR,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR = extern "system" fn(
physical_device: PhysicalDevice,
p_property_count: *mut u32,
p_properties: *mut DisplayPlanePropertiesKHR,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkGetDisplayPlaneSupportedDisplaysKHR = extern "system" fn(
physical_device: PhysicalDevice,
plane_index: u32,
p_display_count: *mut u32,
p_displays: *mut DisplayKHR,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkGetDisplayModePropertiesKHR = extern "system" fn(
physical_device: PhysicalDevice,
display: DisplayKHR,
p_property_count: *mut u32,
p_properties: *mut DisplayModePropertiesKHR,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkCreateDisplayModeKHR = extern "system" fn(
physical_device: PhysicalDevice,
display: DisplayKHR,
p_create_info: *const DisplayModeCreateInfoKHR,
p_allocator: *const AllocationCallbacks,
p_mode: *mut DisplayModeKHR,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkGetDisplayPlaneCapabilitiesKHR = extern "system" fn(
physical_device: PhysicalDevice,
mode: DisplayModeKHR,
plane_index: u32,
p_capabilities: *mut DisplayPlaneCapabilitiesKHR,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkCreateDisplayPlaneSurfaceKHR = extern "system" fn(
instance: Instance,
p_create_info: *const DisplaySurfaceCreateInfoKHR,
p_allocator: *const AllocationCallbacks,
p_surface: *mut SurfaceKHR,
) -> Result;
pub struct KhrDisplayFn {
pub get_physical_device_display_properties_khr: extern "system" fn(
physical_device: PhysicalDevice,
p_property_count: *mut u32,
p_properties: *mut DisplayPropertiesKHR,
) -> Result,
pub get_physical_device_display_plane_properties_khr: extern "system" fn(
physical_device: PhysicalDevice,
p_property_count: *mut u32,
p_properties: *mut DisplayPlanePropertiesKHR,
) -> Result,
pub get_display_plane_supported_displays_khr: extern "system" fn(
physical_device: PhysicalDevice,
plane_index: u32,
p_display_count: *mut u32,
p_displays: *mut DisplayKHR,
) -> Result,
pub get_display_mode_properties_khr: extern "system" fn(
physical_device: PhysicalDevice,
display: DisplayKHR,
p_property_count: *mut u32,
p_properties: *mut DisplayModePropertiesKHR,
) -> Result,
pub create_display_mode_khr: extern "system" fn(
physical_device: PhysicalDevice,
display: DisplayKHR,
p_create_info: *const DisplayModeCreateInfoKHR,
p_allocator: *const AllocationCallbacks,
p_mode: *mut DisplayModeKHR,
) -> Result,
pub get_display_plane_capabilities_khr: extern "system" fn(
physical_device: PhysicalDevice,
mode: DisplayModeKHR,
plane_index: u32,
p_capabilities: *mut DisplayPlaneCapabilitiesKHR,
) -> Result,
pub create_display_plane_surface_khr: extern "system" fn(
instance: Instance,
p_create_info: *const DisplaySurfaceCreateInfoKHR,
p_allocator: *const AllocationCallbacks,
p_surface: *mut SurfaceKHR,
) -> Result,
}
unsafe impl Send for KhrDisplayFn {}
unsafe impl Sync for KhrDisplayFn {}
impl ::std::clone::Clone for KhrDisplayFn {
fn clone(&self) -> Self {
KhrDisplayFn {
get_physical_device_display_properties_khr: self
.get_physical_device_display_properties_khr,
get_physical_device_display_plane_properties_khr: self
.get_physical_device_display_plane_properties_khr,
get_display_plane_supported_displays_khr: self.get_display_plane_supported_displays_khr,
get_display_mode_properties_khr: self.get_display_mode_properties_khr,
create_display_mode_khr: self.create_display_mode_khr,
get_display_plane_capabilities_khr: self.get_display_plane_capabilities_khr,
create_display_plane_surface_khr: self.create_display_plane_surface_khr,
}
}
}
impl KhrDisplayFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
KhrDisplayFn {
get_physical_device_display_properties_khr: unsafe {
extern "system" fn get_physical_device_display_properties_khr(
_physical_device: PhysicalDevice,
_p_property_count: *mut u32,
_p_properties: *mut DisplayPropertiesKHR,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(get_physical_device_display_properties_khr)
))
}
let raw_name = stringify!(vkGetPhysicalDeviceDisplayPropertiesKHR);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_physical_device_display_properties_khr
} else {
::std::mem::transmute(val)
}
},
get_physical_device_display_plane_properties_khr: unsafe {
extern "system" fn get_physical_device_display_plane_properties_khr(
_physical_device: PhysicalDevice,
_p_property_count: *mut u32,
_p_properties: *mut DisplayPlanePropertiesKHR,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(get_physical_device_display_plane_properties_khr)
))
}
let raw_name = stringify!(vkGetPhysicalDeviceDisplayPlanePropertiesKHR);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_physical_device_display_plane_properties_khr
} else {
::std::mem::transmute(val)
}
},
get_display_plane_supported_displays_khr: unsafe {
extern "system" fn get_display_plane_supported_displays_khr(
_physical_device: PhysicalDevice,
_plane_index: u32,
_p_display_count: *mut u32,
_p_displays: *mut DisplayKHR,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(get_display_plane_supported_displays_khr)
))
}
let raw_name = stringify!(vkGetDisplayPlaneSupportedDisplaysKHR);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_display_plane_supported_displays_khr
} else {
::std::mem::transmute(val)
}
},
get_display_mode_properties_khr: unsafe {
extern "system" fn get_display_mode_properties_khr(
_physical_device: PhysicalDevice,
_display: DisplayKHR,
_p_property_count: *mut u32,
_p_properties: *mut DisplayModePropertiesKHR,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(get_display_mode_properties_khr)
))
}
let raw_name = stringify!(vkGetDisplayModePropertiesKHR);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_display_mode_properties_khr
} else {
::std::mem::transmute(val)
}
},
create_display_mode_khr: unsafe {
extern "system" fn create_display_mode_khr(
_physical_device: PhysicalDevice,
_display: DisplayKHR,
_p_create_info: *const DisplayModeCreateInfoKHR,
_p_allocator: *const AllocationCallbacks,
_p_mode: *mut DisplayModeKHR,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(create_display_mode_khr)
))
}
let raw_name = stringify!(vkCreateDisplayModeKHR);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
create_display_mode_khr
} else {
::std::mem::transmute(val)
}
},
get_display_plane_capabilities_khr: unsafe {
extern "system" fn get_display_plane_capabilities_khr(
_physical_device: PhysicalDevice,
_mode: DisplayModeKHR,
_plane_index: u32,
_p_capabilities: *mut DisplayPlaneCapabilitiesKHR,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(get_display_plane_capabilities_khr)
))
}
let raw_name = stringify!(vkGetDisplayPlaneCapabilitiesKHR);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_display_plane_capabilities_khr
} else {
::std::mem::transmute(val)
}
},
create_display_plane_surface_khr: unsafe {
extern "system" fn create_display_plane_surface_khr(
_instance: Instance,
_p_create_info: *const DisplaySurfaceCreateInfoKHR,
_p_allocator: *const AllocationCallbacks,
_p_surface: *mut SurfaceKHR,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(create_display_plane_surface_khr)
))
}
let raw_name = stringify!(vkCreateDisplayPlaneSurfaceKHR);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
create_display_plane_surface_khr
} else {
::std::mem::transmute(val)
}
},
}
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetPhysicalDeviceDisplayPropertiesKHR.html>"]
pub unsafe fn get_physical_device_display_properties_khr(
&self,
physical_device: PhysicalDevice,
p_property_count: *mut u32,
p_properties: *mut DisplayPropertiesKHR,
) -> Result {
(self.get_physical_device_display_properties_khr)(
physical_device,
p_property_count,
p_properties,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetPhysicalDeviceDisplayPlanePropertiesKHR.html>"]
pub unsafe fn get_physical_device_display_plane_properties_khr(
&self,
physical_device: PhysicalDevice,
p_property_count: *mut u32,
p_properties: *mut DisplayPlanePropertiesKHR,
) -> Result {
(self.get_physical_device_display_plane_properties_khr)(
physical_device,
p_property_count,
p_properties,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetDisplayPlaneSupportedDisplaysKHR.html>"]
pub unsafe fn get_display_plane_supported_displays_khr(
&self,
physical_device: PhysicalDevice,
plane_index: u32,
p_display_count: *mut u32,
p_displays: *mut DisplayKHR,
) -> Result {
(self.get_display_plane_supported_displays_khr)(
physical_device,
plane_index,
p_display_count,
p_displays,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetDisplayModePropertiesKHR.html>"]
pub unsafe fn get_display_mode_properties_khr(
&self,
physical_device: PhysicalDevice,
display: DisplayKHR,
p_property_count: *mut u32,
p_properties: *mut DisplayModePropertiesKHR,
) -> Result {
(self.get_display_mode_properties_khr)(
physical_device,
display,
p_property_count,
p_properties,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCreateDisplayModeKHR.html>"]
pub unsafe fn create_display_mode_khr(
&self,
physical_device: PhysicalDevice,
display: DisplayKHR,
p_create_info: *const DisplayModeCreateInfoKHR,
p_allocator: *const AllocationCallbacks,
p_mode: *mut DisplayModeKHR,
) -> Result {
(self.create_display_mode_khr)(physical_device, display, p_create_info, p_allocator, p_mode)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetDisplayPlaneCapabilitiesKHR.html>"]
pub unsafe fn get_display_plane_capabilities_khr(
&self,
physical_device: PhysicalDevice,
mode: DisplayModeKHR,
plane_index: u32,
p_capabilities: *mut DisplayPlaneCapabilitiesKHR,
) -> Result {
(self.get_display_plane_capabilities_khr)(
physical_device,
mode,
plane_index,
p_capabilities,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCreateDisplayPlaneSurfaceKHR.html>"]
pub unsafe fn create_display_plane_surface_khr(
&self,
instance: Instance,
p_create_info: *const DisplaySurfaceCreateInfoKHR,
p_allocator: *const AllocationCallbacks,
p_surface: *mut SurfaceKHR,
) -> Result {
(self.create_display_plane_surface_khr)(instance, p_create_info, p_allocator, p_surface)
}
}
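// Illustrative only: the `*_property_count` / `*_properties` pairs above follow the
// usual Vulkan two-call enumeration pattern. A minimal sketch, assuming a loaded
// `display_fn: KhrDisplayFn`, a `pdevice: PhysicalDevice`, and the generated
// `Default`/`Clone` impls for `DisplayPropertiesKHR` (all hypothetical here):
//
//     let mut count = 0u32;
//     display_fn.get_physical_device_display_properties_khr(pdevice, &mut count, ::std::ptr::null_mut());
//     let mut props = vec![DisplayPropertiesKHR::default(); count as usize];
//     display_fn.get_physical_device_display_properties_khr(pdevice, &mut count, props.as_mut_ptr());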
#[doc = "Generated from \'VK_KHR_display\'"]
impl StructureType {
pub const DISPLAY_MODE_CREATE_INFO_KHR: Self = StructureType(1000002000);
}
#[doc = "Generated from \'VK_KHR_display\'"]
impl StructureType {
pub const DISPLAY_SURFACE_CREATE_INFO_KHR: Self = StructureType(1000002001);
}
#[doc = "Generated from \'VK_KHR_display\'"]
impl ObjectType {
pub const DISPLAY_KHR: Self = ObjectType(1000002000);
}
#[doc = "Generated from \'VK_KHR_display\'"]
impl ObjectType {
pub const DISPLAY_MODE_KHR: Self = ObjectType(1000002001);
}
impl KhrDisplaySwapchainFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_KHR_display_swapchain\0")
.expect("Wrong extension string")
}
}
#[allow(non_camel_case_types)]
pub type PFN_vkCreateSharedSwapchainsKHR = extern "system" fn(
device: Device,
swapchain_count: u32,
p_create_infos: *const SwapchainCreateInfoKHR,
p_allocator: *const AllocationCallbacks,
p_swapchains: *mut SwapchainKHR,
) -> Result;
pub struct KhrDisplaySwapchainFn {
pub create_shared_swapchains_khr: extern "system" fn(
device: Device,
swapchain_count: u32,
p_create_infos: *const SwapchainCreateInfoKHR,
p_allocator: *const AllocationCallbacks,
p_swapchains: *mut SwapchainKHR,
) -> Result,
}
unsafe impl Send for KhrDisplaySwapchainFn {}
unsafe impl Sync for KhrDisplaySwapchainFn {}
impl ::std::clone::Clone for KhrDisplaySwapchainFn {
fn clone(&self) -> Self {
KhrDisplaySwapchainFn {
create_shared_swapchains_khr: self.create_shared_swapchains_khr,
}
}
}
impl KhrDisplaySwapchainFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
KhrDisplaySwapchainFn {
create_shared_swapchains_khr: unsafe {
extern "system" fn create_shared_swapchains_khr(
_device: Device,
_swapchain_count: u32,
_p_create_infos: *const SwapchainCreateInfoKHR,
_p_allocator: *const AllocationCallbacks,
_p_swapchains: *mut SwapchainKHR,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(create_shared_swapchains_khr)
))
}
let raw_name = stringify!(vkCreateSharedSwapchainsKHR);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
create_shared_swapchains_khr
} else {
::std::mem::transmute(val)
}
},
}
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCreateSharedSwapchainsKHR.html>"]
pub unsafe fn create_shared_swapchains_khr(
&self,
device: Device,
swapchain_count: u32,
p_create_infos: *const SwapchainCreateInfoKHR,
p_allocator: *const AllocationCallbacks,
p_swapchains: *mut SwapchainKHR,
) -> Result {
(self.create_shared_swapchains_khr)(
device,
swapchain_count,
p_create_infos,
p_allocator,
p_swapchains,
)
}
}
#[doc = "Generated from \'VK_KHR_display_swapchain\'"]
impl StructureType {
pub const DISPLAY_PRESENT_INFO_KHR: Self = StructureType(1000003000);
}
#[doc = "Generated from \'VK_KHR_display_swapchain\'"]
impl Result {
pub const ERROR_INCOMPATIBLE_DISPLAY_KHR: Self = Result(-1000003001);
}
impl KhrXlibSurfaceFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_KHR_xlib_surface\0")
.expect("Wrong extension string")
}
}
#[allow(non_camel_case_types)]
pub type PFN_vkCreateXlibSurfaceKHR = extern "system" fn(
instance: Instance,
p_create_info: *const XlibSurfaceCreateInfoKHR,
p_allocator: *const AllocationCallbacks,
p_surface: *mut SurfaceKHR,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR = extern "system" fn(
physical_device: PhysicalDevice,
queue_family_index: u32,
dpy: *mut Display,
visual_id: VisualID,
) -> Bool32;
pub struct KhrXlibSurfaceFn {
pub create_xlib_surface_khr: extern "system" fn(
instance: Instance,
p_create_info: *const XlibSurfaceCreateInfoKHR,
p_allocator: *const AllocationCallbacks,
p_surface: *mut SurfaceKHR,
) -> Result,
pub get_physical_device_xlib_presentation_support_khr: extern "system" fn(
physical_device: PhysicalDevice,
queue_family_index: u32,
dpy: *mut Display,
visual_id: VisualID,
) -> Bool32,
}
unsafe impl Send for KhrXlibSurfaceFn {}
unsafe impl Sync for KhrXlibSurfaceFn {}
impl ::std::clone::Clone for KhrXlibSurfaceFn {
fn clone(&self) -> Self {
KhrXlibSurfaceFn {
create_xlib_surface_khr: self.create_xlib_surface_khr,
get_physical_device_xlib_presentation_support_khr: self
.get_physical_device_xlib_presentation_support_khr,
}
}
}
impl KhrXlibSurfaceFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
KhrXlibSurfaceFn {
create_xlib_surface_khr: unsafe {
extern "system" fn create_xlib_surface_khr(
_instance: Instance,
_p_create_info: *const XlibSurfaceCreateInfoKHR,
_p_allocator: *const AllocationCallbacks,
_p_surface: *mut SurfaceKHR,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(create_xlib_surface_khr)
))
}
let raw_name = stringify!(vkCreateXlibSurfaceKHR);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
create_xlib_surface_khr
} else {
::std::mem::transmute(val)
}
},
get_physical_device_xlib_presentation_support_khr: unsafe {
extern "system" fn get_physical_device_xlib_presentation_support_khr(
_physical_device: PhysicalDevice,
_queue_family_index: u32,
_dpy: *mut Display,
_visual_id: VisualID,
) -> Bool32 {
panic!(concat!(
"Unable to load ",
stringify!(get_physical_device_xlib_presentation_support_khr)
))
}
let raw_name = stringify!(vkGetPhysicalDeviceXlibPresentationSupportKHR);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_physical_device_xlib_presentation_support_khr
} else {
::std::mem::transmute(val)
}
},
}
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCreateXlibSurfaceKHR.html>"]
pub unsafe fn create_xlib_surface_khr(
&self,
instance: Instance,
p_create_info: *const XlibSurfaceCreateInfoKHR,
p_allocator: *const AllocationCallbacks,
p_surface: *mut SurfaceKHR,
) -> Result {
(self.create_xlib_surface_khr)(instance, p_create_info, p_allocator, p_surface)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetPhysicalDeviceXlibPresentationSupportKHR.html>"]
pub unsafe fn get_physical_device_xlib_presentation_support_khr(
&self,
physical_device: PhysicalDevice,
queue_family_index: u32,
dpy: *mut Display,
visual_id: VisualID,
) -> Bool32 {
(self.get_physical_device_xlib_presentation_support_khr)(
physical_device,
queue_family_index,
dpy,
visual_id,
)
}
}
#[doc = "Generated from \'VK_KHR_xlib_surface\'"]
impl StructureType {
pub const XLIB_SURFACE_CREATE_INFO_KHR: Self = StructureType(1000004000);
}
impl KhrXcbSurfaceFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_KHR_xcb_surface\0")
.expect("Wrong extension string")
}
}
#[allow(non_camel_case_types)]
pub type PFN_vkCreateXcbSurfaceKHR = extern "system" fn(
instance: Instance,
p_create_info: *const XcbSurfaceCreateInfoKHR,
p_allocator: *const AllocationCallbacks,
p_surface: *mut SurfaceKHR,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR = extern "system" fn(
physical_device: PhysicalDevice,
queue_family_index: u32,
connection: *mut xcb_connection_t,
visual_id: xcb_visualid_t,
) -> Bool32;
pub struct KhrXcbSurfaceFn {
pub create_xcb_surface_khr: extern "system" fn(
instance: Instance,
p_create_info: *const XcbSurfaceCreateInfoKHR,
p_allocator: *const AllocationCallbacks,
p_surface: *mut SurfaceKHR,
) -> Result,
pub get_physical_device_xcb_presentation_support_khr: extern "system" fn(
physical_device: PhysicalDevice,
queue_family_index: u32,
connection: *mut xcb_connection_t,
visual_id: xcb_visualid_t,
) -> Bool32,
}
unsafe impl Send for KhrXcbSurfaceFn {}
unsafe impl Sync for KhrXcbSurfaceFn {}
impl ::std::clone::Clone for KhrXcbSurfaceFn {
fn clone(&self) -> Self {
KhrXcbSurfaceFn {
create_xcb_surface_khr: self.create_xcb_surface_khr,
get_physical_device_xcb_presentation_support_khr: self
.get_physical_device_xcb_presentation_support_khr,
}
}
}
impl KhrXcbSurfaceFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
KhrXcbSurfaceFn {
create_xcb_surface_khr: unsafe {
extern "system" fn create_xcb_surface_khr(
_instance: Instance,
_p_create_info: *const XcbSurfaceCreateInfoKHR,
_p_allocator: *const AllocationCallbacks,
_p_surface: *mut SurfaceKHR,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(create_xcb_surface_khr)
))
}
let raw_name = stringify!(vkCreateXcbSurfaceKHR);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
create_xcb_surface_khr
} else {
::std::mem::transmute(val)
}
},
get_physical_device_xcb_presentation_support_khr: unsafe {
extern "system" fn get_physical_device_xcb_presentation_support_khr(
_physical_device: PhysicalDevice,
_queue_family_index: u32,
_connection: *mut xcb_connection_t,
_visual_id: xcb_visualid_t,
) -> Bool32 {
panic!(concat!(
"Unable to load ",
stringify!(get_physical_device_xcb_presentation_support_khr)
))
}
let raw_name = stringify!(vkGetPhysicalDeviceXcbPresentationSupportKHR);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_physical_device_xcb_presentation_support_khr
} else {
::std::mem::transmute(val)
}
},
}
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCreateXcbSurfaceKHR.html>"]
pub unsafe fn create_xcb_surface_khr(
&self,
instance: Instance,
p_create_info: *const XcbSurfaceCreateInfoKHR,
p_allocator: *const AllocationCallbacks,
p_surface: *mut SurfaceKHR,
) -> Result {
(self.create_xcb_surface_khr)(instance, p_create_info, p_allocator, p_surface)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetPhysicalDeviceXcbPresentationSupportKHR.html>"]
pub unsafe fn get_physical_device_xcb_presentation_support_khr(
&self,
physical_device: PhysicalDevice,
queue_family_index: u32,
connection: *mut xcb_connection_t,
visual_id: xcb_visualid_t,
) -> Bool32 {
(self.get_physical_device_xcb_presentation_support_khr)(
physical_device,
queue_family_index,
connection,
visual_id,
)
}
}
#[doc = "Generated from \'VK_KHR_xcb_surface\'"]
impl StructureType {
pub const XCB_SURFACE_CREATE_INFO_KHR: Self = StructureType(1000005000);
}
impl KhrWaylandSurfaceFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_KHR_wayland_surface\0")
.expect("Wrong extension string")
}
}
#[allow(non_camel_case_types)]
pub type PFN_vkCreateWaylandSurfaceKHR = extern "system" fn(
instance: Instance,
p_create_info: *const WaylandSurfaceCreateInfoKHR,
p_allocator: *const AllocationCallbacks,
p_surface: *mut SurfaceKHR,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR = extern "system" fn(
physical_device: PhysicalDevice,
queue_family_index: u32,
display: *mut wl_display,
) -> Bool32;
pub struct KhrWaylandSurfaceFn {
pub create_wayland_surface_khr: extern "system" fn(
instance: Instance,
p_create_info: *const WaylandSurfaceCreateInfoKHR,
p_allocator: *const AllocationCallbacks,
p_surface: *mut SurfaceKHR,
) -> Result,
pub get_physical_device_wayland_presentation_support_khr: extern "system" fn(
physical_device: PhysicalDevice,
queue_family_index: u32,
display: *mut wl_display,
) -> Bool32,
}
unsafe impl Send for KhrWaylandSurfaceFn {}
unsafe impl Sync for KhrWaylandSurfaceFn {}
impl ::std::clone::Clone for KhrWaylandSurfaceFn {
fn clone(&self) -> Self {
KhrWaylandSurfaceFn {
create_wayland_surface_khr: self.create_wayland_surface_khr,
get_physical_device_wayland_presentation_support_khr: self
.get_physical_device_wayland_presentation_support_khr,
}
}
}
impl KhrWaylandSurfaceFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
KhrWaylandSurfaceFn {
create_wayland_surface_khr: unsafe {
extern "system" fn create_wayland_surface_khr(
_instance: Instance,
_p_create_info: *const WaylandSurfaceCreateInfoKHR,
_p_allocator: *const AllocationCallbacks,
_p_surface: *mut SurfaceKHR,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(create_wayland_surface_khr)
))
}
let raw_name = stringify!(vkCreateWaylandSurfaceKHR);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
create_wayland_surface_khr
} else {
::std::mem::transmute(val)
}
},
get_physical_device_wayland_presentation_support_khr: unsafe {
extern "system" fn get_physical_device_wayland_presentation_support_khr(
_physical_device: PhysicalDevice,
_queue_family_index: u32,
_display: *mut wl_display,
) -> Bool32 {
panic!(concat!(
"Unable to load ",
stringify!(get_physical_device_wayland_presentation_support_khr)
))
}
let raw_name = stringify!(vkGetPhysicalDeviceWaylandPresentationSupportKHR);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_physical_device_wayland_presentation_support_khr
} else {
::std::mem::transmute(val)
}
},
}
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCreateWaylandSurfaceKHR.html>"]
pub unsafe fn create_wayland_surface_khr(
&self,
instance: Instance,
p_create_info: *const WaylandSurfaceCreateInfoKHR,
p_allocator: *const AllocationCallbacks,
p_surface: *mut SurfaceKHR,
) -> Result {
(self.create_wayland_surface_khr)(instance, p_create_info, p_allocator, p_surface)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetPhysicalDeviceWaylandPresentationSupportKHR.html>"]
pub unsafe fn get_physical_device_wayland_presentation_support_khr(
&self,
physical_device: PhysicalDevice,
queue_family_index: u32,
display: *mut wl_display,
) -> Bool32 {
(self.get_physical_device_wayland_presentation_support_khr)(
physical_device,
queue_family_index,
display,
)
}
}
#[doc = "Generated from \'VK_KHR_wayland_surface\'"]
impl StructureType {
pub const WAYLAND_SURFACE_CREATE_INFO_KHR: Self = StructureType(1000006000);
}
impl KhrMirSurfaceFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_KHR_mir_surface\0")
.expect("Wrong extension string")
}
}
pub struct KhrMirSurfaceFn {}
unsafe impl Send for KhrMirSurfaceFn {}
unsafe impl Sync for KhrMirSurfaceFn {}
impl ::std::clone::Clone for KhrMirSurfaceFn {
fn clone(&self) -> Self {
KhrMirSurfaceFn {}
}
}
impl KhrMirSurfaceFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
KhrMirSurfaceFn {}
}
}
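// VK_KHR_mir_surface defines no commands in the registry this file was generated from,
// so its function table is empty and `load` ignores the loader entirely; the same
// pattern recurs for the other command-less extensions below (e.g. VK_NV_glsl_shader,
// VK_IMG_filter_cubic).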
impl KhrAndroidSurfaceFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_KHR_android_surface\0")
.expect("Wrong extension string")
}
}
#[allow(non_camel_case_types)]
pub type PFN_vkCreateAndroidSurfaceKHR = extern "system" fn(
instance: Instance,
p_create_info: *const AndroidSurfaceCreateInfoKHR,
p_allocator: *const AllocationCallbacks,
p_surface: *mut SurfaceKHR,
) -> Result;
pub struct KhrAndroidSurfaceFn {
pub create_android_surface_khr: extern "system" fn(
instance: Instance,
p_create_info: *const AndroidSurfaceCreateInfoKHR,
p_allocator: *const AllocationCallbacks,
p_surface: *mut SurfaceKHR,
) -> Result,
}
unsafe impl Send for KhrAndroidSurfaceFn {}
unsafe impl Sync for KhrAndroidSurfaceFn {}
impl ::std::clone::Clone for KhrAndroidSurfaceFn {
fn clone(&self) -> Self {
KhrAndroidSurfaceFn {
create_android_surface_khr: self.create_android_surface_khr,
}
}
}
impl KhrAndroidSurfaceFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
KhrAndroidSurfaceFn {
create_android_surface_khr: unsafe {
extern "system" fn create_android_surface_khr(
_instance: Instance,
_p_create_info: *const AndroidSurfaceCreateInfoKHR,
_p_allocator: *const AllocationCallbacks,
_p_surface: *mut SurfaceKHR,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(create_android_surface_khr)
))
}
let raw_name = stringify!(vkCreateAndroidSurfaceKHR);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
create_android_surface_khr
} else {
::std::mem::transmute(val)
}
},
}
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCreateAndroidSurfaceKHR.html>"]
pub unsafe fn create_android_surface_khr(
&self,
instance: Instance,
p_create_info: *const AndroidSurfaceCreateInfoKHR,
p_allocator: *const AllocationCallbacks,
p_surface: *mut SurfaceKHR,
) -> Result {
(self.create_android_surface_khr)(instance, p_create_info, p_allocator, p_surface)
}
}
#[doc = "Generated from \'VK_KHR_android_surface\'"]
impl StructureType {
pub const ANDROID_SURFACE_CREATE_INFO_KHR: Self = StructureType(1000008000);
}
impl KhrWin32SurfaceFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_KHR_win32_surface\0")
.expect("Wrong extension string")
}
}
#[allow(non_camel_case_types)]
pub type PFN_vkCreateWin32SurfaceKHR = extern "system" fn(
instance: Instance,
p_create_info: *const Win32SurfaceCreateInfoKHR,
p_allocator: *const AllocationCallbacks,
p_surface: *mut SurfaceKHR,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR =
extern "system" fn(physical_device: PhysicalDevice, queue_family_index: u32) -> Bool32;
pub struct KhrWin32SurfaceFn {
pub create_win32_surface_khr: extern "system" fn(
instance: Instance,
p_create_info: *const Win32SurfaceCreateInfoKHR,
p_allocator: *const AllocationCallbacks,
p_surface: *mut SurfaceKHR,
) -> Result,
pub get_physical_device_win32_presentation_support_khr:
extern "system" fn(physical_device: PhysicalDevice, queue_family_index: u32) -> Bool32,
}
unsafe impl Send for KhrWin32SurfaceFn {}
unsafe impl Sync for KhrWin32SurfaceFn {}
impl ::std::clone::Clone for KhrWin32SurfaceFn {
fn clone(&self) -> Self {
KhrWin32SurfaceFn {
create_win32_surface_khr: self.create_win32_surface_khr,
get_physical_device_win32_presentation_support_khr: self
.get_physical_device_win32_presentation_support_khr,
}
}
}
impl KhrWin32SurfaceFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
KhrWin32SurfaceFn {
create_win32_surface_khr: unsafe {
extern "system" fn create_win32_surface_khr(
_instance: Instance,
_p_create_info: *const Win32SurfaceCreateInfoKHR,
_p_allocator: *const AllocationCallbacks,
_p_surface: *mut SurfaceKHR,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(create_win32_surface_khr)
))
}
let raw_name = stringify!(vkCreateWin32SurfaceKHR);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
create_win32_surface_khr
} else {
::std::mem::transmute(val)
}
},
get_physical_device_win32_presentation_support_khr: unsafe {
extern "system" fn get_physical_device_win32_presentation_support_khr(
_physical_device: PhysicalDevice,
_queue_family_index: u32,
) -> Bool32 {
panic!(concat!(
"Unable to load ",
stringify!(get_physical_device_win32_presentation_support_khr)
))
}
let raw_name = stringify!(vkGetPhysicalDeviceWin32PresentationSupportKHR);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_physical_device_win32_presentation_support_khr
} else {
::std::mem::transmute(val)
}
},
}
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCreateWin32SurfaceKHR.html>"]
pub unsafe fn create_win32_surface_khr(
&self,
instance: Instance,
p_create_info: *const Win32SurfaceCreateInfoKHR,
p_allocator: *const AllocationCallbacks,
p_surface: *mut SurfaceKHR,
) -> Result {
(self.create_win32_surface_khr)(instance, p_create_info, p_allocator, p_surface)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetPhysicalDeviceWin32PresentationSupportKHR.html>"]
pub unsafe fn get_physical_device_win32_presentation_support_khr(
&self,
physical_device: PhysicalDevice,
queue_family_index: u32,
) -> Bool32 {
(self.get_physical_device_win32_presentation_support_khr)(
physical_device,
queue_family_index,
)
}
}
#[doc = "Generated from \'VK_KHR_win32_surface\'"]
impl StructureType {
pub const WIN32_SURFACE_CREATE_INFO_KHR: Self = StructureType(1000009000);
}
impl AndroidNativeBufferFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_ANDROID_native_buffer\0")
.expect("Wrong extension string")
}
}
#[allow(non_camel_case_types)]
pub type PFN_vkGetSwapchainGrallocUsageANDROID = extern "system" fn(
device: Device,
format: Format,
image_usage: ImageUsageFlags,
gralloc_usage: *mut c_int,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkAcquireImageANDROID = extern "system" fn(
device: Device,
image: Image,
native_fence_fd: c_int,
semaphore: Semaphore,
fence: Fence,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkQueueSignalReleaseImageANDROID = extern "system" fn(
queue: Queue,
wait_semaphore_count: u32,
p_wait_semaphores: *const Semaphore,
image: Image,
p_native_fence_fd: *mut c_int,
) -> Result;
pub struct AndroidNativeBufferFn {
pub get_swapchain_gralloc_usage_android: extern "system" fn(
device: Device,
format: Format,
image_usage: ImageUsageFlags,
gralloc_usage: *mut c_int,
) -> Result,
pub acquire_image_android: extern "system" fn(
device: Device,
image: Image,
native_fence_fd: c_int,
semaphore: Semaphore,
fence: Fence,
) -> Result,
pub queue_signal_release_image_android: extern "system" fn(
queue: Queue,
wait_semaphore_count: u32,
p_wait_semaphores: *const Semaphore,
image: Image,
p_native_fence_fd: *mut c_int,
) -> Result,
}
unsafe impl Send for AndroidNativeBufferFn {}
unsafe impl Sync for AndroidNativeBufferFn {}
impl ::std::clone::Clone for AndroidNativeBufferFn {
fn clone(&self) -> Self {
AndroidNativeBufferFn {
get_swapchain_gralloc_usage_android: self.get_swapchain_gralloc_usage_android,
acquire_image_android: self.acquire_image_android,
queue_signal_release_image_android: self.queue_signal_release_image_android,
}
}
}
impl AndroidNativeBufferFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
AndroidNativeBufferFn {
get_swapchain_gralloc_usage_android: unsafe {
extern "system" fn get_swapchain_gralloc_usage_android(
_device: Device,
_format: Format,
_image_usage: ImageUsageFlags,
_gralloc_usage: *mut c_int,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(get_swapchain_gralloc_usage_android)
))
}
let raw_name = stringify!(vkGetSwapchainGrallocUsageANDROID);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_swapchain_gralloc_usage_android
} else {
::std::mem::transmute(val)
}
},
acquire_image_android: unsafe {
extern "system" fn acquire_image_android(
_device: Device,
_image: Image,
_native_fence_fd: c_int,
_semaphore: Semaphore,
_fence: Fence,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(acquire_image_android)
))
}
let raw_name = stringify!(vkAcquireImageANDROID);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
acquire_image_android
} else {
::std::mem::transmute(val)
}
},
queue_signal_release_image_android: unsafe {
extern "system" fn queue_signal_release_image_android(
_queue: Queue,
_wait_semaphore_count: u32,
_p_wait_semaphores: *const Semaphore,
_image: Image,
_p_native_fence_fd: *mut c_int,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(queue_signal_release_image_android)
))
}
let raw_name = stringify!(vkQueueSignalReleaseImageANDROID);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
queue_signal_release_image_android
} else {
::std::mem::transmute(val)
}
},
}
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetSwapchainGrallocUsageANDROID.html>"]
pub unsafe fn get_swapchain_gralloc_usage_android(
&self,
device: Device,
format: Format,
image_usage: ImageUsageFlags,
gralloc_usage: *mut c_int,
) -> Result {
(self.get_swapchain_gralloc_usage_android)(device, format, image_usage, gralloc_usage)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkAcquireImageANDROID.html>"]
pub unsafe fn acquire_image_android(
&self,
device: Device,
image: Image,
native_fence_fd: c_int,
semaphore: Semaphore,
fence: Fence,
) -> Result {
(self.acquire_image_android)(device, image, native_fence_fd, semaphore, fence)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkQueueSignalReleaseImageANDROID.html>"]
pub unsafe fn queue_signal_release_image_android(
&self,
queue: Queue,
wait_semaphore_count: u32,
p_wait_semaphores: *const Semaphore,
image: Image,
p_native_fence_fd: *mut c_int,
) -> Result {
(self.queue_signal_release_image_android)(
queue,
wait_semaphore_count,
p_wait_semaphores,
image,
p_native_fence_fd,
)
}
}
#[doc = "Generated from \'VK_ANDROID_native_buffer\'"]
impl StructureType {
pub const NATIVE_BUFFER_ANDROID: Self = StructureType(1000010000);
}
impl ExtDebugReportFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_EXT_debug_report\0")
.expect("Wrong extension string")
}
}
#[allow(non_camel_case_types)]
pub type PFN_vkCreateDebugReportCallbackEXT = extern "system" fn(
instance: Instance,
p_create_info: *const DebugReportCallbackCreateInfoEXT,
p_allocator: *const AllocationCallbacks,
p_callback: *mut DebugReportCallbackEXT,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkDestroyDebugReportCallbackEXT = extern "system" fn(
instance: Instance,
callback: DebugReportCallbackEXT,
p_allocator: *const AllocationCallbacks,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkDebugReportMessageEXT = extern "system" fn(
instance: Instance,
flags: DebugReportFlagsEXT,
object_type: DebugReportObjectTypeEXT,
object: u64,
location: usize,
message_code: i32,
p_layer_prefix: *const c_char,
p_message: *const c_char,
) -> c_void;
pub struct ExtDebugReportFn {
pub create_debug_report_callback_ext: extern "system" fn(
instance: Instance,
p_create_info: *const DebugReportCallbackCreateInfoEXT,
p_allocator: *const AllocationCallbacks,
p_callback: *mut DebugReportCallbackEXT,
) -> Result,
pub destroy_debug_report_callback_ext: extern "system" fn(
instance: Instance,
callback: DebugReportCallbackEXT,
p_allocator: *const AllocationCallbacks,
) -> c_void,
pub debug_report_message_ext: extern "system" fn(
instance: Instance,
flags: DebugReportFlagsEXT,
object_type: DebugReportObjectTypeEXT,
object: u64,
location: usize,
message_code: i32,
p_layer_prefix: *const c_char,
p_message: *const c_char,
) -> c_void,
}
unsafe impl Send for ExtDebugReportFn {}
unsafe impl Sync for ExtDebugReportFn {}
impl ::std::clone::Clone for ExtDebugReportFn {
fn clone(&self) -> Self {
ExtDebugReportFn {
create_debug_report_callback_ext: self.create_debug_report_callback_ext,
destroy_debug_report_callback_ext: self.destroy_debug_report_callback_ext,
debug_report_message_ext: self.debug_report_message_ext,
}
}
}
impl ExtDebugReportFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
ExtDebugReportFn {
create_debug_report_callback_ext: unsafe {
extern "system" fn create_debug_report_callback_ext(
_instance: Instance,
_p_create_info: *const DebugReportCallbackCreateInfoEXT,
_p_allocator: *const AllocationCallbacks,
_p_callback: *mut DebugReportCallbackEXT,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(create_debug_report_callback_ext)
))
}
let raw_name = stringify!(vkCreateDebugReportCallbackEXT);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
create_debug_report_callback_ext
} else {
::std::mem::transmute(val)
}
},
destroy_debug_report_callback_ext: unsafe {
extern "system" fn destroy_debug_report_callback_ext(
_instance: Instance,
_callback: DebugReportCallbackEXT,
_p_allocator: *const AllocationCallbacks,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(destroy_debug_report_callback_ext)
))
}
let raw_name = stringify!(vkDestroyDebugReportCallbackEXT);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
destroy_debug_report_callback_ext
} else {
::std::mem::transmute(val)
}
},
debug_report_message_ext: unsafe {
extern "system" fn debug_report_message_ext(
_instance: Instance,
_flags: DebugReportFlagsEXT,
_object_type: DebugReportObjectTypeEXT,
_object: u64,
_location: usize,
_message_code: i32,
_p_layer_prefix: *const c_char,
_p_message: *const c_char,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(debug_report_message_ext)
))
}
let raw_name = stringify!(vkDebugReportMessageEXT);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
debug_report_message_ext
} else {
::std::mem::transmute(val)
}
},
}
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCreateDebugReportCallbackEXT.html>"]
pub unsafe fn create_debug_report_callback_ext(
&self,
instance: Instance,
p_create_info: *const DebugReportCallbackCreateInfoEXT,
p_allocator: *const AllocationCallbacks,
p_callback: *mut DebugReportCallbackEXT,
) -> Result {
(self.create_debug_report_callback_ext)(instance, p_create_info, p_allocator, p_callback)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkDestroyDebugReportCallbackEXT.html>"]
pub unsafe fn destroy_debug_report_callback_ext(
&self,
instance: Instance,
callback: DebugReportCallbackEXT,
p_allocator: *const AllocationCallbacks,
) -> c_void {
(self.destroy_debug_report_callback_ext)(instance, callback, p_allocator)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkDebugReportMessageEXT.html>"]
pub unsafe fn debug_report_message_ext(
&self,
instance: Instance,
flags: DebugReportFlagsEXT,
object_type: DebugReportObjectTypeEXT,
object: u64,
location: usize,
message_code: i32,
p_layer_prefix: *const c_char,
p_message: *const c_char,
) -> c_void {
(self.debug_report_message_ext)(
instance,
flags,
object_type,
object,
location,
message_code,
p_layer_prefix,
p_message,
)
}
}
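// Illustrative only: a rough sketch of pairing the create/destroy entry points above,
// assuming an `instance`, a filled-in `create_info: DebugReportCallbackCreateInfoEXT`,
// a loaded `debug_report_fn: ExtDebugReportFn`, and the generated null-handle `Default`
// impl (all hypothetical here):
//
//     let mut callback = DebugReportCallbackEXT::default();
//     let _result = debug_report_fn.create_debug_report_callback_ext(
//         instance, &create_info, ::std::ptr::null(), &mut callback);
//     // ... on teardown:
//     debug_report_fn.destroy_debug_report_callback_ext(instance, callback, ::std::ptr::null());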
#[doc = "Generated from \'VK_EXT_debug_report\'"]
impl StructureType {
pub const DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT: Self = StructureType(1000011000);
}
#[doc = "Generated from \'VK_EXT_debug_report\'"]
impl Result {
pub const ERROR_VALIDATION_FAILED_EXT: Self = Result(-1000011001);
}
#[doc = "Generated from \'VK_EXT_debug_report\'"]
impl ObjectType {
pub const DEBUG_REPORT_CALLBACK_EXT: Self = ObjectType(1000011000);
}
#[doc = "Generated from \'VK_EXT_debug_report\'"]
impl DebugReportObjectTypeEXT {
pub const SAMPLER_YCBCR_CONVERSION: Self = DebugReportObjectTypeEXT(1000156000);
}
#[doc = "Generated from \'VK_EXT_debug_report\'"]
impl DebugReportObjectTypeEXT {
pub const DESCRIPTOR_UPDATE_TEMPLATE: Self = DebugReportObjectTypeEXT(1000085000);
}
impl NvGlslShaderFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_NV_glsl_shader\0")
.expect("Wrong extension string")
}
}
pub struct NvGlslShaderFn {}
unsafe impl Send for NvGlslShaderFn {}
unsafe impl Sync for NvGlslShaderFn {}
impl ::std::clone::Clone for NvGlslShaderFn {
fn clone(&self) -> Self {
NvGlslShaderFn {}
}
}
impl NvGlslShaderFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
NvGlslShaderFn {}
}
}
#[doc = "Generated from \'VK_NV_glsl_shader\'"]
impl Result {
pub const ERROR_INVALID_SHADER_NV: Self = Result(-1000012000);
}
impl ExtDepthRangeUnrestrictedFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_EXT_depth_range_unrestricted\0")
.expect("Wrong extension string")
}
}
pub struct ExtDepthRangeUnrestrictedFn {}
unsafe impl Send for ExtDepthRangeUnrestrictedFn {}
unsafe impl Sync for ExtDepthRangeUnrestrictedFn {}
impl ::std::clone::Clone for ExtDepthRangeUnrestrictedFn {
fn clone(&self) -> Self {
ExtDepthRangeUnrestrictedFn {}
}
}
impl ExtDepthRangeUnrestrictedFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
ExtDepthRangeUnrestrictedFn {}
}
}
impl KhrSamplerMirrorClampToEdgeFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_KHR_sampler_mirror_clamp_to_edge\0")
.expect("Wrong extension string")
}
}
pub struct KhrSamplerMirrorClampToEdgeFn {}
unsafe impl Send for KhrSamplerMirrorClampToEdgeFn {}
unsafe impl Sync for KhrSamplerMirrorClampToEdgeFn {}
impl ::std::clone::Clone for KhrSamplerMirrorClampToEdgeFn {
fn clone(&self) -> Self {
KhrSamplerMirrorClampToEdgeFn {}
}
}
impl KhrSamplerMirrorClampToEdgeFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
KhrSamplerMirrorClampToEdgeFn {}
}
}
impl ImgFilterCubicFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_IMG_filter_cubic\0")
.expect("Wrong extension string")
}
}
pub struct ImgFilterCubicFn {}
unsafe impl Send for ImgFilterCubicFn {}
unsafe impl Sync for ImgFilterCubicFn {}
impl ::std::clone::Clone for ImgFilterCubicFn {
fn clone(&self) -> Self {
ImgFilterCubicFn {}
}
}
impl ImgFilterCubicFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
ImgFilterCubicFn {}
}
}
#[doc = "Generated from \'VK_IMG_filter_cubic\'"]
impl Filter {
pub const CUBIC_IMG: Self = Filter(1000015000);
}
#[doc = "Generated from \'VK_IMG_filter_cubic\'"]
impl FormatFeatureFlags {
pub const SAMPLED_IMAGE_FILTER_CUBIC_IMG: Self = FormatFeatureFlags(0b10000000000000);
}
impl AmdExtension17Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_AMD_extension_17\0")
.expect("Wrong extension string")
}
}
pub struct AmdExtension17Fn {}
unsafe impl Send for AmdExtension17Fn {}
unsafe impl Sync for AmdExtension17Fn {}
impl ::std::clone::Clone for AmdExtension17Fn {
fn clone(&self) -> Self {
AmdExtension17Fn {}
}
}
impl AmdExtension17Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
AmdExtension17Fn {}
}
}
impl AmdExtension18Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_AMD_extension_18\0")
.expect("Wrong extension string")
}
}
pub struct AmdExtension18Fn {}
unsafe impl Send for AmdExtension18Fn {}
unsafe impl Sync for AmdExtension18Fn {}
impl ::std::clone::Clone for AmdExtension18Fn {
fn clone(&self) -> Self {
AmdExtension18Fn {}
}
}
impl AmdExtension18Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
AmdExtension18Fn {}
}
}
impl AmdRasterizationOrderFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_AMD_rasterization_order\0")
.expect("Wrong extension string")
}
}
pub struct AmdRasterizationOrderFn {}
unsafe impl Send for AmdRasterizationOrderFn {}
unsafe impl Sync for AmdRasterizationOrderFn {}
impl ::std::clone::Clone for AmdRasterizationOrderFn {
fn clone(&self) -> Self {
AmdRasterizationOrderFn {}
}
}
impl AmdRasterizationOrderFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
AmdRasterizationOrderFn {}
}
}
#[doc = "Generated from \'VK_AMD_rasterization_order\'"]
impl StructureType {
pub const PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD: Self =
StructureType(1000018000);
}
impl AmdExtension20Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_AMD_extension_20\0")
.expect("Wrong extension string")
}
}
pub struct AmdExtension20Fn {}
unsafe impl Send for AmdExtension20Fn {}
unsafe impl Sync for AmdExtension20Fn {}
impl ::std::clone::Clone for AmdExtension20Fn {
fn clone(&self) -> Self {
AmdExtension20Fn {}
}
}
impl AmdExtension20Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
AmdExtension20Fn {}
}
}
impl AmdShaderTrinaryMinmaxFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_AMD_shader_trinary_minmax\0")
.expect("Wrong extension string")
}
}
pub struct AmdShaderTrinaryMinmaxFn {}
unsafe impl Send for AmdShaderTrinaryMinmaxFn {}
unsafe impl Sync for AmdShaderTrinaryMinmaxFn {}
impl ::std::clone::Clone for AmdShaderTrinaryMinmaxFn {
fn clone(&self) -> Self {
AmdShaderTrinaryMinmaxFn {}
}
}
impl AmdShaderTrinaryMinmaxFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
AmdShaderTrinaryMinmaxFn {}
}
}
impl AmdShaderExplicitVertexParameterFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_AMD_shader_explicit_vertex_parameter\0")
.expect("Wrong extension string")
}
}
pub struct AmdShaderExplicitVertexParameterFn {}
unsafe impl Send for AmdShaderExplicitVertexParameterFn {}
unsafe impl Sync for AmdShaderExplicitVertexParameterFn {}
impl ::std::clone::Clone for AmdShaderExplicitVertexParameterFn {
fn clone(&self) -> Self {
AmdShaderExplicitVertexParameterFn {}
}
}
impl AmdShaderExplicitVertexParameterFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
AmdShaderExplicitVertexParameterFn {}
}
}
impl ExtDebugMarkerFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_EXT_debug_marker\0")
.expect("Wrong extension string")
}
}
#[allow(non_camel_case_types)]
pub type PFN_vkDebugMarkerSetObjectTagEXT =
extern "system" fn(device: Device, p_tag_info: *const DebugMarkerObjectTagInfoEXT) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkDebugMarkerSetObjectNameEXT =
extern "system" fn(device: Device, p_name_info: *const DebugMarkerObjectNameInfoEXT) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdDebugMarkerBeginEXT = extern "system" fn(
command_buffer: CommandBuffer,
p_marker_info: *const DebugMarkerMarkerInfoEXT,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdDebugMarkerEndEXT = extern "system" fn(command_buffer: CommandBuffer) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdDebugMarkerInsertEXT = extern "system" fn(
command_buffer: CommandBuffer,
p_marker_info: *const DebugMarkerMarkerInfoEXT,
) -> c_void;
pub struct ExtDebugMarkerFn {
pub debug_marker_set_object_tag_ext: extern "system" fn(
device: Device,
p_tag_info: *const DebugMarkerObjectTagInfoEXT,
) -> Result,
pub debug_marker_set_object_name_ext: extern "system" fn(
device: Device,
p_name_info: *const DebugMarkerObjectNameInfoEXT,
) -> Result,
pub cmd_debug_marker_begin_ext: extern "system" fn(
command_buffer: CommandBuffer,
p_marker_info: *const DebugMarkerMarkerInfoEXT,
) -> c_void,
pub cmd_debug_marker_end_ext: extern "system" fn(command_buffer: CommandBuffer) -> c_void,
pub cmd_debug_marker_insert_ext: extern "system" fn(
command_buffer: CommandBuffer,
p_marker_info: *const DebugMarkerMarkerInfoEXT,
) -> c_void,
}
unsafe impl Send for ExtDebugMarkerFn {}
unsafe impl Sync for ExtDebugMarkerFn {}
impl ::std::clone::Clone for ExtDebugMarkerFn {
fn clone(&self) -> Self {
ExtDebugMarkerFn {
debug_marker_set_object_tag_ext: self.debug_marker_set_object_tag_ext,
debug_marker_set_object_name_ext: self.debug_marker_set_object_name_ext,
cmd_debug_marker_begin_ext: self.cmd_debug_marker_begin_ext,
cmd_debug_marker_end_ext: self.cmd_debug_marker_end_ext,
cmd_debug_marker_insert_ext: self.cmd_debug_marker_insert_ext,
}
}
}
impl ExtDebugMarkerFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
ExtDebugMarkerFn {
debug_marker_set_object_tag_ext: unsafe {
extern "system" fn debug_marker_set_object_tag_ext(
_device: Device,
_p_tag_info: *const DebugMarkerObjectTagInfoEXT,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(debug_marker_set_object_tag_ext)
))
}
let raw_name = stringify!(vkDebugMarkerSetObjectTagEXT);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
debug_marker_set_object_tag_ext
} else {
::std::mem::transmute(val)
}
},
debug_marker_set_object_name_ext: unsafe {
extern "system" fn debug_marker_set_object_name_ext(
_device: Device,
_p_name_info: *const DebugMarkerObjectNameInfoEXT,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(debug_marker_set_object_name_ext)
))
}
let raw_name = stringify!(vkDebugMarkerSetObjectNameEXT);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
debug_marker_set_object_name_ext
} else {
::std::mem::transmute(val)
}
},
cmd_debug_marker_begin_ext: unsafe {
extern "system" fn cmd_debug_marker_begin_ext(
_command_buffer: CommandBuffer,
_p_marker_info: *const DebugMarkerMarkerInfoEXT,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(cmd_debug_marker_begin_ext)
))
}
let raw_name = stringify!(vkCmdDebugMarkerBeginEXT);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_debug_marker_begin_ext
} else {
::std::mem::transmute(val)
}
},
cmd_debug_marker_end_ext: unsafe {
extern "system" fn cmd_debug_marker_end_ext(
_command_buffer: CommandBuffer,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(cmd_debug_marker_end_ext)
))
}
let raw_name = stringify!(vkCmdDebugMarkerEndEXT);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_debug_marker_end_ext
} else {
::std::mem::transmute(val)
}
},
cmd_debug_marker_insert_ext: unsafe {
extern "system" fn cmd_debug_marker_insert_ext(
_command_buffer: CommandBuffer,
_p_marker_info: *const DebugMarkerMarkerInfoEXT,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(cmd_debug_marker_insert_ext)
))
}
let raw_name = stringify!(vkCmdDebugMarkerInsertEXT);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_debug_marker_insert_ext
} else {
::std::mem::transmute(val)
}
},
}
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkDebugMarkerSetObjectTagEXT.html>"]
pub unsafe fn debug_marker_set_object_tag_ext(
&self,
device: Device,
p_tag_info: *const DebugMarkerObjectTagInfoEXT,
) -> Result {
(self.debug_marker_set_object_tag_ext)(device, p_tag_info)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkDebugMarkerSetObjectNameEXT.html>"]
pub unsafe fn debug_marker_set_object_name_ext(
&self,
device: Device,
p_name_info: *const DebugMarkerObjectNameInfoEXT,
) -> Result {
(self.debug_marker_set_object_name_ext)(device, p_name_info)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdDebugMarkerBeginEXT.html>"]
pub unsafe fn cmd_debug_marker_begin_ext(
&self,
command_buffer: CommandBuffer,
p_marker_info: *const DebugMarkerMarkerInfoEXT,
) -> c_void {
(self.cmd_debug_marker_begin_ext)(command_buffer, p_marker_info)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdDebugMarkerEndEXT.html>"]
pub unsafe fn cmd_debug_marker_end_ext(&self, command_buffer: CommandBuffer) -> c_void {
(self.cmd_debug_marker_end_ext)(command_buffer)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdDebugMarkerInsertEXT.html>"]
pub unsafe fn cmd_debug_marker_insert_ext(
&self,
command_buffer: CommandBuffer,
p_marker_info: *const DebugMarkerMarkerInfoEXT,
) -> c_void {
(self.cmd_debug_marker_insert_ext)(command_buffer, p_marker_info)
}
}
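// Every extension function table below follows the same loading pattern as the
// debug marker table above: `load` first installs a fallback that panics with
// "Unable to load <command>", then asks the caller-supplied closure for the real
// entry point by its Vulkan name (for example vkDebugMarkerSetObjectNameEXT) and
// transmutes the returned pointer into place when it is non-null. The `unsafe fn`
// wrappers simply forward their arguments to the stored pointer.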
#[doc = "Generated from \'VK_EXT_debug_marker\'"]
impl StructureType {
pub const DEBUG_MARKER_OBJECT_NAME_INFO_EXT: Self = StructureType(1000022000);
}
#[doc = "Generated from \'VK_EXT_debug_marker\'"]
impl StructureType {
pub const DEBUG_MARKER_OBJECT_TAG_INFO_EXT: Self = StructureType(1000022001);
}
#[doc = "Generated from \'VK_EXT_debug_marker\'"]
impl StructureType {
pub const DEBUG_MARKER_MARKER_INFO_EXT: Self = StructureType(1000022002);
}
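// Extensions that add no commands (reserved numbers such as VK_AMD_extension_24,
// as well as pure shader/feature extensions) still get an empty *Fn struct so
// that `name()` and `load()` exist uniformly; their `load` ignores the closure.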
impl AmdExtension24Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_AMD_extension_24\0")
.expect("Wrong extension string")
}
}
pub struct AmdExtension24Fn {}
unsafe impl Send for AmdExtension24Fn {}
unsafe impl Sync for AmdExtension24Fn {}
impl ::std::clone::Clone for AmdExtension24Fn {
fn clone(&self) -> Self {
AmdExtension24Fn {}
}
}
impl AmdExtension24Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
AmdExtension24Fn {}
}
}
#[doc = "Generated from \'VK_AMD_extension_24\'"]
impl QueueFlags {
pub const RESERVED_6_KHR: Self = QueueFlags(0b1000000);
}
#[doc = "Generated from \'VK_AMD_extension_24\'"]
impl PipelineStageFlags {
pub const RESERVED_27_KHR: Self = PipelineStageFlags(0b1000000000000000000000000000);
}
#[doc = "Generated from \'VK_AMD_extension_24\'"]
impl AccessFlags {
pub const RESERVED_30_KHR: Self = AccessFlags(0b1000000000000000000000000000000);
}
#[doc = "Generated from \'VK_AMD_extension_24\'"]
impl AccessFlags {
pub const RESERVED_31_KHR: Self = AccessFlags(0b10000000000000000000000000000000);
}
#[doc = "Generated from \'VK_AMD_extension_24\'"]
impl BufferUsageFlags {
pub const RESERVED_15_KHR: Self = BufferUsageFlags(0b1000000000000000);
}
#[doc = "Generated from \'VK_AMD_extension_24\'"]
impl BufferUsageFlags {
pub const RESERVED_16_KHR: Self = BufferUsageFlags(0b10000000000000000);
}
#[doc = "Generated from \'VK_AMD_extension_24\'"]
impl ImageUsageFlags {
pub const RESERVED_13_KHR: Self = ImageUsageFlags(0b10000000000000);
}
#[doc = "Generated from \'VK_AMD_extension_24\'"]
impl ImageUsageFlags {
pub const RESERVED_14_KHR: Self = ImageUsageFlags(0b100000000000000);
}
#[doc = "Generated from \'VK_AMD_extension_24\'"]
impl ImageUsageFlags {
pub const RESERVED_15_KHR: Self = ImageUsageFlags(0b1000000000000000);
}
#[doc = "Generated from \'VK_AMD_extension_24\'"]
impl FormatFeatureFlags {
pub const RESERVED_27_KHR: Self = FormatFeatureFlags(0b1000000000000000000000000000);
}
#[doc = "Generated from \'VK_AMD_extension_24\'"]
impl FormatFeatureFlags {
pub const RESERVED_28_KHR: Self = FormatFeatureFlags(0b10000000000000000000000000000);
}
#[doc = "Generated from \'VK_AMD_extension_24\'"]
impl QueryType {
pub const RESERVED_8: Self = QueryType(1000023008);
}
impl AmdExtension25Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_AMD_extension_25\0")
.expect("Wrong extension string")
}
}
pub struct AmdExtension25Fn {}
unsafe impl Send for AmdExtension25Fn {}
unsafe impl Sync for AmdExtension25Fn {}
impl ::std::clone::Clone for AmdExtension25Fn {
fn clone(&self) -> Self {
AmdExtension25Fn {}
}
}
impl AmdExtension25Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
AmdExtension25Fn {}
}
}
#[doc = "Generated from \'VK_AMD_extension_25\'"]
impl QueueFlags {
pub const RESERVED_5_KHR: Self = QueueFlags(0b100000);
}
#[doc = "Generated from \'VK_AMD_extension_25\'"]
impl PipelineStageFlags {
pub const RESERVED_26_KHR: Self = PipelineStageFlags(0b100000000000000000000000000);
}
#[doc = "Generated from \'VK_AMD_extension_25\'"]
impl AccessFlags {
pub const RESERVED_28_KHR: Self = AccessFlags(0b10000000000000000000000000000);
}
#[doc = "Generated from \'VK_AMD_extension_25\'"]
impl AccessFlags {
pub const RESERVED_29_KHR: Self = AccessFlags(0b100000000000000000000000000000);
}
#[doc = "Generated from \'VK_AMD_extension_25\'"]
impl BufferUsageFlags {
pub const RESERVED_13_KHR: Self = BufferUsageFlags(0b10000000000000);
}
#[doc = "Generated from \'VK_AMD_extension_25\'"]
impl BufferUsageFlags {
pub const RESERVED_14_KHR: Self = BufferUsageFlags(0b100000000000000);
}
#[doc = "Generated from \'VK_AMD_extension_25\'"]
impl ImageUsageFlags {
pub const RESERVED_10_KHR: Self = ImageUsageFlags(0b10000000000);
}
#[doc = "Generated from \'VK_AMD_extension_25\'"]
impl ImageUsageFlags {
pub const RESERVED_11_KHR: Self = ImageUsageFlags(0b100000000000);
}
#[doc = "Generated from \'VK_AMD_extension_25\'"]
impl ImageUsageFlags {
pub const RESERVED_12_KHR: Self = ImageUsageFlags(0b1000000000000);
}
#[doc = "Generated from \'VK_AMD_extension_25\'"]
impl FormatFeatureFlags {
pub const RESERVED_25_KHR: Self = FormatFeatureFlags(0b10000000000000000000000000);
}
#[doc = "Generated from \'VK_AMD_extension_25\'"]
impl FormatFeatureFlags {
pub const RESERVED_26_KHR: Self = FormatFeatureFlags(0b100000000000000000000000000);
}
#[doc = "Generated from \'VK_AMD_extension_25\'"]
impl QueryType {
pub const RESERVED_4: Self = QueryType(1000024004);
}
impl AmdGcnShaderFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_AMD_gcn_shader\0")
.expect("Wrong extension string")
}
}
pub struct AmdGcnShaderFn {}
unsafe impl Send for AmdGcnShaderFn {}
unsafe impl Sync for AmdGcnShaderFn {}
impl ::std::clone::Clone for AmdGcnShaderFn {
fn clone(&self) -> Self {
AmdGcnShaderFn {}
}
}
impl AmdGcnShaderFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
AmdGcnShaderFn {}
}
}
impl NvDedicatedAllocationFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_NV_dedicated_allocation\0")
.expect("Wrong extension string")
}
}
pub struct NvDedicatedAllocationFn {}
unsafe impl Send for NvDedicatedAllocationFn {}
unsafe impl Sync for NvDedicatedAllocationFn {}
impl ::std::clone::Clone for NvDedicatedAllocationFn {
fn clone(&self) -> Self {
NvDedicatedAllocationFn {}
}
}
impl NvDedicatedAllocationFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
NvDedicatedAllocationFn {}
}
}
#[doc = "Generated from \'VK_NV_dedicated_allocation\'"]
impl StructureType {
pub const DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV: Self = StructureType(1000026000);
}
#[doc = "Generated from \'VK_NV_dedicated_allocation\'"]
impl StructureType {
pub const DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV: Self = StructureType(1000026001);
}
#[doc = "Generated from \'VK_NV_dedicated_allocation\'"]
impl StructureType {
pub const DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV: Self = StructureType(1000026002);
}
impl ExtExtension28Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_EXT_extension_28\0")
.expect("Wrong extension string")
}
}
pub struct ExtExtension28Fn {}
unsafe impl Send for ExtExtension28Fn {}
unsafe impl Sync for ExtExtension28Fn {}
impl ::std::clone::Clone for ExtExtension28Fn {
fn clone(&self) -> Self {
ExtExtension28Fn {}
}
}
impl ExtExtension28Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
ExtExtension28Fn {}
}
}
impl ExtTransformFeedbackFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_EXT_transform_feedback\0")
.expect("Wrong extension string")
}
}
#[allow(non_camel_case_types)]
pub type PFN_vkCmdBindTransformFeedbackBuffersEXT = extern "system" fn(
command_buffer: CommandBuffer,
first_binding: u32,
binding_count: u32,
p_buffers: *const Buffer,
p_offsets: *const DeviceSize,
p_sizes: *const DeviceSize,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdBeginTransformFeedbackEXT = extern "system" fn(
command_buffer: CommandBuffer,
first_counter_buffer: u32,
counter_buffer_count: u32,
p_counter_buffers: *const Buffer,
p_counter_buffer_offsets: *const DeviceSize,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdEndTransformFeedbackEXT = extern "system" fn(
command_buffer: CommandBuffer,
first_counter_buffer: u32,
counter_buffer_count: u32,
p_counter_buffers: *const Buffer,
p_counter_buffer_offsets: *const DeviceSize,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdBeginQueryIndexedEXT = extern "system" fn(
command_buffer: CommandBuffer,
query_pool: QueryPool,
query: u32,
flags: QueryControlFlags,
index: u32,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdEndQueryIndexedEXT = extern "system" fn(
command_buffer: CommandBuffer,
query_pool: QueryPool,
query: u32,
index: u32,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdDrawIndirectByteCountEXT = extern "system" fn(
command_buffer: CommandBuffer,
instance_count: u32,
first_instance: u32,
counter_buffer: Buffer,
counter_buffer_offset: DeviceSize,
counter_offset: u32,
vertex_stride: u32,
) -> c_void;
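#[doc = "Function pointer table for VK_EXT_transform_feedback. Entries that the loader closure cannot resolve keep a stub that panics when called."]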
pub struct ExtTransformFeedbackFn {
pub cmd_bind_transform_feedback_buffers_ext: extern "system" fn(
command_buffer: CommandBuffer,
first_binding: u32,
binding_count: u32,
p_buffers: *const Buffer,
p_offsets: *const DeviceSize,
p_sizes: *const DeviceSize,
) -> c_void,
pub cmd_begin_transform_feedback_ext: extern "system" fn(
command_buffer: CommandBuffer,
first_counter_buffer: u32,
counter_buffer_count: u32,
p_counter_buffers: *const Buffer,
p_counter_buffer_offsets: *const DeviceSize,
) -> c_void,
pub cmd_end_transform_feedback_ext: extern "system" fn(
command_buffer: CommandBuffer,
first_counter_buffer: u32,
counter_buffer_count: u32,
p_counter_buffers: *const Buffer,
p_counter_buffer_offsets: *const DeviceSize,
) -> c_void,
pub cmd_begin_query_indexed_ext: extern "system" fn(
command_buffer: CommandBuffer,
query_pool: QueryPool,
query: u32,
flags: QueryControlFlags,
index: u32,
) -> c_void,
pub cmd_end_query_indexed_ext: extern "system" fn(
command_buffer: CommandBuffer,
query_pool: QueryPool,
query: u32,
index: u32,
) -> c_void,
pub cmd_draw_indirect_byte_count_ext: extern "system" fn(
command_buffer: CommandBuffer,
instance_count: u32,
first_instance: u32,
counter_buffer: Buffer,
counter_buffer_offset: DeviceSize,
counter_offset: u32,
vertex_stride: u32,
) -> c_void,
}
unsafe impl Send for ExtTransformFeedbackFn {}
unsafe impl Sync for ExtTransformFeedbackFn {}
impl ::std::clone::Clone for ExtTransformFeedbackFn {
fn clone(&self) -> Self {
ExtTransformFeedbackFn {
cmd_bind_transform_feedback_buffers_ext: self.cmd_bind_transform_feedback_buffers_ext,
cmd_begin_transform_feedback_ext: self.cmd_begin_transform_feedback_ext,
cmd_end_transform_feedback_ext: self.cmd_end_transform_feedback_ext,
cmd_begin_query_indexed_ext: self.cmd_begin_query_indexed_ext,
cmd_end_query_indexed_ext: self.cmd_end_query_indexed_ext,
cmd_draw_indirect_byte_count_ext: self.cmd_draw_indirect_byte_count_ext,
}
}
}
impl ExtTransformFeedbackFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
ExtTransformFeedbackFn {
cmd_bind_transform_feedback_buffers_ext: unsafe {
extern "system" fn cmd_bind_transform_feedback_buffers_ext(
_command_buffer: CommandBuffer,
_first_binding: u32,
_binding_count: u32,
_p_buffers: *const Buffer,
_p_offsets: *const DeviceSize,
_p_sizes: *const DeviceSize,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(cmd_bind_transform_feedback_buffers_ext)
))
}
let raw_name = stringify!(vkCmdBindTransformFeedbackBuffersEXT);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_bind_transform_feedback_buffers_ext
} else {
::std::mem::transmute(val)
}
},
cmd_begin_transform_feedback_ext: unsafe {
extern "system" fn cmd_begin_transform_feedback_ext(
_command_buffer: CommandBuffer,
_first_counter_buffer: u32,
_counter_buffer_count: u32,
_p_counter_buffers: *const Buffer,
_p_counter_buffer_offsets: *const DeviceSize,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(cmd_begin_transform_feedback_ext)
))
}
let raw_name = stringify!(vkCmdBeginTransformFeedbackEXT);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_begin_transform_feedback_ext
} else {
::std::mem::transmute(val)
}
},
cmd_end_transform_feedback_ext: unsafe {
extern "system" fn cmd_end_transform_feedback_ext(
_command_buffer: CommandBuffer,
_first_counter_buffer: u32,
_counter_buffer_count: u32,
_p_counter_buffers: *const Buffer,
_p_counter_buffer_offsets: *const DeviceSize,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(cmd_end_transform_feedback_ext)
))
}
let raw_name = stringify!(vkCmdEndTransformFeedbackEXT);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_end_transform_feedback_ext
} else {
::std::mem::transmute(val)
}
},
cmd_begin_query_indexed_ext: unsafe {
extern "system" fn cmd_begin_query_indexed_ext(
_command_buffer: CommandBuffer,
_query_pool: QueryPool,
_query: u32,
_flags: QueryControlFlags,
_index: u32,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(cmd_begin_query_indexed_ext)
))
}
let raw_name = stringify!(vkCmdBeginQueryIndexedEXT);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_begin_query_indexed_ext
} else {
::std::mem::transmute(val)
}
},
cmd_end_query_indexed_ext: unsafe {
extern "system" fn cmd_end_query_indexed_ext(
_command_buffer: CommandBuffer,
_query_pool: QueryPool,
_query: u32,
_index: u32,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(cmd_end_query_indexed_ext)
))
}
let raw_name = stringify!(vkCmdEndQueryIndexedEXT);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_end_query_indexed_ext
} else {
::std::mem::transmute(val)
}
},
cmd_draw_indirect_byte_count_ext: unsafe {
extern "system" fn cmd_draw_indirect_byte_count_ext(
_command_buffer: CommandBuffer,
_instance_count: u32,
_first_instance: u32,
_counter_buffer: Buffer,
_counter_buffer_offset: DeviceSize,
_counter_offset: u32,
_vertex_stride: u32,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(cmd_draw_indirect_byte_count_ext)
))
}
let raw_name = stringify!(vkCmdDrawIndirectByteCountEXT);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_draw_indirect_byte_count_ext
} else {
::std::mem::transmute(val)
}
},
}
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdBindTransformFeedbackBuffersEXT.html>"]
pub unsafe fn cmd_bind_transform_feedback_buffers_ext(
&self,
command_buffer: CommandBuffer,
first_binding: u32,
binding_count: u32,
p_buffers: *const Buffer,
p_offsets: *const DeviceSize,
p_sizes: *const DeviceSize,
) -> c_void {
(self.cmd_bind_transform_feedback_buffers_ext)(
command_buffer,
first_binding,
binding_count,
p_buffers,
p_offsets,
p_sizes,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdBeginTransformFeedbackEXT.html>"]
pub unsafe fn cmd_begin_transform_feedback_ext(
&self,
command_buffer: CommandBuffer,
first_counter_buffer: u32,
counter_buffer_count: u32,
p_counter_buffers: *const Buffer,
p_counter_buffer_offsets: *const DeviceSize,
) -> c_void {
(self.cmd_begin_transform_feedback_ext)(
command_buffer,
first_counter_buffer,
counter_buffer_count,
p_counter_buffers,
p_counter_buffer_offsets,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdEndTransformFeedbackEXT.html>"]
pub unsafe fn cmd_end_transform_feedback_ext(
&self,
command_buffer: CommandBuffer,
first_counter_buffer: u32,
counter_buffer_count: u32,
p_counter_buffers: *const Buffer,
p_counter_buffer_offsets: *const DeviceSize,
) -> c_void {
(self.cmd_end_transform_feedback_ext)(
command_buffer,
first_counter_buffer,
counter_buffer_count,
p_counter_buffers,
p_counter_buffer_offsets,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdBeginQueryIndexedEXT.html>"]
pub unsafe fn cmd_begin_query_indexed_ext(
&self,
command_buffer: CommandBuffer,
query_pool: QueryPool,
query: u32,
flags: QueryControlFlags,
index: u32,
) -> c_void {
(self.cmd_begin_query_indexed_ext)(command_buffer, query_pool, query, flags, index)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdEndQueryIndexedEXT.html>"]
pub unsafe fn cmd_end_query_indexed_ext(
&self,
command_buffer: CommandBuffer,
query_pool: QueryPool,
query: u32,
index: u32,
) -> c_void {
(self.cmd_end_query_indexed_ext)(command_buffer, query_pool, query, index)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdDrawIndirectByteCountEXT.html>"]
pub unsafe fn cmd_draw_indirect_byte_count_ext(
&self,
command_buffer: CommandBuffer,
instance_count: u32,
first_instance: u32,
counter_buffer: Buffer,
counter_buffer_offset: DeviceSize,
counter_offset: u32,
vertex_stride: u32,
) -> c_void {
(self.cmd_draw_indirect_byte_count_ext)(
command_buffer,
instance_count,
first_instance,
counter_buffer,
counter_buffer_offset,
counter_offset,
vertex_stride,
)
}
}
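// Illustrative sketch only (not part of the generated bindings): a table such as
// `ExtTransformFeedbackFn` is typically filled by forwarding each requested name
// to vkGetDeviceProcAddr. The `get_device_proc_addr` closure and `device` handle
// below are hypothetical and assumed to be in scope.
//
//     let transform_feedback = ExtTransformFeedbackFn::load(|name| unsafe {
//         ::std::mem::transmute(get_device_proc_addr(device, name.as_ptr()))
//     });
//
// Any name resolved to null keeps its panicking stub, so calling a wrapper for an
// extension the device does not enable panics with "Unable to load ...".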
#[doc = "Generated from \'VK_EXT_transform_feedback\'"]
impl StructureType {
pub const PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT: Self = StructureType(1000028000);
}
#[doc = "Generated from \'VK_EXT_transform_feedback\'"]
impl StructureType {
pub const PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT: Self = StructureType(1000028001);
}
#[doc = "Generated from \'VK_EXT_transform_feedback\'"]
impl StructureType {
pub const PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT: Self = StructureType(1000028002);
}
#[doc = "Generated from \'VK_EXT_transform_feedback\'"]
impl QueryType {
pub const TRANSFORM_FEEDBACK_STREAM_EXT: Self = QueryType(1000028004);
}
#[doc = "Generated from \'VK_EXT_transform_feedback\'"]
impl BufferUsageFlags {
pub const TRANSFORM_FEEDBACK_BUFFER_EXT: Self = BufferUsageFlags(0b100000000000);
}
#[doc = "Generated from \'VK_EXT_transform_feedback\'"]
impl BufferUsageFlags {
pub const TRANSFORM_FEEDBACK_COUNTER_BUFFER_EXT: Self = BufferUsageFlags(0b1000000000000);
}
#[doc = "Generated from \'VK_EXT_transform_feedback\'"]
impl AccessFlags {
pub const TRANSFORM_FEEDBACK_WRITE_EXT: Self = AccessFlags(0b10000000000000000000000000);
}
#[doc = "Generated from \'VK_EXT_transform_feedback\'"]
impl AccessFlags {
pub const TRANSFORM_FEEDBACK_COUNTER_READ_EXT: Self =
AccessFlags(0b100000000000000000000000000);
}
#[doc = "Generated from \'VK_EXT_transform_feedback\'"]
impl AccessFlags {
pub const TRANSFORM_FEEDBACK_COUNTER_WRITE_EXT: Self =
AccessFlags(0b1000000000000000000000000000);
}
#[doc = "Generated from \'VK_EXT_transform_feedback\'"]
impl PipelineStageFlags {
pub const TRANSFORM_FEEDBACK_EXT: Self = PipelineStageFlags(0b1000000000000000000000000);
}
impl NvxExtension30Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_NVX_extension_30\0")
.expect("Wrong extension string")
}
}
pub struct NvxExtension30Fn {}
unsafe impl Send for NvxExtension30Fn {}
unsafe impl Sync for NvxExtension30Fn {}
impl ::std::clone::Clone for NvxExtension30Fn {
fn clone(&self) -> Self {
NvxExtension30Fn {}
}
}
impl NvxExtension30Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
NvxExtension30Fn {}
}
}
impl NvxExtension31Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_NVX_extension_31\0")
.expect("Wrong extension string")
}
}
pub struct NvxExtension31Fn {}
unsafe impl Send for NvxExtension31Fn {}
unsafe impl Sync for NvxExtension31Fn {}
impl ::std::clone::Clone for NvxExtension31Fn {
fn clone(&self) -> Self {
NvxExtension31Fn {}
}
}
impl NvxExtension31Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
NvxExtension31Fn {}
}
}
impl AmdExtension32Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_AMD_extension_32\0")
.expect("Wrong extension string")
}
}
pub struct AmdExtension32Fn {}
unsafe impl Send for AmdExtension32Fn {}
unsafe impl Sync for AmdExtension32Fn {}
impl ::std::clone::Clone for AmdExtension32Fn {
fn clone(&self) -> Self {
AmdExtension32Fn {}
}
}
impl AmdExtension32Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
AmdExtension32Fn {}
}
}
impl AmdExtension33Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_AMD_extension_33\0")
.expect("Wrong extension string")
}
}
pub struct AmdExtension33Fn {}
unsafe impl Send for AmdExtension33Fn {}
unsafe impl Sync for AmdExtension33Fn {}
impl ::std::clone::Clone for AmdExtension33Fn {
fn clone(&self) -> Self {
AmdExtension33Fn {}
}
}
impl AmdExtension33Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
AmdExtension33Fn {}
}
}
impl AmdDrawIndirectCountFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_AMD_draw_indirect_count\0")
.expect("Wrong extension string")
}
}
#[allow(non_camel_case_types)]
pub type PFN_vkCmdDrawIndirectCountAMD = extern "system" fn(
command_buffer: CommandBuffer,
buffer: Buffer,
offset: DeviceSize,
count_buffer: Buffer,
count_buffer_offset: DeviceSize,
max_draw_count: u32,
stride: u32,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdDrawIndexedIndirectCountAMD = extern "system" fn(
command_buffer: CommandBuffer,
buffer: Buffer,
offset: DeviceSize,
count_buffer: Buffer,
count_buffer_offset: DeviceSize,
max_draw_count: u32,
stride: u32,
) -> c_void;
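#[doc = "Function pointer table for VK_AMD_draw_indirect_count. Both commands read the draw count from a device buffer rather than taking it as a host argument."]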
pub struct AmdDrawIndirectCountFn {
pub cmd_draw_indirect_count_amd: extern "system" fn(
command_buffer: CommandBuffer,
buffer: Buffer,
offset: DeviceSize,
count_buffer: Buffer,
count_buffer_offset: DeviceSize,
max_draw_count: u32,
stride: u32,
) -> c_void,
pub cmd_draw_indexed_indirect_count_amd: extern "system" fn(
command_buffer: CommandBuffer,
buffer: Buffer,
offset: DeviceSize,
count_buffer: Buffer,
count_buffer_offset: DeviceSize,
max_draw_count: u32,
stride: u32,
) -> c_void,
}
unsafe impl Send for AmdDrawIndirectCountFn {}
unsafe impl Sync for AmdDrawIndirectCountFn {}
impl ::std::clone::Clone for AmdDrawIndirectCountFn {
fn clone(&self) -> Self {
AmdDrawIndirectCountFn {
cmd_draw_indirect_count_amd: self.cmd_draw_indirect_count_amd,
cmd_draw_indexed_indirect_count_amd: self.cmd_draw_indexed_indirect_count_amd,
}
}
}
impl AmdDrawIndirectCountFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
AmdDrawIndirectCountFn {
cmd_draw_indirect_count_amd: unsafe {
extern "system" fn cmd_draw_indirect_count_amd(
_command_buffer: CommandBuffer,
_buffer: Buffer,
_offset: DeviceSize,
_count_buffer: Buffer,
_count_buffer_offset: DeviceSize,
_max_draw_count: u32,
_stride: u32,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(cmd_draw_indirect_count_amd)
))
}
let raw_name = stringify!(vkCmdDrawIndirectCountAMD);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_draw_indirect_count_amd
} else {
::std::mem::transmute(val)
}
},
cmd_draw_indexed_indirect_count_amd: unsafe {
extern "system" fn cmd_draw_indexed_indirect_count_amd(
_command_buffer: CommandBuffer,
_buffer: Buffer,
_offset: DeviceSize,
_count_buffer: Buffer,
_count_buffer_offset: DeviceSize,
_max_draw_count: u32,
_stride: u32,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(cmd_draw_indexed_indirect_count_amd)
))
}
let raw_name = stringify!(vkCmdDrawIndexedIndirectCountAMD);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_draw_indexed_indirect_count_amd
} else {
::std::mem::transmute(val)
}
},
}
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdDrawIndirectCountAMD.html>"]
pub unsafe fn cmd_draw_indirect_count_amd(
&self,
command_buffer: CommandBuffer,
buffer: Buffer,
offset: DeviceSize,
count_buffer: Buffer,
count_buffer_offset: DeviceSize,
max_draw_count: u32,
stride: u32,
) -> c_void {
(self.cmd_draw_indirect_count_amd)(
command_buffer,
buffer,
offset,
count_buffer,
count_buffer_offset,
max_draw_count,
stride,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdDrawIndexedIndirectCountAMD.html>"]
pub unsafe fn cmd_draw_indexed_indirect_count_amd(
&self,
command_buffer: CommandBuffer,
buffer: Buffer,
offset: DeviceSize,
count_buffer: Buffer,
count_buffer_offset: DeviceSize,
max_draw_count: u32,
stride: u32,
) -> c_void {
(self.cmd_draw_indexed_indirect_count_amd)(
command_buffer,
buffer,
offset,
count_buffer,
count_buffer_offset,
max_draw_count,
stride,
)
}
}
impl AmdExtension35Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_AMD_extension_35\0")
.expect("Wrong extension string")
}
}
pub struct AmdExtension35Fn {}
unsafe impl Send for AmdExtension35Fn {}
unsafe impl Sync for AmdExtension35Fn {}
impl ::std::clone::Clone for AmdExtension35Fn {
fn clone(&self) -> Self {
AmdExtension35Fn {}
}
}
impl AmdExtension35Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
AmdExtension35Fn {}
}
}
impl AmdNegativeViewportHeightFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_AMD_negative_viewport_height\0")
.expect("Wrong extension string")
}
}
pub struct AmdNegativeViewportHeightFn {}
unsafe impl Send for AmdNegativeViewportHeightFn {}
unsafe impl Sync for AmdNegativeViewportHeightFn {}
impl ::std::clone::Clone for AmdNegativeViewportHeightFn {
fn clone(&self) -> Self {
AmdNegativeViewportHeightFn {}
}
}
impl AmdNegativeViewportHeightFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
AmdNegativeViewportHeightFn {}
}
}
impl AmdGpuShaderHalfFloatFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_AMD_gpu_shader_half_float\0")
.expect("Wrong extension string")
}
}
pub struct AmdGpuShaderHalfFloatFn {}
unsafe impl Send for AmdGpuShaderHalfFloatFn {}
unsafe impl Sync for AmdGpuShaderHalfFloatFn {}
impl ::std::clone::Clone for AmdGpuShaderHalfFloatFn {
fn clone(&self) -> Self {
AmdGpuShaderHalfFloatFn {}
}
}
impl AmdGpuShaderHalfFloatFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
AmdGpuShaderHalfFloatFn {}
}
}
impl AmdShaderBallotFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_AMD_shader_ballot\0")
.expect("Wrong extension string")
}
}
pub struct AmdShaderBallotFn {}
unsafe impl Send for AmdShaderBallotFn {}
unsafe impl Sync for AmdShaderBallotFn {}
impl ::std::clone::Clone for AmdShaderBallotFn {
fn clone(&self) -> Self {
AmdShaderBallotFn {}
}
}
impl AmdShaderBallotFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
AmdShaderBallotFn {}
}
}
impl AmdExtension39Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_AMD_extension_39\0")
.expect("Wrong extension string")
}
}
pub struct AmdExtension39Fn {}
unsafe impl Send for AmdExtension39Fn {}
unsafe impl Sync for AmdExtension39Fn {}
impl ::std::clone::Clone for AmdExtension39Fn {
fn clone(&self) -> Self {
AmdExtension39Fn {}
}
}
impl AmdExtension39Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
AmdExtension39Fn {}
}
}
impl AmdExtension40Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_AMD_extension_40\0")
.expect("Wrong extension string")
}
}
pub struct AmdExtension40Fn {}
unsafe impl Send for AmdExtension40Fn {}
unsafe impl Sync for AmdExtension40Fn {}
impl ::std::clone::Clone for AmdExtension40Fn {
fn clone(&self) -> Self {
AmdExtension40Fn {}
}
}
impl AmdExtension40Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
AmdExtension40Fn {}
}
}
impl AmdExtension41Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_AMD_extension_41\0")
.expect("Wrong extension string")
}
}
pub struct AmdExtension41Fn {}
unsafe impl Send for AmdExtension41Fn {}
unsafe impl Sync for AmdExtension41Fn {}
impl ::std::clone::Clone for AmdExtension41Fn {
fn clone(&self) -> Self {
AmdExtension41Fn {}
}
}
impl AmdExtension41Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
AmdExtension41Fn {}
}
}
impl AmdTextureGatherBiasLodFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_AMD_texture_gather_bias_lod\0")
.expect("Wrong extension string")
}
}
pub struct AmdTextureGatherBiasLodFn {}
unsafe impl Send for AmdTextureGatherBiasLodFn {}
unsafe impl Sync for AmdTextureGatherBiasLodFn {}
impl ::std::clone::Clone for AmdTextureGatherBiasLodFn {
fn clone(&self) -> Self {
AmdTextureGatherBiasLodFn {}
}
}
impl AmdTextureGatherBiasLodFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
AmdTextureGatherBiasLodFn {}
}
}
#[doc = "Generated from \'VK_AMD_texture_gather_bias_lod\'"]
impl StructureType {
pub const TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD: Self = StructureType(1000041000);
}
impl AmdShaderInfoFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_AMD_shader_info\0")
.expect("Wrong extension string")
}
}
#[allow(non_camel_case_types)]
pub type PFN_vkGetShaderInfoAMD = extern "system" fn(
device: Device,
pipeline: Pipeline,
shader_stage: ShaderStageFlags,
info_type: ShaderInfoTypeAMD,
p_info_size: *mut usize,
p_info: *mut c_void,
) -> Result;
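#[doc = "Function pointer table for VK_AMD_shader_info, used to query statistics, binary code or disassembly for a pipeline shader stage via vkGetShaderInfoAMD."]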
pub struct AmdShaderInfoFn {
pub get_shader_info_amd: extern "system" fn(
device: Device,
pipeline: Pipeline,
shader_stage: ShaderStageFlags,
info_type: ShaderInfoTypeAMD,
p_info_size: *mut usize,
p_info: *mut c_void,
) -> Result,
}
unsafe impl Send for AmdShaderInfoFn {}
unsafe impl Sync for AmdShaderInfoFn {}
impl ::std::clone::Clone for AmdShaderInfoFn {
fn clone(&self) -> Self {
AmdShaderInfoFn {
get_shader_info_amd: self.get_shader_info_amd,
}
}
}
impl AmdShaderInfoFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
AmdShaderInfoFn {
get_shader_info_amd: unsafe {
extern "system" fn get_shader_info_amd(
_device: Device,
_pipeline: Pipeline,
_shader_stage: ShaderStageFlags,
_info_type: ShaderInfoTypeAMD,
_p_info_size: *mut usize,
_p_info: *mut c_void,
) -> Result {
panic!(concat!("Unable to load ", stringify!(get_shader_info_amd)))
}
let raw_name = stringify!(vkGetShaderInfoAMD);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_shader_info_amd
} else {
::std::mem::transmute(val)
}
},
}
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetShaderInfoAMD.html>"]
pub unsafe fn get_shader_info_amd(
&self,
device: Device,
pipeline: Pipeline,
shader_stage: ShaderStageFlags,
info_type: ShaderInfoTypeAMD,
p_info_size: *mut usize,
p_info: *mut c_void,
) -> Result {
(self.get_shader_info_amd)(
device,
pipeline,
shader_stage,
info_type,
p_info_size,
p_info,
)
}
}
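// Illustrative sketch only: like most size-returning Vulkan queries,
// vkGetShaderInfoAMD is usually called twice, first with a null `p_info` to obtain
// the required size and then again with an allocated buffer. The `shader_info`,
// `device` and `pipeline` values below are assumed to exist, and variant names
// such as ShaderInfoTypeAMD::STATISTICS are assumed from the generator's naming
// convention.
//
//     let mut size = 0usize;
//     unsafe {
//         shader_info.get_shader_info_amd(
//             device, pipeline,
//             ShaderStageFlags::VERTEX, ShaderInfoTypeAMD::STATISTICS,
//             &mut size, ::std::ptr::null_mut(),
//         );
//         let mut data = vec![0u8; size];
//         shader_info.get_shader_info_amd(
//             device, pipeline,
//             ShaderStageFlags::VERTEX, ShaderInfoTypeAMD::STATISTICS,
//             &mut size, data.as_mut_ptr() as *mut c_void,
//         );
//     }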
impl AmdExtension44Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_AMD_extension_44\0")
.expect("Wrong extension string")
}
}
pub struct AmdExtension44Fn {}
unsafe impl Send for AmdExtension44Fn {}
unsafe impl Sync for AmdExtension44Fn {}
impl ::std::clone::Clone for AmdExtension44Fn {
fn clone(&self) -> Self {
AmdExtension44Fn {}
}
}
impl AmdExtension44Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
AmdExtension44Fn {}
}
}
impl AmdExtension45Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_AMD_extension_45\0")
.expect("Wrong extension string")
}
}
pub struct AmdExtension45Fn {}
unsafe impl Send for AmdExtension45Fn {}
unsafe impl Sync for AmdExtension45Fn {}
impl ::std::clone::Clone for AmdExtension45Fn {
fn clone(&self) -> Self {
AmdExtension45Fn {}
}
}
impl AmdExtension45Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
AmdExtension45Fn {}
}
}
impl AmdExtension46Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_AMD_extension_46\0")
.expect("Wrong extension string")
}
}
pub struct AmdExtension46Fn {}
unsafe impl Send for AmdExtension46Fn {}
unsafe impl Sync for AmdExtension46Fn {}
impl ::std::clone::Clone for AmdExtension46Fn {
fn clone(&self) -> Self {
AmdExtension46Fn {}
}
}
impl AmdExtension46Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
AmdExtension46Fn {}
}
}
impl AmdShaderImageLoadStoreLodFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_AMD_shader_image_load_store_lod\0")
.expect("Wrong extension string")
}
}
pub struct AmdShaderImageLoadStoreLodFn {}
unsafe impl Send for AmdShaderImageLoadStoreLodFn {}
unsafe impl Sync for AmdShaderImageLoadStoreLodFn {}
impl ::std::clone::Clone for AmdShaderImageLoadStoreLodFn {
fn clone(&self) -> Self {
AmdShaderImageLoadStoreLodFn {}
}
}
impl AmdShaderImageLoadStoreLodFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
AmdShaderImageLoadStoreLodFn {}
}
}
impl NvxExtension48Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_NVX_extension_48\0")
.expect("Wrong extension string")
}
}
pub struct NvxExtension48Fn {}
unsafe impl Send for NvxExtension48Fn {}
unsafe impl Sync for NvxExtension48Fn {}
impl ::std::clone::Clone for NvxExtension48Fn {
fn clone(&self) -> Self {
NvxExtension48Fn {}
}
}
impl NvxExtension48Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
NvxExtension48Fn {}
}
}
impl GoogleExtension49Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_GOOGLE_extension_49\0")
.expect("Wrong extension string")
}
}
pub struct GoogleExtension49Fn {}
unsafe impl Send for GoogleExtension49Fn {}
unsafe impl Sync for GoogleExtension49Fn {}
impl ::std::clone::Clone for GoogleExtension49Fn {
fn clone(&self) -> Self {
GoogleExtension49Fn {}
}
}
impl GoogleExtension49Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
GoogleExtension49Fn {}
}
}
impl GoogleExtension50Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_GOOGLE_extension_50\0")
.expect("Wrong extension string")
}
}
pub struct GoogleExtension50Fn {}
unsafe impl Send for GoogleExtension50Fn {}
unsafe impl Sync for GoogleExtension50Fn {}
impl ::std::clone::Clone for GoogleExtension50Fn {
fn clone(&self) -> Self {
GoogleExtension50Fn {}
}
}
impl GoogleExtension50Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
GoogleExtension50Fn {}
}
}
impl NvCornerSampledImageFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_NV_corner_sampled_image\0")
.expect("Wrong extension string")
}
}
pub struct NvCornerSampledImageFn {}
unsafe impl Send for NvCornerSampledImageFn {}
unsafe impl Sync for NvCornerSampledImageFn {}
impl ::std::clone::Clone for NvCornerSampledImageFn {
fn clone(&self) -> Self {
NvCornerSampledImageFn {}
}
}
impl NvCornerSampledImageFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
NvCornerSampledImageFn {}
}
}
#[doc = "Generated from \'VK_NV_corner_sampled_image\'"]
impl ImageCreateFlags {
pub const CORNER_SAMPLED_NV: Self = ImageCreateFlags(0b10000000000000);
}
#[doc = "Generated from \'VK_NV_corner_sampled_image\'"]
impl StructureType {
pub const PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV: Self = StructureType(1000050000);
}
impl NvxExtension52Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_NVX_extension_52\0")
.expect("Wrong extension string")
}
}
pub struct NvxExtension52Fn {}
unsafe impl Send for NvxExtension52Fn {}
unsafe impl Sync for NvxExtension52Fn {}
impl ::std::clone::Clone for NvxExtension52Fn {
fn clone(&self) -> Self {
NvxExtension52Fn {}
}
}
impl NvxExtension52Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
NvxExtension52Fn {}
}
}
impl NvExtension53Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_NV_extension_53\0")
.expect("Wrong extension string")
}
}
pub struct NvExtension53Fn {}
unsafe impl Send for NvExtension53Fn {}
unsafe impl Sync for NvExtension53Fn {}
impl ::std::clone::Clone for NvExtension53Fn {
fn clone(&self) -> Self {
NvExtension53Fn {}
}
}
impl NvExtension53Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
NvExtension53Fn {}
}
}
impl KhrMultiviewFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_KHR_multiview\0")
.expect("Wrong extension string")
}
}
pub struct KhrMultiviewFn {}
unsafe impl Send for KhrMultiviewFn {}
unsafe impl Sync for KhrMultiviewFn {}
impl ::std::clone::Clone for KhrMultiviewFn {
fn clone(&self) -> Self {
KhrMultiviewFn {}
}
}
impl KhrMultiviewFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
KhrMultiviewFn {}
}
}
impl ImgFormatPvrtcFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_IMG_format_pvrtc\0")
.expect("Wrong extension string")
}
}
pub struct ImgFormatPvrtcFn {}
unsafe impl Send for ImgFormatPvrtcFn {}
unsafe impl Sync for ImgFormatPvrtcFn {}
impl ::std::clone::Clone for ImgFormatPvrtcFn {
fn clone(&self) -> Self {
ImgFormatPvrtcFn {}
}
}
impl ImgFormatPvrtcFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
ImgFormatPvrtcFn {}
}
}
#[doc = "Generated from \'VK_IMG_format_pvrtc\'"]
impl Format {
pub const PVRTC1_2BPP_UNORM_BLOCK_IMG: Self = Format(1000054000);
}
#[doc = "Generated from \'VK_IMG_format_pvrtc\'"]
impl Format {
pub const PVRTC1_4BPP_UNORM_BLOCK_IMG: Self = Format(1000054001);
}
#[doc = "Generated from \'VK_IMG_format_pvrtc\'"]
impl Format {
pub const PVRTC2_2BPP_UNORM_BLOCK_IMG: Self = Format(1000054002);
}
#[doc = "Generated from \'VK_IMG_format_pvrtc\'"]
impl Format {
pub const PVRTC2_4BPP_UNORM_BLOCK_IMG: Self = Format(1000054003);
}
#[doc = "Generated from \'VK_IMG_format_pvrtc\'"]
impl Format {
pub const PVRTC1_2BPP_SRGB_BLOCK_IMG: Self = Format(1000054004);
}
#[doc = "Generated from \'VK_IMG_format_pvrtc\'"]
impl Format {
pub const PVRTC1_4BPP_SRGB_BLOCK_IMG: Self = Format(1000054005);
}
#[doc = "Generated from \'VK_IMG_format_pvrtc\'"]
impl Format {
pub const PVRTC2_2BPP_SRGB_BLOCK_IMG: Self = Format(1000054006);
}
#[doc = "Generated from \'VK_IMG_format_pvrtc\'"]
impl Format {
pub const PVRTC2_4BPP_SRGB_BLOCK_IMG: Self = Format(1000054007);
}
impl NvExternalMemoryCapabilitiesFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_NV_external_memory_capabilities\0")
.expect("Wrong extension string")
}
}
#[allow(non_camel_case_types)]
pub type PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV = extern "system" fn(
physical_device: PhysicalDevice,
format: Format,
ty: ImageType,
tiling: ImageTiling,
usage: ImageUsageFlags,
flags: ImageCreateFlags,
external_handle_type: ExternalMemoryHandleTypeFlagsNV,
p_external_image_format_properties: *mut ExternalImageFormatPropertiesNV,
) -> Result;
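#[doc = "Function pointer table for VK_NV_external_memory_capabilities; its single command reports which external memory handle types are supported for a given image format, type, tiling, usage and create flags."]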
pub struct NvExternalMemoryCapabilitiesFn {
pub get_physical_device_external_image_format_properties_nv: extern "system" fn(
physical_device: PhysicalDevice,
format: Format,
ty: ImageType,
tiling: ImageTiling,
usage: ImageUsageFlags,
flags: ImageCreateFlags,
external_handle_type: ExternalMemoryHandleTypeFlagsNV,
p_external_image_format_properties: *mut ExternalImageFormatPropertiesNV,
) -> Result,
}
unsafe impl Send for NvExternalMemoryCapabilitiesFn {}
unsafe impl Sync for NvExternalMemoryCapabilitiesFn {}
impl ::std::clone::Clone for NvExternalMemoryCapabilitiesFn {
fn clone(&self) -> Self {
NvExternalMemoryCapabilitiesFn {
get_physical_device_external_image_format_properties_nv: self
.get_physical_device_external_image_format_properties_nv,
}
}
}
impl NvExternalMemoryCapabilitiesFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
NvExternalMemoryCapabilitiesFn {
get_physical_device_external_image_format_properties_nv: unsafe {
extern "system" fn get_physical_device_external_image_format_properties_nv(
_physical_device: PhysicalDevice,
_format: Format,
_ty: ImageType,
_tiling: ImageTiling,
_usage: ImageUsageFlags,
_flags: ImageCreateFlags,
_external_handle_type: ExternalMemoryHandleTypeFlagsNV,
_p_external_image_format_properties: *mut ExternalImageFormatPropertiesNV,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(get_physical_device_external_image_format_properties_nv)
))
}
let raw_name = stringify!(vkGetPhysicalDeviceExternalImageFormatPropertiesNV);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_physical_device_external_image_format_properties_nv
} else {
::std::mem::transmute(val)
}
},
}
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetPhysicalDeviceExternalImageFormatPropertiesNV.html>"]
pub unsafe fn get_physical_device_external_image_format_properties_nv(
&self,
physical_device: PhysicalDevice,
format: Format,
ty: ImageType,
tiling: ImageTiling,
usage: ImageUsageFlags,
flags: ImageCreateFlags,
external_handle_type: ExternalMemoryHandleTypeFlagsNV,
p_external_image_format_properties: *mut ExternalImageFormatPropertiesNV,
) -> Result {
(self.get_physical_device_external_image_format_properties_nv)(
physical_device,
format,
ty,
tiling,
usage,
flags,
external_handle_type,
p_external_image_format_properties,
)
}
}
impl NvExternalMemoryFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_NV_external_memory\0")
.expect("Wrong extension string")
}
}
pub struct NvExternalMemoryFn {}
unsafe impl Send for NvExternalMemoryFn {}
unsafe impl Sync for NvExternalMemoryFn {}
impl ::std::clone::Clone for NvExternalMemoryFn {
fn clone(&self) -> Self {
NvExternalMemoryFn {}
}
}
impl NvExternalMemoryFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
NvExternalMemoryFn {}
}
}
#[doc = "Generated from \'VK_NV_external_memory\'"]
impl StructureType {
pub const EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV: Self = StructureType(1000056000);
}
#[doc = "Generated from \'VK_NV_external_memory\'"]
impl StructureType {
pub const EXPORT_MEMORY_ALLOCATE_INFO_NV: Self = StructureType(1000056001);
}
impl NvExternalMemoryWin32Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_NV_external_memory_win32\0")
.expect("Wrong extension string")
}
}
#[allow(non_camel_case_types)]
pub type PFN_vkGetMemoryWin32HandleNV = extern "system" fn(
device: Device,
memory: DeviceMemory,
handle_type: ExternalMemoryHandleTypeFlagsNV,
p_handle: *mut HANDLE,
) -> Result;
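#[doc = "Function pointer table for VK_NV_external_memory_win32; vkGetMemoryWin32HandleNV exports a Win32 HANDLE backing a DeviceMemory allocation."]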
pub struct NvExternalMemoryWin32Fn {
pub get_memory_win32_handle_nv: extern "system" fn(
device: Device,
memory: DeviceMemory,
handle_type: ExternalMemoryHandleTypeFlagsNV,
p_handle: *mut HANDLE,
) -> Result,
}
unsafe impl Send for NvExternalMemoryWin32Fn {}
unsafe impl Sync for NvExternalMemoryWin32Fn {}
impl ::std::clone::Clone for NvExternalMemoryWin32Fn {
fn clone(&self) -> Self {
NvExternalMemoryWin32Fn {
get_memory_win32_handle_nv: self.get_memory_win32_handle_nv,
}
}
}
impl NvExternalMemoryWin32Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
NvExternalMemoryWin32Fn {
get_memory_win32_handle_nv: unsafe {
extern "system" fn get_memory_win32_handle_nv(
_device: Device,
_memory: DeviceMemory,
_handle_type: ExternalMemoryHandleTypeFlagsNV,
_p_handle: *mut HANDLE,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(get_memory_win32_handle_nv)
))
}
let raw_name = stringify!(vkGetMemoryWin32HandleNV);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_memory_win32_handle_nv
} else {
::std::mem::transmute(val)
}
},
}
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetMemoryWin32HandleNV.html>"]
pub unsafe fn get_memory_win32_handle_nv(
&self,
device: Device,
memory: DeviceMemory,
handle_type: ExternalMemoryHandleTypeFlagsNV,
p_handle: *mut HANDLE,
) -> Result {
(self.get_memory_win32_handle_nv)(device, memory, handle_type, p_handle)
}
}
#[doc = "Generated from \'VK_NV_external_memory_win32\'"]
impl StructureType {
pub const IMPORT_MEMORY_WIN32_HANDLE_INFO_NV: Self = StructureType(1000057000);
}
#[doc = "Generated from \'VK_NV_external_memory_win32\'"]
impl StructureType {
pub const EXPORT_MEMORY_WIN32_HANDLE_INFO_NV: Self = StructureType(1000057001);
}
impl NvWin32KeyedMutexFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_NV_win32_keyed_mutex\0")
.expect("Wrong extension string")
}
}
pub struct NvWin32KeyedMutexFn {}
unsafe impl Send for NvWin32KeyedMutexFn {}
unsafe impl Sync for NvWin32KeyedMutexFn {}
impl ::std::clone::Clone for NvWin32KeyedMutexFn {
fn clone(&self) -> Self {
NvWin32KeyedMutexFn {}
}
}
impl NvWin32KeyedMutexFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
NvWin32KeyedMutexFn {}
}
}
#[doc = "Generated from \'VK_NV_win32_keyed_mutex\'"]
impl StructureType {
pub const WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV: Self = StructureType(1000058000);
}
impl KhrGetPhysicalDeviceProperties2Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_KHR_get_physical_device_properties2\0")
.expect("Wrong extension string")
}
}
pub struct KhrGetPhysicalDeviceProperties2Fn {}
unsafe impl Send for KhrGetPhysicalDeviceProperties2Fn {}
unsafe impl Sync for KhrGetPhysicalDeviceProperties2Fn {}
impl ::std::clone::Clone for KhrGetPhysicalDeviceProperties2Fn {
fn clone(&self) -> Self {
KhrGetPhysicalDeviceProperties2Fn {}
}
}
impl KhrGetPhysicalDeviceProperties2Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
KhrGetPhysicalDeviceProperties2Fn {}
}
}
impl KhrDeviceGroupFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_KHR_device_group\0")
.expect("Wrong extension string")
}
}
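#[doc = "Function pointer table for the device-group presentation commands of VK_KHR_device_group: present capabilities, surface present modes, present rectangles and vkAcquireNextImage2KHR."]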
pub struct KhrDeviceGroupFn {
pub get_device_group_present_capabilities_khr: extern "system" fn(
device: Device,
p_device_group_present_capabilities: *mut DeviceGroupPresentCapabilitiesKHR,
) -> Result,
pub get_device_group_surface_present_modes_khr: extern "system" fn(
device: Device,
surface: SurfaceKHR,
p_modes: *mut DeviceGroupPresentModeFlagsKHR,
) -> Result,
pub get_physical_device_present_rectangles_khr: extern "system" fn(
physical_device: PhysicalDevice,
surface: SurfaceKHR,
p_rect_count: *mut u32,
p_rects: *mut Rect2D,
) -> Result,
pub acquire_next_image2_khr: extern "system" fn(
device: Device,
p_acquire_info: *const AcquireNextImageInfoKHR,
p_image_index: *mut u32,
) -> Result,
}
unsafe impl Send for KhrDeviceGroupFn {}
unsafe impl Sync for KhrDeviceGroupFn {}
impl ::std::clone::Clone for KhrDeviceGroupFn {
fn clone(&self) -> Self {
KhrDeviceGroupFn {
get_device_group_present_capabilities_khr: self
.get_device_group_present_capabilities_khr,
get_device_group_surface_present_modes_khr: self
.get_device_group_surface_present_modes_khr,
get_physical_device_present_rectangles_khr: self
.get_physical_device_present_rectangles_khr,
acquire_next_image2_khr: self.acquire_next_image2_khr,
}
}
}
impl KhrDeviceGroupFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
KhrDeviceGroupFn {
get_device_group_present_capabilities_khr: unsafe {
extern "system" fn get_device_group_present_capabilities_khr(
_device: Device,
_p_device_group_present_capabilities: *mut DeviceGroupPresentCapabilitiesKHR,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(get_device_group_present_capabilities_khr)
))
}
let raw_name = stringify!(vkGetDeviceGroupPresentCapabilitiesKHR);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_device_group_present_capabilities_khr
} else {
::std::mem::transmute(val)
}
},
get_device_group_surface_present_modes_khr: unsafe {
extern "system" fn get_device_group_surface_present_modes_khr(
_device: Device,
_surface: SurfaceKHR,
_p_modes: *mut DeviceGroupPresentModeFlagsKHR,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(get_device_group_surface_present_modes_khr)
))
}
let raw_name = stringify!(vkGetDeviceGroupSurfacePresentModesKHR);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_device_group_surface_present_modes_khr
} else {
::std::mem::transmute(val)
}
},
get_physical_device_present_rectangles_khr: unsafe {
extern "system" fn get_physical_device_present_rectangles_khr(
_physical_device: PhysicalDevice,
_surface: SurfaceKHR,
_p_rect_count: *mut u32,
_p_rects: *mut Rect2D,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(get_physical_device_present_rectangles_khr)
))
}
let raw_name = stringify!(vkGetPhysicalDevicePresentRectanglesKHR);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_physical_device_present_rectangles_khr
} else {
::std::mem::transmute(val)
}
},
acquire_next_image2_khr: unsafe {
extern "system" fn acquire_next_image2_khr(
_device: Device,
_p_acquire_info: *const AcquireNextImageInfoKHR,
_p_image_index: *mut u32,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(acquire_next_image2_khr)
))
}
let raw_name = stringify!(vkAcquireNextImage2KHR);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
acquire_next_image2_khr
} else {
::std::mem::transmute(val)
}
},
}
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetDeviceGroupPresentCapabilitiesKHR.html>"]
pub unsafe fn get_device_group_present_capabilities_khr(
&self,
device: Device,
p_device_group_present_capabilities: *mut DeviceGroupPresentCapabilitiesKHR,
) -> Result {
(self.get_device_group_present_capabilities_khr)(
device,
p_device_group_present_capabilities,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetDeviceGroupSurfacePresentModesKHR.html>"]
pub unsafe fn get_device_group_surface_present_modes_khr(
&self,
device: Device,
surface: SurfaceKHR,
p_modes: *mut DeviceGroupPresentModeFlagsKHR,
) -> Result {
(self.get_device_group_surface_present_modes_khr)(device, surface, p_modes)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetPhysicalDevicePresentRectanglesKHR.html>"]
pub unsafe fn get_physical_device_present_rectangles_khr(
&self,
physical_device: PhysicalDevice,
surface: SurfaceKHR,
p_rect_count: *mut u32,
p_rects: *mut Rect2D,
) -> Result {
(self.get_physical_device_present_rectangles_khr)(
physical_device,
surface,
p_rect_count,
p_rects,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkAcquireNextImage2KHR.html>"]
pub unsafe fn acquire_next_image2_khr(
&self,
device: Device,
p_acquire_info: *const AcquireNextImageInfoKHR,
p_image_index: *mut u32,
) -> Result {
(self.acquire_next_image2_khr)(device, p_acquire_info, p_image_index)
}
}
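// Illustrative sketch only: acquiring a swapchain image through the device-group
// path. The `device_group`, `device`, `swapchain` and `semaphore` values are
// hypothetical; field names follow the generated AcquireNextImageInfoKHR
// definition elsewhere in this module.
//
//     let info = AcquireNextImageInfoKHR {
//         swapchain,
//         timeout: ::std::u64::MAX,
//         semaphore,
//         fence: Fence::null(),
//         device_mask: 1,
//         ..Default::default()
//     };
//     let mut image_index = 0u32;
//     unsafe { device_group.acquire_next_image2_khr(device, &info, &mut image_index) };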
impl ExtValidationFlagsFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_EXT_validation_flags\0")
.expect("Wrong extension string")
}
}
pub struct ExtValidationFlagsFn {}
unsafe impl Send for ExtValidationFlagsFn {}
unsafe impl Sync for ExtValidationFlagsFn {}
impl ::std::clone::Clone for ExtValidationFlagsFn {
fn clone(&self) -> Self {
ExtValidationFlagsFn {}
}
}
impl ExtValidationFlagsFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
ExtValidationFlagsFn {}
}
}
#[doc = "Generated from \'VK_EXT_validation_flags\'"]
impl StructureType {
pub const VALIDATION_FLAGS_EXT: Self = StructureType(1000061000);
}
impl NnViSurfaceFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_NN_vi_surface\0")
.expect("Wrong extension string")
}
}
#[allow(non_camel_case_types)]
pub type PFN_vkCreateViSurfaceNN = extern "system" fn(
instance: Instance,
p_create_info: *const ViSurfaceCreateInfoNN,
p_allocator: *const AllocationCallbacks,
p_surface: *mut SurfaceKHR,
) -> Result;
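#[doc = "Function pointer table for VK_NN_vi_surface; vkCreateViSurfaceNN creates a SurfaceKHR from a ViSurfaceCreateInfoNN."]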
pub struct NnViSurfaceFn {
pub create_vi_surface_nn: extern "system" fn(
instance: Instance,
p_create_info: *const ViSurfaceCreateInfoNN,
p_allocator: *const AllocationCallbacks,
p_surface: *mut SurfaceKHR,
) -> Result,
}
unsafe impl Send for NnViSurfaceFn {}
unsafe impl Sync for NnViSurfaceFn {}
impl ::std::clone::Clone for NnViSurfaceFn {
fn clone(&self) -> Self {
NnViSurfaceFn {
create_vi_surface_nn: self.create_vi_surface_nn,
}
}
}
impl NnViSurfaceFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
NnViSurfaceFn {
create_vi_surface_nn: unsafe {
extern "system" fn create_vi_surface_nn(
_instance: Instance,
_p_create_info: *const ViSurfaceCreateInfoNN,
_p_allocator: *const AllocationCallbacks,
_p_surface: *mut SurfaceKHR,
) -> Result {
panic!(concat!("Unable to load ", stringify!(create_vi_surface_nn)))
}
let raw_name = stringify!(vkCreateViSurfaceNN);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
create_vi_surface_nn
} else {
::std::mem::transmute(val)
}
},
}
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCreateViSurfaceNN.html>"]
pub unsafe fn create_vi_surface_nn(
&self,
instance: Instance,
p_create_info: *const ViSurfaceCreateInfoNN,
p_allocator: *const AllocationCallbacks,
p_surface: *mut SurfaceKHR,
) -> Result {
(self.create_vi_surface_nn)(instance, p_create_info, p_allocator, p_surface)
}
}
#[doc = "Generated from \'VK_NN_vi_surface\'"]
impl StructureType {
pub const VI_SURFACE_CREATE_INFO_NN: Self = StructureType(1000062000);
}
impl KhrShaderDrawParametersFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_KHR_shader_draw_parameters\0")
.expect("Wrong extension string")
}
}
pub struct KhrShaderDrawParametersFn {}
unsafe impl Send for KhrShaderDrawParametersFn {}
unsafe impl Sync for KhrShaderDrawParametersFn {}
impl ::std::clone::Clone for KhrShaderDrawParametersFn {
fn clone(&self) -> Self {
KhrShaderDrawParametersFn {}
}
}
impl KhrShaderDrawParametersFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
KhrShaderDrawParametersFn {}
}
}
impl ExtShaderSubgroupBallotFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_EXT_shader_subgroup_ballot\0")
.expect("Wrong extension string")
}
}
pub struct ExtShaderSubgroupBallotFn {}
unsafe impl Send for ExtShaderSubgroupBallotFn {}
unsafe impl Sync for ExtShaderSubgroupBallotFn {}
impl ::std::clone::Clone for ExtShaderSubgroupBallotFn {
fn clone(&self) -> Self {
ExtShaderSubgroupBallotFn {}
}
}
impl ExtShaderSubgroupBallotFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
ExtShaderSubgroupBallotFn {}
}
}
impl ExtShaderSubgroupVoteFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_EXT_shader_subgroup_vote\0")
.expect("Wrong extension string")
}
}
pub struct ExtShaderSubgroupVoteFn {}
unsafe impl Send for ExtShaderSubgroupVoteFn {}
unsafe impl Sync for ExtShaderSubgroupVoteFn {}
impl ::std::clone::Clone for ExtShaderSubgroupVoteFn {
fn clone(&self) -> Self {
ExtShaderSubgroupVoteFn {}
}
}
impl ExtShaderSubgroupVoteFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
ExtShaderSubgroupVoteFn {}
}
}
impl ArmExtension01Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_ARM_extension_01\0")
.expect("Wrong extension string")
}
}
pub struct ArmExtension01Fn {}
unsafe impl Send for ArmExtension01Fn {}
unsafe impl Sync for ArmExtension01Fn {}
impl ::std::clone::Clone for ArmExtension01Fn {
fn clone(&self) -> Self {
ArmExtension01Fn {}
}
}
impl ArmExtension01Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
ArmExtension01Fn {}
}
}
impl ExtAstcDecodeModeFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_EXT_astc_decode_mode\0")
.expect("Wrong extension string")
}
}
pub struct ExtAstcDecodeModeFn {}
unsafe impl Send for ExtAstcDecodeModeFn {}
unsafe impl Sync for ExtAstcDecodeModeFn {}
impl ::std::clone::Clone for ExtAstcDecodeModeFn {
fn clone(&self) -> Self {
ExtAstcDecodeModeFn {}
}
}
impl ExtAstcDecodeModeFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
ExtAstcDecodeModeFn {}
}
}
#[doc = "Generated from \'VK_EXT_astc_decode_mode\'"]
impl StructureType {
pub const IMAGE_VIEW_ASTC_DECODE_MODE_EXT: Self = StructureType(1000067000);
}
#[doc = "Generated from \'VK_EXT_astc_decode_mode\'"]
impl StructureType {
pub const PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT: Self = StructureType(1000067001);
}
impl ImgExtension69Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_IMG_extension_69\0")
.expect("Wrong extension string")
}
}
pub struct ImgExtension69Fn {}
unsafe impl Send for ImgExtension69Fn {}
unsafe impl Sync for ImgExtension69Fn {}
impl ::std::clone::Clone for ImgExtension69Fn {
fn clone(&self) -> Self {
ImgExtension69Fn {}
}
}
impl ImgExtension69Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
ImgExtension69Fn {}
}
}
impl KhrMaintenance1Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_KHR_maintenance1\0")
.expect("Wrong extension string")
}
}
pub struct KhrMaintenance1Fn {}
unsafe impl Send for KhrMaintenance1Fn {}
unsafe impl Sync for KhrMaintenance1Fn {}
impl ::std::clone::Clone for KhrMaintenance1Fn {
fn clone(&self) -> Self {
KhrMaintenance1Fn {}
}
}
impl KhrMaintenance1Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
KhrMaintenance1Fn {}
}
}
impl KhrDeviceGroupCreationFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_KHR_device_group_creation\0")
.expect("Wrong extension string")
}
}
pub struct KhrDeviceGroupCreationFn {}
unsafe impl Send for KhrDeviceGroupCreationFn {}
unsafe impl Sync for KhrDeviceGroupCreationFn {}
impl ::std::clone::Clone for KhrDeviceGroupCreationFn {
fn clone(&self) -> Self {
KhrDeviceGroupCreationFn {}
}
}
impl KhrDeviceGroupCreationFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
KhrDeviceGroupCreationFn {}
}
}
impl KhrExternalMemoryCapabilitiesFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_KHR_external_memory_capabilities\0")
.expect("Wrong extension string")
}
}
pub struct KhrExternalMemoryCapabilitiesFn {}
unsafe impl Send for KhrExternalMemoryCapabilitiesFn {}
unsafe impl Sync for KhrExternalMemoryCapabilitiesFn {}
impl ::std::clone::Clone for KhrExternalMemoryCapabilitiesFn {
fn clone(&self) -> Self {
KhrExternalMemoryCapabilitiesFn {}
}
}
impl KhrExternalMemoryCapabilitiesFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
KhrExternalMemoryCapabilitiesFn {}
}
}
impl KhrExternalMemoryFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_KHR_external_memory\0")
.expect("Wrong extension string")
}
}
pub struct KhrExternalMemoryFn {}
unsafe impl Send for KhrExternalMemoryFn {}
unsafe impl Sync for KhrExternalMemoryFn {}
impl ::std::clone::Clone for KhrExternalMemoryFn {
fn clone(&self) -> Self {
KhrExternalMemoryFn {}
}
}
impl KhrExternalMemoryFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
KhrExternalMemoryFn {}
}
}
impl KhrExternalMemoryWin32Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_KHR_external_memory_win32\0")
.expect("Wrong extension string")
}
}
#[allow(non_camel_case_types)]
pub type PFN_vkGetMemoryWin32HandleKHR = extern "system" fn(
device: Device,
p_get_win32_handle_info: *const MemoryGetWin32HandleInfoKHR,
p_handle: *mut HANDLE,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkGetMemoryWin32HandlePropertiesKHR = extern "system" fn(
device: Device,
handle_type: ExternalMemoryHandleTypeFlags,
handle: HANDLE,
p_memory_win32_handle_properties: *mut MemoryWin32HandlePropertiesKHR,
) -> Result;
pub struct KhrExternalMemoryWin32Fn {
pub get_memory_win32_handle_khr: extern "system" fn(
device: Device,
p_get_win32_handle_info: *const MemoryGetWin32HandleInfoKHR,
p_handle: *mut HANDLE,
) -> Result,
pub get_memory_win32_handle_properties_khr: extern "system" fn(
device: Device,
handle_type: ExternalMemoryHandleTypeFlags,
handle: HANDLE,
p_memory_win32_handle_properties: *mut MemoryWin32HandlePropertiesKHR,
) -> Result,
}
unsafe impl Send for KhrExternalMemoryWin32Fn {}
unsafe impl Sync for KhrExternalMemoryWin32Fn {}
impl ::std::clone::Clone for KhrExternalMemoryWin32Fn {
fn clone(&self) -> Self {
KhrExternalMemoryWin32Fn {
get_memory_win32_handle_khr: self.get_memory_win32_handle_khr,
get_memory_win32_handle_properties_khr: self.get_memory_win32_handle_properties_khr,
}
}
}
impl KhrExternalMemoryWin32Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
KhrExternalMemoryWin32Fn {
get_memory_win32_handle_khr: unsafe {
extern "system" fn get_memory_win32_handle_khr(
_device: Device,
_p_get_win32_handle_info: *const MemoryGetWin32HandleInfoKHR,
_p_handle: *mut HANDLE,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(get_memory_win32_handle_khr)
))
}
let raw_name = stringify!(vkGetMemoryWin32HandleKHR);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_memory_win32_handle_khr
} else {
::std::mem::transmute(val)
}
},
get_memory_win32_handle_properties_khr: unsafe {
extern "system" fn get_memory_win32_handle_properties_khr(
_device: Device,
_handle_type: ExternalMemoryHandleTypeFlags,
_handle: HANDLE,
_p_memory_win32_handle_properties: *mut MemoryWin32HandlePropertiesKHR,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(get_memory_win32_handle_properties_khr)
))
}
let raw_name = stringify!(vkGetMemoryWin32HandlePropertiesKHR);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_memory_win32_handle_properties_khr
} else {
::std::mem::transmute(val)
}
},
}
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetMemoryWin32HandleKHR.html>"]
pub unsafe fn get_memory_win32_handle_khr(
&self,
device: Device,
p_get_win32_handle_info: *const MemoryGetWin32HandleInfoKHR,
p_handle: *mut HANDLE,
) -> Result {
(self.get_memory_win32_handle_khr)(device, p_get_win32_handle_info, p_handle)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetMemoryWin32HandlePropertiesKHR.html>"]
pub unsafe fn get_memory_win32_handle_properties_khr(
&self,
device: Device,
handle_type: ExternalMemoryHandleTypeFlags,
handle: HANDLE,
p_memory_win32_handle_properties: *mut MemoryWin32HandlePropertiesKHR,
) -> Result {
(self.get_memory_win32_handle_properties_khr)(
device,
handle_type,
handle,
p_memory_win32_handle_properties,
)
}
}
#[doc = "Generated from \'VK_KHR_external_memory_win32\'"]
impl StructureType {
pub const IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR: Self = StructureType(1000073000);
}
#[doc = "Generated from \'VK_KHR_external_memory_win32\'"]
impl StructureType {
pub const EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR: Self = StructureType(1000073001);
}
#[doc = "Generated from \'VK_KHR_external_memory_win32\'"]
impl StructureType {
pub const MEMORY_WIN32_HANDLE_PROPERTIES_KHR: Self = StructureType(1000073002);
}
#[doc = "Generated from \'VK_KHR_external_memory_win32\'"]
impl StructureType {
pub const MEMORY_GET_WIN32_HANDLE_INFO_KHR: Self = StructureType(1000073003);
}
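// Illustrative sketch, not part of the generated bindings: one way to build
// the VK_KHR_external_memory_win32 table from a `vkGetDeviceProcAddr`-style
// loader. The `device` handle and `get_device_proc_addr` pointer are assumed
// to be supplied by the caller; both names are hypothetical.
#[allow(dead_code)]
fn example_load_khr_external_memory_win32(
    device: Device,
    get_device_proc_addr: extern "system" fn(Device, *const c_char) -> *const c_void,
) -> KhrExternalMemoryWin32Fn {
    // `load` passes each command's C name to the closure; a null return keeps
    // the panicking stub for that entry point.
    KhrExternalMemoryWin32Fn::load(|name| get_device_proc_addr(device, name.as_ptr()))
}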
impl KhrExternalMemoryFdFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_KHR_external_memory_fd\0")
.expect("Wrong extension string")
}
}
#[allow(non_camel_case_types)]
pub type PFN_vkGetMemoryFdKHR = extern "system" fn(
device: Device,
p_get_fd_info: *const MemoryGetFdInfoKHR,
p_fd: *mut c_int,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkGetMemoryFdPropertiesKHR = extern "system" fn(
device: Device,
handle_type: ExternalMemoryHandleTypeFlags,
fd: c_int,
p_memory_fd_properties: *mut MemoryFdPropertiesKHR,
) -> Result;
pub struct KhrExternalMemoryFdFn {
pub get_memory_fd_khr: extern "system" fn(
device: Device,
p_get_fd_info: *const MemoryGetFdInfoKHR,
p_fd: *mut c_int,
) -> Result,
pub get_memory_fd_properties_khr: extern "system" fn(
device: Device,
handle_type: ExternalMemoryHandleTypeFlags,
fd: c_int,
p_memory_fd_properties: *mut MemoryFdPropertiesKHR,
) -> Result,
}
unsafe impl Send for KhrExternalMemoryFdFn {}
unsafe impl Sync for KhrExternalMemoryFdFn {}
impl ::std::clone::Clone for KhrExternalMemoryFdFn {
fn clone(&self) -> Self {
KhrExternalMemoryFdFn {
get_memory_fd_khr: self.get_memory_fd_khr,
get_memory_fd_properties_khr: self.get_memory_fd_properties_khr,
}
}
}
impl KhrExternalMemoryFdFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
KhrExternalMemoryFdFn {
get_memory_fd_khr: unsafe {
extern "system" fn get_memory_fd_khr(
_device: Device,
_p_get_fd_info: *const MemoryGetFdInfoKHR,
_p_fd: *mut c_int,
) -> Result {
panic!(concat!("Unable to load ", stringify!(get_memory_fd_khr)))
}
let raw_name = stringify!(vkGetMemoryFdKHR);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_memory_fd_khr
} else {
::std::mem::transmute(val)
}
},
get_memory_fd_properties_khr: unsafe {
extern "system" fn get_memory_fd_properties_khr(
_device: Device,
_handle_type: ExternalMemoryHandleTypeFlags,
_fd: c_int,
_p_memory_fd_properties: *mut MemoryFdPropertiesKHR,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(get_memory_fd_properties_khr)
))
}
let raw_name = stringify!(vkGetMemoryFdPropertiesKHR);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_memory_fd_properties_khr
} else {
::std::mem::transmute(val)
}
},
}
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetMemoryFdKHR.html>"]
pub unsafe fn get_memory_fd_khr(
&self,
device: Device,
p_get_fd_info: *const MemoryGetFdInfoKHR,
p_fd: *mut c_int,
) -> Result {
(self.get_memory_fd_khr)(device, p_get_fd_info, p_fd)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetMemoryFdPropertiesKHR.html>"]
pub unsafe fn get_memory_fd_properties_khr(
&self,
device: Device,
handle_type: ExternalMemoryHandleTypeFlags,
fd: c_int,
p_memory_fd_properties: *mut MemoryFdPropertiesKHR,
) -> Result {
(self.get_memory_fd_properties_khr)(device, handle_type, fd, p_memory_fd_properties)
}
}
#[doc = "Generated from \'VK_KHR_external_memory_fd\'"]
impl StructureType {
pub const IMPORT_MEMORY_FD_INFO_KHR: Self = StructureType(1000074000);
}
#[doc = "Generated from \'VK_KHR_external_memory_fd\'"]
impl StructureType {
pub const MEMORY_FD_PROPERTIES_KHR: Self = StructureType(1000074001);
}
#[doc = "Generated from \'VK_KHR_external_memory_fd\'"]
impl StructureType {
pub const MEMORY_GET_FD_INFO_KHR: Self = StructureType(1000074002);
}
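// Illustrative sketch (assumption, not generated output): calling
// `get_memory_fd_khr` through a loaded table, using a local as the `p_fd`
// out parameter. `fns`, `device`, and `get_fd_info` are hypothetical values
// owned by the caller; `get_fd_info` is assumed to be fully initialised.
#[allow(dead_code)]
unsafe fn example_get_memory_fd(
    fns: &KhrExternalMemoryFdFn,
    device: Device,
    get_fd_info: &MemoryGetFdInfoKHR,
) -> (Result, c_int) {
    let mut fd: c_int = -1;
    // On Result::SUCCESS, `fd` holds the exported POSIX file descriptor.
    let result = fns.get_memory_fd_khr(device, get_fd_info, &mut fd);
    (result, fd)
}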
impl KhrWin32KeyedMutexFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_KHR_win32_keyed_mutex\0")
.expect("Wrong extension string")
}
}
pub struct KhrWin32KeyedMutexFn {}
unsafe impl Send for KhrWin32KeyedMutexFn {}
unsafe impl Sync for KhrWin32KeyedMutexFn {}
impl ::std::clone::Clone for KhrWin32KeyedMutexFn {
fn clone(&self) -> Self {
KhrWin32KeyedMutexFn {}
}
}
impl KhrWin32KeyedMutexFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
KhrWin32KeyedMutexFn {}
}
}
#[doc = "Generated from \'VK_KHR_win32_keyed_mutex\'"]
impl StructureType {
pub const WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR: Self = StructureType(1000075000);
}
impl KhrExternalSemaphoreCapabilitiesFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_KHR_external_semaphore_capabilities\0")
.expect("Wrong extension string")
}
}
pub struct KhrExternalSemaphoreCapabilitiesFn {}
unsafe impl Send for KhrExternalSemaphoreCapabilitiesFn {}
unsafe impl Sync for KhrExternalSemaphoreCapabilitiesFn {}
impl ::std::clone::Clone for KhrExternalSemaphoreCapabilitiesFn {
fn clone(&self) -> Self {
KhrExternalSemaphoreCapabilitiesFn {}
}
}
impl KhrExternalSemaphoreCapabilitiesFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
KhrExternalSemaphoreCapabilitiesFn {}
}
}
impl KhrExternalSemaphoreFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_KHR_external_semaphore\0")
.expect("Wrong extension string")
}
}
pub struct KhrExternalSemaphoreFn {}
unsafe impl Send for KhrExternalSemaphoreFn {}
unsafe impl Sync for KhrExternalSemaphoreFn {}
impl ::std::clone::Clone for KhrExternalSemaphoreFn {
fn clone(&self) -> Self {
KhrExternalSemaphoreFn {}
}
}
impl KhrExternalSemaphoreFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
KhrExternalSemaphoreFn {}
}
}
impl KhrExternalSemaphoreWin32Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_KHR_external_semaphore_win32\0")
.expect("Wrong extension string")
}
}
#[allow(non_camel_case_types)]
pub type PFN_vkImportSemaphoreWin32HandleKHR = extern "system" fn(
device: Device,
p_import_semaphore_win32_handle_info: *const ImportSemaphoreWin32HandleInfoKHR,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkGetSemaphoreWin32HandleKHR = extern "system" fn(
device: Device,
p_get_win32_handle_info: *const SemaphoreGetWin32HandleInfoKHR,
p_handle: *mut HANDLE,
) -> Result;
pub struct KhrExternalSemaphoreWin32Fn {
pub import_semaphore_win32_handle_khr: extern "system" fn(
device: Device,
p_import_semaphore_win32_handle_info: *const ImportSemaphoreWin32HandleInfoKHR,
) -> Result,
pub get_semaphore_win32_handle_khr: extern "system" fn(
device: Device,
p_get_win32_handle_info: *const SemaphoreGetWin32HandleInfoKHR,
p_handle: *mut HANDLE,
) -> Result,
}
unsafe impl Send for KhrExternalSemaphoreWin32Fn {}
unsafe impl Sync for KhrExternalSemaphoreWin32Fn {}
impl ::std::clone::Clone for KhrExternalSemaphoreWin32Fn {
fn clone(&self) -> Self {
KhrExternalSemaphoreWin32Fn {
import_semaphore_win32_handle_khr: self.import_semaphore_win32_handle_khr,
get_semaphore_win32_handle_khr: self.get_semaphore_win32_handle_khr,
}
}
}
impl KhrExternalSemaphoreWin32Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
KhrExternalSemaphoreWin32Fn {
import_semaphore_win32_handle_khr: unsafe {
extern "system" fn import_semaphore_win32_handle_khr(
_device: Device,
_p_import_semaphore_win32_handle_info: *const ImportSemaphoreWin32HandleInfoKHR,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(import_semaphore_win32_handle_khr)
))
}
let raw_name = stringify!(vkImportSemaphoreWin32HandleKHR);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
import_semaphore_win32_handle_khr
} else {
::std::mem::transmute(val)
}
},
get_semaphore_win32_handle_khr: unsafe {
extern "system" fn get_semaphore_win32_handle_khr(
_device: Device,
_p_get_win32_handle_info: *const SemaphoreGetWin32HandleInfoKHR,
_p_handle: *mut HANDLE,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(get_semaphore_win32_handle_khr)
))
}
let raw_name = stringify!(vkGetSemaphoreWin32HandleKHR);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_semaphore_win32_handle_khr
} else {
::std::mem::transmute(val)
}
},
}
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkImportSemaphoreWin32HandleKHR.html>"]
pub unsafe fn import_semaphore_win32_handle_khr(
&self,
device: Device,
p_import_semaphore_win32_handle_info: *const ImportSemaphoreWin32HandleInfoKHR,
) -> Result {
(self.import_semaphore_win32_handle_khr)(device, p_import_semaphore_win32_handle_info)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetSemaphoreWin32HandleKHR.html>"]
pub unsafe fn get_semaphore_win32_handle_khr(
&self,
device: Device,
p_get_win32_handle_info: *const SemaphoreGetWin32HandleInfoKHR,
p_handle: *mut HANDLE,
) -> Result {
(self.get_semaphore_win32_handle_khr)(device, p_get_win32_handle_info, p_handle)
}
}
#[doc = "Generated from \'VK_KHR_external_semaphore_win32\'"]
impl StructureType {
pub const IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR: Self = StructureType(1000078000);
}
#[doc = "Generated from \'VK_KHR_external_semaphore_win32\'"]
impl StructureType {
pub const EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR: Self = StructureType(1000078001);
}
#[doc = "Generated from \'VK_KHR_external_semaphore_win32\'"]
impl StructureType {
pub const D3D12_FENCE_SUBMIT_INFO_KHR: Self = StructureType(1000078002);
}
#[doc = "Generated from \'VK_KHR_external_semaphore_win32\'"]
impl StructureType {
pub const SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR: Self = StructureType(1000078003);
}
impl KhrExternalSemaphoreFdFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_KHR_external_semaphore_fd\0")
.expect("Wrong extension string")
}
}
#[allow(non_camel_case_types)]
pub type PFN_vkImportSemaphoreFdKHR = extern "system" fn(
device: Device,
p_import_semaphore_fd_info: *const ImportSemaphoreFdInfoKHR,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkGetSemaphoreFdKHR = extern "system" fn(
device: Device,
p_get_fd_info: *const SemaphoreGetFdInfoKHR,
p_fd: *mut c_int,
) -> Result;
pub struct KhrExternalSemaphoreFdFn {
pub import_semaphore_fd_khr: extern "system" fn(
device: Device,
p_import_semaphore_fd_info: *const ImportSemaphoreFdInfoKHR,
) -> Result,
pub get_semaphore_fd_khr: extern "system" fn(
device: Device,
p_get_fd_info: *const SemaphoreGetFdInfoKHR,
p_fd: *mut c_int,
) -> Result,
}
unsafe impl Send for KhrExternalSemaphoreFdFn {}
unsafe impl Sync for KhrExternalSemaphoreFdFn {}
impl ::std::clone::Clone for KhrExternalSemaphoreFdFn {
fn clone(&self) -> Self {
KhrExternalSemaphoreFdFn {
import_semaphore_fd_khr: self.import_semaphore_fd_khr,
get_semaphore_fd_khr: self.get_semaphore_fd_khr,
}
}
}
impl KhrExternalSemaphoreFdFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
KhrExternalSemaphoreFdFn {
import_semaphore_fd_khr: unsafe {
extern "system" fn import_semaphore_fd_khr(
_device: Device,
_p_import_semaphore_fd_info: *const ImportSemaphoreFdInfoKHR,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(import_semaphore_fd_khr)
))
}
let raw_name = stringify!(vkImportSemaphoreFdKHR);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
import_semaphore_fd_khr
} else {
::std::mem::transmute(val)
}
},
get_semaphore_fd_khr: unsafe {
extern "system" fn get_semaphore_fd_khr(
_device: Device,
_p_get_fd_info: *const SemaphoreGetFdInfoKHR,
_p_fd: *mut c_int,
) -> Result {
panic!(concat!("Unable to load ", stringify!(get_semaphore_fd_khr)))
}
let raw_name = stringify!(vkGetSemaphoreFdKHR);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_semaphore_fd_khr
} else {
::std::mem::transmute(val)
}
},
}
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkImportSemaphoreFdKHR.html>"]
pub unsafe fn import_semaphore_fd_khr(
&self,
device: Device,
p_import_semaphore_fd_info: *const ImportSemaphoreFdInfoKHR,
) -> Result {
(self.import_semaphore_fd_khr)(device, p_import_semaphore_fd_info)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetSemaphoreFdKHR.html>"]
pub unsafe fn get_semaphore_fd_khr(
&self,
device: Device,
p_get_fd_info: *const SemaphoreGetFdInfoKHR,
p_fd: *mut c_int,
) -> Result {
(self.get_semaphore_fd_khr)(device, p_get_fd_info, p_fd)
}
}
#[doc = "Generated from \'VK_KHR_external_semaphore_fd\'"]
impl StructureType {
pub const IMPORT_SEMAPHORE_FD_INFO_KHR: Self = StructureType(1000079000);
}
#[doc = "Generated from \'VK_KHR_external_semaphore_fd\'"]
impl StructureType {
pub const SEMAPHORE_GET_FD_INFO_KHR: Self = StructureType(1000079001);
}
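// Illustrative sketch (assumption): importing a semaphore payload from a
// POSIX file descriptor. `fns`, `device`, and `import_info` are hypothetical
// caller-provided values. Per the Vulkan spec for this extension, a
// successful import transfers ownership of the file descriptor to the
// implementation, so the caller must not close it afterwards.
#[allow(dead_code)]
unsafe fn example_import_semaphore_fd(
    fns: &KhrExternalSemaphoreFdFn,
    device: Device,
    import_info: &ImportSemaphoreFdInfoKHR,
) -> Result {
    fns.import_semaphore_fd_khr(device, import_info)
}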
impl KhrPushDescriptorFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_KHR_push_descriptor\0")
.expect("Wrong extension string")
}
}
#[allow(non_camel_case_types)]
pub type PFN_vkCmdPushDescriptorSetKHR = extern "system" fn(
command_buffer: CommandBuffer,
pipeline_bind_point: PipelineBindPoint,
layout: PipelineLayout,
set: u32,
descriptor_write_count: u32,
p_descriptor_writes: *const WriteDescriptorSet,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdPushDescriptorSetWithTemplateKHR = extern "system" fn(
command_buffer: CommandBuffer,
descriptor_update_template: DescriptorUpdateTemplate,
layout: PipelineLayout,
set: u32,
p_data: *const c_void,
) -> c_void;
pub struct KhrPushDescriptorFn {
pub cmd_push_descriptor_set_khr: extern "system" fn(
command_buffer: CommandBuffer,
pipeline_bind_point: PipelineBindPoint,
layout: PipelineLayout,
set: u32,
descriptor_write_count: u32,
p_descriptor_writes: *const WriteDescriptorSet,
) -> c_void,
pub cmd_push_descriptor_set_with_template_khr: extern "system" fn(
command_buffer: CommandBuffer,
descriptor_update_template: DescriptorUpdateTemplate,
layout: PipelineLayout,
set: u32,
p_data: *const c_void,
) -> c_void,
}
unsafe impl Send for KhrPushDescriptorFn {}
unsafe impl Sync for KhrPushDescriptorFn {}
impl ::std::clone::Clone for KhrPushDescriptorFn {
fn clone(&self) -> Self {
KhrPushDescriptorFn {
cmd_push_descriptor_set_khr: self.cmd_push_descriptor_set_khr,
cmd_push_descriptor_set_with_template_khr: self
.cmd_push_descriptor_set_with_template_khr,
}
}
}
impl KhrPushDescriptorFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
KhrPushDescriptorFn {
cmd_push_descriptor_set_khr: unsafe {
extern "system" fn cmd_push_descriptor_set_khr(
_command_buffer: CommandBuffer,
_pipeline_bind_point: PipelineBindPoint,
_layout: PipelineLayout,
_set: u32,
_descriptor_write_count: u32,
_p_descriptor_writes: *const WriteDescriptorSet,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(cmd_push_descriptor_set_khr)
))
}
let raw_name = stringify!(vkCmdPushDescriptorSetKHR);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_push_descriptor_set_khr
} else {
::std::mem::transmute(val)
}
},
cmd_push_descriptor_set_with_template_khr: unsafe {
extern "system" fn cmd_push_descriptor_set_with_template_khr(
_command_buffer: CommandBuffer,
_descriptor_update_template: DescriptorUpdateTemplate,
_layout: PipelineLayout,
_set: u32,
_p_data: *const c_void,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(cmd_push_descriptor_set_with_template_khr)
))
}
let raw_name = stringify!(vkCmdPushDescriptorSetWithTemplateKHR);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_push_descriptor_set_with_template_khr
} else {
::std::mem::transmute(val)
}
},
}
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdPushDescriptorSetKHR.html>"]
pub unsafe fn cmd_push_descriptor_set_khr(
&self,
command_buffer: CommandBuffer,
pipeline_bind_point: PipelineBindPoint,
layout: PipelineLayout,
set: u32,
descriptor_write_count: u32,
p_descriptor_writes: *const WriteDescriptorSet,
) -> c_void {
(self.cmd_push_descriptor_set_khr)(
command_buffer,
pipeline_bind_point,
layout,
set,
descriptor_write_count,
p_descriptor_writes,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdPushDescriptorSetWithTemplateKHR.html>"]
pub unsafe fn cmd_push_descriptor_set_with_template_khr(
&self,
command_buffer: CommandBuffer,
descriptor_update_template: DescriptorUpdateTemplate,
layout: PipelineLayout,
set: u32,
p_data: *const c_void,
) -> c_void {
(self.cmd_push_descriptor_set_with_template_khr)(
command_buffer,
descriptor_update_template,
layout,
set,
p_data,
)
}
}
#[doc = "Generated from \'VK_KHR_push_descriptor\'"]
impl StructureType {
pub const PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR: Self = StructureType(1000080000);
}
#[doc = "Generated from \'VK_KHR_push_descriptor\'"]
impl DescriptorSetLayoutCreateFlags {
pub const PUSH_DESCRIPTOR_KHR: Self = DescriptorSetLayoutCreateFlags(0b1);
}
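// Illustrative sketch (assumption): recording a push-descriptor update for a
// single write at set 0 of a graphics pipeline layout. `fns`,
// `command_buffer`, `layout`, and `write` are hypothetical caller-provided
// values; the set layout is assumed to have been created with the
// PUSH_DESCRIPTOR_KHR flag above.
#[allow(dead_code)]
unsafe fn example_push_single_descriptor(
    fns: &KhrPushDescriptorFn,
    command_buffer: CommandBuffer,
    layout: PipelineLayout,
    write: &WriteDescriptorSet,
) {
    fns.cmd_push_descriptor_set_khr(
        command_buffer,
        PipelineBindPoint::GRAPHICS,
        layout,
        0, // set number within `layout`
        1, // descriptor_write_count
        write,
    );
}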
impl ExtConditionalRenderingFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_EXT_conditional_rendering\0")
.expect("Wrong extension string")
}
}
#[allow(non_camel_case_types)]
pub type PFN_vkCmdBeginConditionalRenderingEXT = extern "system" fn(
command_buffer: CommandBuffer,
p_conditional_rendering_begin: *const ConditionalRenderingBeginInfoEXT,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdEndConditionalRenderingEXT =
extern "system" fn(command_buffer: CommandBuffer) -> c_void;
pub struct ExtConditionalRenderingFn {
pub cmd_begin_conditional_rendering_ext: extern "system" fn(
command_buffer: CommandBuffer,
p_conditional_rendering_begin: *const ConditionalRenderingBeginInfoEXT,
) -> c_void,
pub cmd_end_conditional_rendering_ext:
extern "system" fn(command_buffer: CommandBuffer) -> c_void,
}
unsafe impl Send for ExtConditionalRenderingFn {}
unsafe impl Sync for ExtConditionalRenderingFn {}
impl ::std::clone::Clone for ExtConditionalRenderingFn {
fn clone(&self) -> Self {
ExtConditionalRenderingFn {
cmd_begin_conditional_rendering_ext: self.cmd_begin_conditional_rendering_ext,
cmd_end_conditional_rendering_ext: self.cmd_end_conditional_rendering_ext,
}
}
}
impl ExtConditionalRenderingFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
ExtConditionalRenderingFn {
cmd_begin_conditional_rendering_ext: unsafe {
extern "system" fn cmd_begin_conditional_rendering_ext(
_command_buffer: CommandBuffer,
_p_conditional_rendering_begin: *const ConditionalRenderingBeginInfoEXT,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(cmd_begin_conditional_rendering_ext)
))
}
let raw_name = stringify!(vkCmdBeginConditionalRenderingEXT);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_begin_conditional_rendering_ext
} else {
::std::mem::transmute(val)
}
},
cmd_end_conditional_rendering_ext: unsafe {
extern "system" fn cmd_end_conditional_rendering_ext(
_command_buffer: CommandBuffer,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(cmd_end_conditional_rendering_ext)
))
}
let raw_name = stringify!(vkCmdEndConditionalRenderingEXT);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_end_conditional_rendering_ext
} else {
::std::mem::transmute(val)
}
},
}
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdBeginConditionalRenderingEXT.html>"]
pub unsafe fn cmd_begin_conditional_rendering_ext(
&self,
command_buffer: CommandBuffer,
p_conditional_rendering_begin: *const ConditionalRenderingBeginInfoEXT,
) -> c_void {
(self.cmd_begin_conditional_rendering_ext)(command_buffer, p_conditional_rendering_begin)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdEndConditionalRenderingEXT.html>"]
pub unsafe fn cmd_end_conditional_rendering_ext(
&self,
command_buffer: CommandBuffer,
) -> c_void {
(self.cmd_end_conditional_rendering_ext)(command_buffer)
}
}
#[doc = "Generated from \'VK_EXT_conditional_rendering\'"]
impl StructureType {
pub const COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT: Self =
StructureType(1000081000);
}
#[doc = "Generated from \'VK_EXT_conditional_rendering\'"]
impl StructureType {
pub const PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT: Self = StructureType(1000081001);
}
#[doc = "Generated from \'VK_EXT_conditional_rendering\'"]
impl StructureType {
pub const CONDITIONAL_RENDERING_BEGIN_INFO_EXT: Self = StructureType(1000081002);
}
#[doc = "Generated from \'VK_EXT_conditional_rendering\'"]
impl AccessFlags {
pub const CONDITIONAL_RENDERING_READ_EXT: Self = AccessFlags(0b100000000000000000000);
}
#[doc = "Generated from \'VK_EXT_conditional_rendering\'"]
impl BufferUsageFlags {
pub const CONDITIONAL_RENDERING_EXT: Self = BufferUsageFlags(0b1000000000);
}
#[doc = "Generated from \'VK_EXT_conditional_rendering\'"]
impl PipelineStageFlags {
pub const CONDITIONAL_RENDERING_EXT: Self = PipelineStageFlags(0b1000000000000000000);
}
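// Illustrative sketch (assumption): bracketing conditionally executed work.
// `fns`, `command_buffer`, and `begin_info` are hypothetical caller-provided
// values; the draw or dispatch commands that the predicate would gate are
// elided.
#[allow(dead_code)]
unsafe fn example_conditional_rendering_scope(
    fns: &ExtConditionalRenderingFn,
    command_buffer: CommandBuffer,
    begin_info: &ConditionalRenderingBeginInfoEXT,
) {
    fns.cmd_begin_conditional_rendering_ext(command_buffer, begin_info);
    // ... commands recorded here execute only if the predicate is non-zero ...
    fns.cmd_end_conditional_rendering_ext(command_buffer);
}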
impl KhrShaderFloat16Int8Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_KHR_shader_float16_int8\0")
.expect("Wrong extension string")
}
}
pub struct KhrShaderFloat16Int8Fn {}
unsafe impl Send for KhrShaderFloat16Int8Fn {}
unsafe impl Sync for KhrShaderFloat16Int8Fn {}
impl ::std::clone::Clone for KhrShaderFloat16Int8Fn {
fn clone(&self) -> Self {
KhrShaderFloat16Int8Fn {}
}
}
impl KhrShaderFloat16Int8Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
KhrShaderFloat16Int8Fn {}
}
}
#[doc = "Generated from \'VK_KHR_shader_float16_int8\'"]
impl StructureType {
pub const PHYSICAL_DEVICE_FLOAT16_INT8_FEATURES_KHR: Self = StructureType(1000082000);
}
impl Khr16bitStorageFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_KHR_16bit_storage\0")
.expect("Wrong extension string")
}
}
pub struct Khr16bitStorageFn {}
unsafe impl Send for Khr16bitStorageFn {}
unsafe impl Sync for Khr16bitStorageFn {}
impl ::std::clone::Clone for Khr16bitStorageFn {
fn clone(&self) -> Self {
Khr16bitStorageFn {}
}
}
impl Khr16bitStorageFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
Khr16bitStorageFn {}
}
}
impl KhrIncrementalPresentFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_KHR_incremental_present\0")
.expect("Wrong extension string")
}
}
pub struct KhrIncrementalPresentFn {}
unsafe impl Send for KhrIncrementalPresentFn {}
unsafe impl Sync for KhrIncrementalPresentFn {}
impl ::std::clone::Clone for KhrIncrementalPresentFn {
fn clone(&self) -> Self {
KhrIncrementalPresentFn {}
}
}
impl KhrIncrementalPresentFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
KhrIncrementalPresentFn {}
}
}
#[doc = "Generated from \'VK_KHR_incremental_present\'"]
impl StructureType {
pub const PRESENT_REGIONS_KHR: Self = StructureType(1000084000);
}
impl KhrDescriptorUpdateTemplateFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_KHR_descriptor_update_template\0")
.expect("Wrong extension string")
}
}
pub struct KhrDescriptorUpdateTemplateFn {
pub cmd_push_descriptor_set_with_template_khr: extern "system" fn(
command_buffer: CommandBuffer,
descriptor_update_template: DescriptorUpdateTemplate,
layout: PipelineLayout,
set: u32,
p_data: *const c_void,
) -> c_void,
}
unsafe impl Send for KhrDescriptorUpdateTemplateFn {}
unsafe impl Sync for KhrDescriptorUpdateTemplateFn {}
impl ::std::clone::Clone for KhrDescriptorUpdateTemplateFn {
fn clone(&self) -> Self {
KhrDescriptorUpdateTemplateFn {
cmd_push_descriptor_set_with_template_khr: self
.cmd_push_descriptor_set_with_template_khr,
}
}
}
impl KhrDescriptorUpdateTemplateFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
KhrDescriptorUpdateTemplateFn {
cmd_push_descriptor_set_with_template_khr: unsafe {
extern "system" fn cmd_push_descriptor_set_with_template_khr(
_command_buffer: CommandBuffer,
_descriptor_update_template: DescriptorUpdateTemplate,
_layout: PipelineLayout,
_set: u32,
_p_data: *const c_void,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(cmd_push_descriptor_set_with_template_khr)
))
}
let raw_name = stringify!(vkCmdPushDescriptorSetWithTemplateKHR);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_push_descriptor_set_with_template_khr
} else {
::std::mem::transmute(val)
}
},
}
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdPushDescriptorSetWithTemplateKHR.html>"]
pub unsafe fn cmd_push_descriptor_set_with_template_khr(
&self,
command_buffer: CommandBuffer,
descriptor_update_template: DescriptorUpdateTemplate,
layout: PipelineLayout,
set: u32,
p_data: *const c_void,
) -> c_void {
(self.cmd_push_descriptor_set_with_template_khr)(
command_buffer,
descriptor_update_template,
layout,
set,
p_data,
)
}
}
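// Note: the `KhrDescriptorUpdateTemplateFn` table above only carries
// vkCmdPushDescriptorSetWithTemplateKHR, the same entry point that
// `KhrPushDescriptorFn` exposes; the command is declared by both extensions,
// so the generator emits it in both tables. The remaining descriptor update
// template commands were promoted to core Vulkan 1.1 and are not repeated
// here.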
impl NvxDeviceGeneratedCommandsFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_NVX_device_generated_commands\0")
.expect("Wrong extension string")
}
}
#[allow(non_camel_case_types)]
pub type PFN_vkCmdProcessCommandsNVX = extern "system" fn(
command_buffer: CommandBuffer,
p_process_commands_info: *const CmdProcessCommandsInfoNVX,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdReserveSpaceForCommandsNVX = extern "system" fn(
command_buffer: CommandBuffer,
p_reserve_space_info: *const CmdReserveSpaceForCommandsInfoNVX,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCreateIndirectCommandsLayoutNVX = extern "system" fn(
device: Device,
p_create_info: *const IndirectCommandsLayoutCreateInfoNVX,
p_allocator: *const AllocationCallbacks,
p_indirect_commands_layout: *mut IndirectCommandsLayoutNVX,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkDestroyIndirectCommandsLayoutNVX = extern "system" fn(
device: Device,
indirect_commands_layout: IndirectCommandsLayoutNVX,
p_allocator: *const AllocationCallbacks,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCreateObjectTableNVX = extern "system" fn(
device: Device,
p_create_info: *const ObjectTableCreateInfoNVX,
p_allocator: *const AllocationCallbacks,
p_object_table: *mut ObjectTableNVX,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkDestroyObjectTableNVX = extern "system" fn(
device: Device,
object_table: ObjectTableNVX,
p_allocator: *const AllocationCallbacks,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkRegisterObjectsNVX = extern "system" fn(
device: Device,
object_table: ObjectTableNVX,
object_count: u32,
pp_object_table_entries: *const *const ObjectTableEntryNVX,
p_object_indices: *const u32,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkUnregisterObjectsNVX = extern "system" fn(
device: Device,
object_table: ObjectTableNVX,
object_count: u32,
p_object_entry_types: *const ObjectEntryTypeNVX,
p_object_indices: *const u32,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX = extern "system" fn(
physical_device: PhysicalDevice,
p_features: *mut DeviceGeneratedCommandsFeaturesNVX,
p_limits: *mut DeviceGeneratedCommandsLimitsNVX,
) -> c_void;
pub struct NvxDeviceGeneratedCommandsFn {
pub cmd_process_commands_nvx: extern "system" fn(
command_buffer: CommandBuffer,
p_process_commands_info: *const CmdProcessCommandsInfoNVX,
) -> c_void,
pub cmd_reserve_space_for_commands_nvx: extern "system" fn(
command_buffer: CommandBuffer,
p_reserve_space_info: *const CmdReserveSpaceForCommandsInfoNVX,
) -> c_void,
pub create_indirect_commands_layout_nvx: extern "system" fn(
device: Device,
p_create_info: *const IndirectCommandsLayoutCreateInfoNVX,
p_allocator: *const AllocationCallbacks,
p_indirect_commands_layout: *mut IndirectCommandsLayoutNVX,
) -> Result,
pub destroy_indirect_commands_layout_nvx: extern "system" fn(
device: Device,
indirect_commands_layout: IndirectCommandsLayoutNVX,
p_allocator: *const AllocationCallbacks,
) -> c_void,
pub create_object_table_nvx: extern "system" fn(
device: Device,
p_create_info: *const ObjectTableCreateInfoNVX,
p_allocator: *const AllocationCallbacks,
p_object_table: *mut ObjectTableNVX,
) -> Result,
pub destroy_object_table_nvx: extern "system" fn(
device: Device,
object_table: ObjectTableNVX,
p_allocator: *const AllocationCallbacks,
) -> c_void,
pub register_objects_nvx: extern "system" fn(
device: Device,
object_table: ObjectTableNVX,
object_count: u32,
pp_object_table_entries: *const *const ObjectTableEntryNVX,
p_object_indices: *const u32,
) -> Result,
pub unregister_objects_nvx: extern "system" fn(
device: Device,
object_table: ObjectTableNVX,
object_count: u32,
p_object_entry_types: *const ObjectEntryTypeNVX,
p_object_indices: *const u32,
) -> Result,
pub get_physical_device_generated_commands_properties_nvx: extern "system" fn(
physical_device: PhysicalDevice,
p_features: *mut DeviceGeneratedCommandsFeaturesNVX,
p_limits: *mut DeviceGeneratedCommandsLimitsNVX,
) -> c_void,
}
unsafe impl Send for NvxDeviceGeneratedCommandsFn {}
unsafe impl Sync for NvxDeviceGeneratedCommandsFn {}
impl ::std::clone::Clone for NvxDeviceGeneratedCommandsFn {
fn clone(&self) -> Self {
NvxDeviceGeneratedCommandsFn {
cmd_process_commands_nvx: self.cmd_process_commands_nvx,
cmd_reserve_space_for_commands_nvx: self.cmd_reserve_space_for_commands_nvx,
create_indirect_commands_layout_nvx: self.create_indirect_commands_layout_nvx,
destroy_indirect_commands_layout_nvx: self.destroy_indirect_commands_layout_nvx,
create_object_table_nvx: self.create_object_table_nvx,
destroy_object_table_nvx: self.destroy_object_table_nvx,
register_objects_nvx: self.register_objects_nvx,
unregister_objects_nvx: self.unregister_objects_nvx,
get_physical_device_generated_commands_properties_nvx: self
.get_physical_device_generated_commands_properties_nvx,
}
}
}
impl NvxDeviceGeneratedCommandsFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
NvxDeviceGeneratedCommandsFn {
cmd_process_commands_nvx: unsafe {
extern "system" fn cmd_process_commands_nvx(
_command_buffer: CommandBuffer,
_p_process_commands_info: *const CmdProcessCommandsInfoNVX,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(cmd_process_commands_nvx)
))
}
let raw_name = stringify!(vkCmdProcessCommandsNVX);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_process_commands_nvx
} else {
::std::mem::transmute(val)
}
},
cmd_reserve_space_for_commands_nvx: unsafe {
extern "system" fn cmd_reserve_space_for_commands_nvx(
_command_buffer: CommandBuffer,
_p_reserve_space_info: *const CmdReserveSpaceForCommandsInfoNVX,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(cmd_reserve_space_for_commands_nvx)
))
}
let raw_name = stringify!(vkCmdReserveSpaceForCommandsNVX);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_reserve_space_for_commands_nvx
} else {
::std::mem::transmute(val)
}
},
create_indirect_commands_layout_nvx: unsafe {
extern "system" fn create_indirect_commands_layout_nvx(
_device: Device,
_p_create_info: *const IndirectCommandsLayoutCreateInfoNVX,
_p_allocator: *const AllocationCallbacks,
_p_indirect_commands_layout: *mut IndirectCommandsLayoutNVX,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(create_indirect_commands_layout_nvx)
))
}
let raw_name = stringify!(vkCreateIndirectCommandsLayoutNVX);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
create_indirect_commands_layout_nvx
} else {
::std::mem::transmute(val)
}
},
destroy_indirect_commands_layout_nvx: unsafe {
extern "system" fn destroy_indirect_commands_layout_nvx(
_device: Device,
_indirect_commands_layout: IndirectCommandsLayoutNVX,
_p_allocator: *const AllocationCallbacks,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(destroy_indirect_commands_layout_nvx)
))
}
let raw_name = stringify!(vkDestroyIndirectCommandsLayoutNVX);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
destroy_indirect_commands_layout_nvx
} else {
::std::mem::transmute(val)
}
},
create_object_table_nvx: unsafe {
extern "system" fn create_object_table_nvx(
_device: Device,
_p_create_info: *const ObjectTableCreateInfoNVX,
_p_allocator: *const AllocationCallbacks,
_p_object_table: *mut ObjectTableNVX,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(create_object_table_nvx)
))
}
let raw_name = stringify!(vkCreateObjectTableNVX);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
create_object_table_nvx
} else {
::std::mem::transmute(val)
}
},
destroy_object_table_nvx: unsafe {
extern "system" fn destroy_object_table_nvx(
_device: Device,
_object_table: ObjectTableNVX,
_p_allocator: *const AllocationCallbacks,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(destroy_object_table_nvx)
))
}
let raw_name = stringify!(vkDestroyObjectTableNVX);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
destroy_object_table_nvx
} else {
::std::mem::transmute(val)
}
},
register_objects_nvx: unsafe {
extern "system" fn register_objects_nvx(
_device: Device,
_object_table: ObjectTableNVX,
_object_count: u32,
_pp_object_table_entries: *const *const ObjectTableEntryNVX,
_p_object_indices: *const u32,
) -> Result {
panic!(concat!("Unable to load ", stringify!(register_objects_nvx)))
}
let raw_name = stringify!(vkRegisterObjectsNVX);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
register_objects_nvx
} else {
::std::mem::transmute(val)
}
},
unregister_objects_nvx: unsafe {
extern "system" fn unregister_objects_nvx(
_device: Device,
_object_table: ObjectTableNVX,
_object_count: u32,
_p_object_entry_types: *const ObjectEntryTypeNVX,
_p_object_indices: *const u32,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(unregister_objects_nvx)
))
}
let raw_name = stringify!(vkUnregisterObjectsNVX);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
unregister_objects_nvx
} else {
::std::mem::transmute(val)
}
},
get_physical_device_generated_commands_properties_nvx: unsafe {
extern "system" fn get_physical_device_generated_commands_properties_nvx(
_physical_device: PhysicalDevice,
_p_features: *mut DeviceGeneratedCommandsFeaturesNVX,
_p_limits: *mut DeviceGeneratedCommandsLimitsNVX,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(get_physical_device_generated_commands_properties_nvx)
))
}
let raw_name = stringify!(vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_physical_device_generated_commands_properties_nvx
} else {
::std::mem::transmute(val)
}
},
}
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdProcessCommandsNVX.html>"]
pub unsafe fn cmd_process_commands_nvx(
&self,
command_buffer: CommandBuffer,
p_process_commands_info: *const CmdProcessCommandsInfoNVX,
) -> c_void {
(self.cmd_process_commands_nvx)(command_buffer, p_process_commands_info)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdReserveSpaceForCommandsNVX.html>"]
pub unsafe fn cmd_reserve_space_for_commands_nvx(
&self,
command_buffer: CommandBuffer,
p_reserve_space_info: *const CmdReserveSpaceForCommandsInfoNVX,
) -> c_void {
(self.cmd_reserve_space_for_commands_nvx)(command_buffer, p_reserve_space_info)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCreateIndirectCommandsLayoutNVX.html>"]
pub unsafe fn create_indirect_commands_layout_nvx(
&self,
device: Device,
p_create_info: *const IndirectCommandsLayoutCreateInfoNVX,
p_allocator: *const AllocationCallbacks,
p_indirect_commands_layout: *mut IndirectCommandsLayoutNVX,
) -> Result {
(self.create_indirect_commands_layout_nvx)(
device,
p_create_info,
p_allocator,
p_indirect_commands_layout,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkDestroyIndirectCommandsLayoutNVX.html>"]
pub unsafe fn destroy_indirect_commands_layout_nvx(
&self,
device: Device,
indirect_commands_layout: IndirectCommandsLayoutNVX,
p_allocator: *const AllocationCallbacks,
) -> c_void {
(self.destroy_indirect_commands_layout_nvx)(device, indirect_commands_layout, p_allocator)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCreateObjectTableNVX.html>"]
pub unsafe fn create_object_table_nvx(
&self,
device: Device,
p_create_info: *const ObjectTableCreateInfoNVX,
p_allocator: *const AllocationCallbacks,
p_object_table: *mut ObjectTableNVX,
) -> Result {
(self.create_object_table_nvx)(device, p_create_info, p_allocator, p_object_table)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkDestroyObjectTableNVX.html>"]
pub unsafe fn destroy_object_table_nvx(
&self,
device: Device,
object_table: ObjectTableNVX,
p_allocator: *const AllocationCallbacks,
) -> c_void {
(self.destroy_object_table_nvx)(device, object_table, p_allocator)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkRegisterObjectsNVX.html>"]
pub unsafe fn register_objects_nvx(
&self,
device: Device,
object_table: ObjectTableNVX,
object_count: u32,
pp_object_table_entries: *const *const ObjectTableEntryNVX,
p_object_indices: *const u32,
) -> Result {
(self.register_objects_nvx)(
device,
object_table,
object_count,
pp_object_table_entries,
p_object_indices,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkUnregisterObjectsNVX.html>"]
pub unsafe fn unregister_objects_nvx(
&self,
device: Device,
object_table: ObjectTableNVX,
object_count: u32,
p_object_entry_types: *const ObjectEntryTypeNVX,
p_object_indices: *const u32,
) -> Result {
(self.unregister_objects_nvx)(
device,
object_table,
object_count,
p_object_entry_types,
p_object_indices,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX.html>"]
pub unsafe fn get_physical_device_generated_commands_properties_nvx(
&self,
physical_device: PhysicalDevice,
p_features: *mut DeviceGeneratedCommandsFeaturesNVX,
p_limits: *mut DeviceGeneratedCommandsLimitsNVX,
) -> c_void {
(self.get_physical_device_generated_commands_properties_nvx)(
physical_device,
p_features,
p_limits,
)
}
}
#[doc = "Generated from \'VK_NVX_device_generated_commands\'"]
impl StructureType {
pub const OBJECT_TABLE_CREATE_INFO_NVX: Self = StructureType(1000086000);
}
#[doc = "Generated from \'VK_NVX_device_generated_commands\'"]
impl StructureType {
pub const INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NVX: Self = StructureType(1000086001);
}
#[doc = "Generated from \'VK_NVX_device_generated_commands\'"]
impl StructureType {
pub const CMD_PROCESS_COMMANDS_INFO_NVX: Self = StructureType(1000086002);
}
#[doc = "Generated from \'VK_NVX_device_generated_commands\'"]
impl StructureType {
pub const CMD_RESERVE_SPACE_FOR_COMMANDS_INFO_NVX: Self = StructureType(1000086003);
}
#[doc = "Generated from \'VK_NVX_device_generated_commands\'"]
impl StructureType {
pub const DEVICE_GENERATED_COMMANDS_LIMITS_NVX: Self = StructureType(1000086004);
}
#[doc = "Generated from \'VK_NVX_device_generated_commands\'"]
impl StructureType {
pub const DEVICE_GENERATED_COMMANDS_FEATURES_NVX: Self = StructureType(1000086005);
}
#[doc = "Generated from \'VK_NVX_device_generated_commands\'"]
impl PipelineStageFlags {
pub const COMMAND_PROCESS_NVX: Self = PipelineStageFlags(0b100000000000000000);
}
#[doc = "Generated from \'VK_NVX_device_generated_commands\'"]
impl AccessFlags {
pub const COMMAND_PROCESS_READ_NVX: Self = AccessFlags(0b100000000000000000);
}
#[doc = "Generated from \'VK_NVX_device_generated_commands\'"]
impl AccessFlags {
pub const COMMAND_PROCESS_WRITE_NVX: Self = AccessFlags(0b1000000000000000000);
}
#[doc = "Generated from \'VK_NVX_device_generated_commands\'"]
impl ObjectType {
pub const OBJECT_TABLE_NVX: Self = ObjectType(1000086000);
}
#[doc = "Generated from \'VK_NVX_device_generated_commands\'"]
impl ObjectType {
pub const INDIRECT_COMMANDS_LAYOUT_NVX: Self = ObjectType(1000086001);
}
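// Illustrative sketch (assumption): querying the device-generated-commands
// features and limits into default-initialised structs. `fns` and
// `physical_device` are hypothetical caller-provided values; the structs are
// assumed to carry the correct `s_type` via their `Default` impls, as the
// rest of these bindings do.
#[allow(dead_code)]
unsafe fn example_query_generated_commands_properties_nvx(
    fns: &NvxDeviceGeneratedCommandsFn,
    physical_device: PhysicalDevice,
) -> (DeviceGeneratedCommandsFeaturesNVX, DeviceGeneratedCommandsLimitsNVX) {
    let mut features = DeviceGeneratedCommandsFeaturesNVX::default();
    let mut limits = DeviceGeneratedCommandsLimitsNVX::default();
    fns.get_physical_device_generated_commands_properties_nvx(
        physical_device,
        &mut features,
        &mut limits,
    );
    (features, limits)
}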
impl NvClipSpaceWScalingFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_NV_clip_space_w_scaling\0")
.expect("Wrong extension string")
}
}
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetViewportWScalingNV = extern "system" fn(
command_buffer: CommandBuffer,
first_viewport: u32,
viewport_count: u32,
p_viewport_w_scalings: *const ViewportWScalingNV,
) -> c_void;
pub struct NvClipSpaceWScalingFn {
pub cmd_set_viewport_w_scaling_nv: extern "system" fn(
command_buffer: CommandBuffer,
first_viewport: u32,
viewport_count: u32,
p_viewport_w_scalings: *const ViewportWScalingNV,
) -> c_void,
}
unsafe impl Send for NvClipSpaceWScalingFn {}
unsafe impl Sync for NvClipSpaceWScalingFn {}
impl ::std::clone::Clone for NvClipSpaceWScalingFn {
fn clone(&self) -> Self {
NvClipSpaceWScalingFn {
cmd_set_viewport_w_scaling_nv: self.cmd_set_viewport_w_scaling_nv,
}
}
}
impl NvClipSpaceWScalingFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
NvClipSpaceWScalingFn {
cmd_set_viewport_w_scaling_nv: unsafe {
extern "system" fn cmd_set_viewport_w_scaling_nv(
_command_buffer: CommandBuffer,
_first_viewport: u32,
_viewport_count: u32,
_p_viewport_w_scalings: *const ViewportWScalingNV,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(cmd_set_viewport_w_scaling_nv)
))
}
let raw_name = stringify!(vkCmdSetViewportWScalingNV);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_set_viewport_w_scaling_nv
} else {
::std::mem::transmute(val)
}
},
}
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdSetViewportWScalingNV.html>"]
pub unsafe fn cmd_set_viewport_w_scaling_nv(
&self,
command_buffer: CommandBuffer,
first_viewport: u32,
viewport_count: u32,
p_viewport_w_scalings: *const ViewportWScalingNV,
) -> c_void {
(self.cmd_set_viewport_w_scaling_nv)(
command_buffer,
first_viewport,
viewport_count,
p_viewport_w_scalings,
)
}
}
#[doc = "Generated from \'VK_NV_clip_space_w_scaling\'"]
impl StructureType {
pub const PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV: Self = StructureType(1000087000);
}
#[doc = "Generated from \'VK_NV_clip_space_w_scaling\'"]
impl DynamicState {
pub const VIEWPORT_W_SCALING_NV: Self = DynamicState(1000087000);
}
impl ExtDirectModeDisplayFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_EXT_direct_mode_display\0")
.expect("Wrong extension string")
}
}
#[allow(non_camel_case_types)]
pub type PFN_vkReleaseDisplayEXT =
extern "system" fn(physical_device: PhysicalDevice, display: DisplayKHR) -> Result;
pub struct ExtDirectModeDisplayFn {
pub release_display_ext:
extern "system" fn(physical_device: PhysicalDevice, display: DisplayKHR) -> Result,
}
unsafe impl Send for ExtDirectModeDisplayFn {}
unsafe impl Sync for ExtDirectModeDisplayFn {}
impl ::std::clone::Clone for ExtDirectModeDisplayFn {
fn clone(&self) -> Self {
ExtDirectModeDisplayFn {
release_display_ext: self.release_display_ext,
}
}
}
impl ExtDirectModeDisplayFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
ExtDirectModeDisplayFn {
release_display_ext: unsafe {
extern "system" fn release_display_ext(
_physical_device: PhysicalDevice,
_display: DisplayKHR,
) -> Result {
panic!(concat!("Unable to load ", stringify!(release_display_ext)))
}
let raw_name = stringify!(vkReleaseDisplayEXT);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
release_display_ext
} else {
::std::mem::transmute(val)
}
},
}
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkReleaseDisplayEXT.html>"]
pub unsafe fn release_display_ext(
&self,
physical_device: PhysicalDevice,
display: DisplayKHR,
) -> Result {
(self.release_display_ext)(physical_device, display)
}
}
impl ExtAcquireXlibDisplayFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_EXT_acquire_xlib_display\0")
.expect("Wrong extension string")
}
}
#[allow(non_camel_case_types)]
pub type PFN_vkAcquireXlibDisplayEXT = extern "system" fn(
physical_device: PhysicalDevice,
dpy: *mut Display,
display: DisplayKHR,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkGetRandROutputDisplayEXT = extern "system" fn(
physical_device: PhysicalDevice,
dpy: *mut Display,
rr_output: RROutput,
p_display: *mut DisplayKHR,
) -> Result;
pub struct ExtAcquireXlibDisplayFn {
pub acquire_xlib_display_ext: extern "system" fn(
physical_device: PhysicalDevice,
dpy: *mut Display,
display: DisplayKHR,
) -> Result,
pub get_rand_r_output_display_ext: extern "system" fn(
physical_device: PhysicalDevice,
dpy: *mut Display,
rr_output: RROutput,
p_display: *mut DisplayKHR,
) -> Result,
}
unsafe impl Send for ExtAcquireXlibDisplayFn {}
unsafe impl Sync for ExtAcquireXlibDisplayFn {}
impl ::std::clone::Clone for ExtAcquireXlibDisplayFn {
fn clone(&self) -> Self {
ExtAcquireXlibDisplayFn {
acquire_xlib_display_ext: self.acquire_xlib_display_ext,
get_rand_r_output_display_ext: self.get_rand_r_output_display_ext,
}
}
}
impl ExtAcquireXlibDisplayFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
ExtAcquireXlibDisplayFn {
acquire_xlib_display_ext: unsafe {
extern "system" fn acquire_xlib_display_ext(
_physical_device: PhysicalDevice,
_dpy: *mut Display,
_display: DisplayKHR,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(acquire_xlib_display_ext)
))
}
let raw_name = stringify!(vkAcquireXlibDisplayEXT);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
acquire_xlib_display_ext
} else {
::std::mem::transmute(val)
}
},
get_rand_r_output_display_ext: unsafe {
extern "system" fn get_rand_r_output_display_ext(
_physical_device: PhysicalDevice,
_dpy: *mut Display,
_rr_output: RROutput,
_p_display: *mut DisplayKHR,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(get_rand_r_output_display_ext)
))
}
let raw_name = stringify!(vkGetRandROutputDisplayEXT);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_rand_r_output_display_ext
} else {
::std::mem::transmute(val)
}
},
}
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkAcquireXlibDisplayEXT.html>"]
pub unsafe fn acquire_xlib_display_ext(
&self,
physical_device: PhysicalDevice,
dpy: *mut Display,
display: DisplayKHR,
) -> Result {
(self.acquire_xlib_display_ext)(physical_device, dpy, display)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetRandROutputDisplayEXT.html>"]
pub unsafe fn get_rand_r_output_display_ext(
&self,
physical_device: PhysicalDevice,
dpy: *mut Display,
rr_output: RROutput,
p_display: *mut DisplayKHR,
) -> Result {
(self.get_rand_r_output_display_ext)(physical_device, dpy, rr_output, p_display)
}
}
impl ExtDisplaySurfaceCounterFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_EXT_display_surface_counter\0")
.expect("Wrong extension string")
}
}
#[allow(non_camel_case_types)]
pub type PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT = extern "system" fn(
physical_device: PhysicalDevice,
surface: SurfaceKHR,
p_surface_capabilities: *mut SurfaceCapabilities2EXT,
) -> Result;
pub struct ExtDisplaySurfaceCounterFn {
pub get_physical_device_surface_capabilities2_ext: extern "system" fn(
physical_device: PhysicalDevice,
surface: SurfaceKHR,
p_surface_capabilities: *mut SurfaceCapabilities2EXT,
) -> Result,
}
unsafe impl Send for ExtDisplaySurfaceCounterFn {}
unsafe impl Sync for ExtDisplaySurfaceCounterFn {}
impl ::std::clone::Clone for ExtDisplaySurfaceCounterFn {
fn clone(&self) -> Self {
ExtDisplaySurfaceCounterFn {
get_physical_device_surface_capabilities2_ext: self
.get_physical_device_surface_capabilities2_ext,
}
}
}
impl ExtDisplaySurfaceCounterFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
ExtDisplaySurfaceCounterFn {
get_physical_device_surface_capabilities2_ext: unsafe {
extern "system" fn get_physical_device_surface_capabilities2_ext(
_physical_device: PhysicalDevice,
_surface: SurfaceKHR,
_p_surface_capabilities: *mut SurfaceCapabilities2EXT,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(get_physical_device_surface_capabilities2_ext)
))
}
let raw_name = stringify!(vkGetPhysicalDeviceSurfaceCapabilities2EXT);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_physical_device_surface_capabilities2_ext
} else {
::std::mem::transmute(val)
}
},
}
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetPhysicalDeviceSurfaceCapabilities2EXT.html>"]
pub unsafe fn get_physical_device_surface_capabilities2_ext(
&self,
physical_device: PhysicalDevice,
surface: SurfaceKHR,
p_surface_capabilities: *mut SurfaceCapabilities2EXT,
) -> Result {
(self.get_physical_device_surface_capabilities2_ext)(
physical_device,
surface,
p_surface_capabilities,
)
}
}
#[doc = "Generated from \'VK_EXT_display_surface_counter\'"]
impl StructureType {
pub const SURFACE_CAPABILITIES_2_EXT: Self = StructureType(1000090000);
}
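// Illustrative sketch (not generated): loading ExtDisplaySurfaceCounterFn and
// querying extended surface capabilities. `get_instance_proc_addr`, `instance`,
// `physical_device` and `surface` are hypothetical caller-owned values; the
// struct's generated `Default` impl is assumed.
//
//     let fns = ExtDisplaySurfaceCounterFn::load(|name| unsafe {
//         get_instance_proc_addr(instance, name.as_ptr()) as *const c_void
//     });
//     let mut caps = SurfaceCapabilities2EXT::default();
//     let result = unsafe {
//         fns.get_physical_device_surface_capabilities2_ext(physical_device, surface, &mut caps)
//     };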
impl ExtDisplayControlFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_EXT_display_control\0")
.expect("Wrong extension string")
}
}
#[allow(non_camel_case_types)]
pub type PFN_vkDisplayPowerControlEXT = extern "system" fn(
device: Device,
display: DisplayKHR,
p_display_power_info: *const DisplayPowerInfoEXT,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkRegisterDeviceEventEXT = extern "system" fn(
device: Device,
p_device_event_info: *const DeviceEventInfoEXT,
p_allocator: *const AllocationCallbacks,
p_fence: *mut Fence,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkRegisterDisplayEventEXT = extern "system" fn(
device: Device,
display: DisplayKHR,
p_display_event_info: *const DisplayEventInfoEXT,
p_allocator: *const AllocationCallbacks,
p_fence: *mut Fence,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkGetSwapchainCounterEXT = extern "system" fn(
device: Device,
swapchain: SwapchainKHR,
counter: SurfaceCounterFlagsEXT,
p_counter_value: *mut u64,
) -> Result;
pub struct ExtDisplayControlFn {
pub display_power_control_ext: extern "system" fn(
device: Device,
display: DisplayKHR,
p_display_power_info: *const DisplayPowerInfoEXT,
) -> Result,
pub register_device_event_ext: extern "system" fn(
device: Device,
p_device_event_info: *const DeviceEventInfoEXT,
p_allocator: *const AllocationCallbacks,
p_fence: *mut Fence,
) -> Result,
pub register_display_event_ext: extern "system" fn(
device: Device,
display: DisplayKHR,
p_display_event_info: *const DisplayEventInfoEXT,
p_allocator: *const AllocationCallbacks,
p_fence: *mut Fence,
) -> Result,
pub get_swapchain_counter_ext: extern "system" fn(
device: Device,
swapchain: SwapchainKHR,
counter: SurfaceCounterFlagsEXT,
p_counter_value: *mut u64,
) -> Result,
}
unsafe impl Send for ExtDisplayControlFn {}
unsafe impl Sync for ExtDisplayControlFn {}
impl ::std::clone::Clone for ExtDisplayControlFn {
fn clone(&self) -> Self {
ExtDisplayControlFn {
display_power_control_ext: self.display_power_control_ext,
register_device_event_ext: self.register_device_event_ext,
register_display_event_ext: self.register_display_event_ext,
get_swapchain_counter_ext: self.get_swapchain_counter_ext,
}
}
}
impl ExtDisplayControlFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
ExtDisplayControlFn {
display_power_control_ext: unsafe {
extern "system" fn display_power_control_ext(
_device: Device,
_display: DisplayKHR,
_p_display_power_info: *const DisplayPowerInfoEXT,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(display_power_control_ext)
))
}
let raw_name = stringify!(vkDisplayPowerControlEXT);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
display_power_control_ext
} else {
::std::mem::transmute(val)
}
},
register_device_event_ext: unsafe {
extern "system" fn register_device_event_ext(
_device: Device,
_p_device_event_info: *const DeviceEventInfoEXT,
_p_allocator: *const AllocationCallbacks,
_p_fence: *mut Fence,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(register_device_event_ext)
))
}
let raw_name = stringify!(vkRegisterDeviceEventEXT);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
register_device_event_ext
} else {
::std::mem::transmute(val)
}
},
register_display_event_ext: unsafe {
extern "system" fn register_display_event_ext(
_device: Device,
_display: DisplayKHR,
_p_display_event_info: *const DisplayEventInfoEXT,
_p_allocator: *const AllocationCallbacks,
_p_fence: *mut Fence,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(register_display_event_ext)
))
}
let raw_name = stringify!(vkRegisterDisplayEventEXT);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
register_display_event_ext
} else {
::std::mem::transmute(val)
}
},
get_swapchain_counter_ext: unsafe {
extern "system" fn get_swapchain_counter_ext(
_device: Device,
_swapchain: SwapchainKHR,
_counter: SurfaceCounterFlagsEXT,
_p_counter_value: *mut u64,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(get_swapchain_counter_ext)
))
}
let raw_name = stringify!(vkGetSwapchainCounterEXT);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_swapchain_counter_ext
} else {
::std::mem::transmute(val)
}
},
}
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkDisplayPowerControlEXT.html>"]
pub unsafe fn display_power_control_ext(
&self,
device: Device,
display: DisplayKHR,
p_display_power_info: *const DisplayPowerInfoEXT,
) -> Result {
(self.display_power_control_ext)(device, display, p_display_power_info)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkRegisterDeviceEventEXT.html>"]
pub unsafe fn register_device_event_ext(
&self,
device: Device,
p_device_event_info: *const DeviceEventInfoEXT,
p_allocator: *const AllocationCallbacks,
p_fence: *mut Fence,
) -> Result {
(self.register_device_event_ext)(device, p_device_event_info, p_allocator, p_fence)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkRegisterDisplayEventEXT.html>"]
pub unsafe fn register_display_event_ext(
&self,
device: Device,
display: DisplayKHR,
p_display_event_info: *const DisplayEventInfoEXT,
p_allocator: *const AllocationCallbacks,
p_fence: *mut Fence,
) -> Result {
(self.register_display_event_ext)(
device,
display,
p_display_event_info,
p_allocator,
p_fence,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetSwapchainCounterEXT.html>"]
pub unsafe fn get_swapchain_counter_ext(
&self,
device: Device,
swapchain: SwapchainKHR,
counter: SurfaceCounterFlagsEXT,
p_counter_value: *mut u64,
) -> Result {
(self.get_swapchain_counter_ext)(device, swapchain, counter, p_counter_value)
}
}
#[doc = "Generated from \'VK_EXT_display_control\'"]
impl StructureType {
pub const DISPLAY_POWER_INFO_EXT: Self = StructureType(1000091000);
}
#[doc = "Generated from \'VK_EXT_display_control\'"]
impl StructureType {
pub const DEVICE_EVENT_INFO_EXT: Self = StructureType(1000091001);
}
#[doc = "Generated from \'VK_EXT_display_control\'"]
impl StructureType {
pub const DISPLAY_EVENT_INFO_EXT: Self = StructureType(1000091002);
}
#[doc = "Generated from \'VK_EXT_display_control\'"]
impl StructureType {
pub const SWAPCHAIN_COUNTER_CREATE_INFO_EXT: Self = StructureType(1000091003);
}
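// Illustrative sketch (not generated): registering a device event through
// ExtDisplayControlFn and receiving a fence that signals when the event fires.
// `fns` is assumed to come from `ExtDisplayControlFn::load` with a
// vkGetDeviceProcAddr-backed closure; `device` and `event_info`
// (a DeviceEventInfoEXT) are hypothetical caller-owned values.
//
//     let mut fence = Fence::default();
//     let result = unsafe {
//         fns.register_device_event_ext(
//             device,
//             &event_info,
//             ::std::ptr::null(), // no custom allocator
//             &mut fence,
//         )
//     };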
impl GoogleDisplayTimingFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_GOOGLE_display_timing\0")
.expect("Wrong extension string")
}
}
#[allow(non_camel_case_types)]
pub type PFN_vkGetRefreshCycleDurationGOOGLE = extern "system" fn(
device: Device,
swapchain: SwapchainKHR,
p_display_timing_properties: *mut RefreshCycleDurationGOOGLE,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkGetPastPresentationTimingGOOGLE = extern "system" fn(
device: Device,
swapchain: SwapchainKHR,
p_presentation_timing_count: *mut u32,
p_presentation_timings: *mut PastPresentationTimingGOOGLE,
) -> Result;
pub struct GoogleDisplayTimingFn {
pub get_refresh_cycle_duration_google: extern "system" fn(
device: Device,
swapchain: SwapchainKHR,
p_display_timing_properties: *mut RefreshCycleDurationGOOGLE,
) -> Result,
pub get_past_presentation_timing_google: extern "system" fn(
device: Device,
swapchain: SwapchainKHR,
p_presentation_timing_count: *mut u32,
p_presentation_timings: *mut PastPresentationTimingGOOGLE,
) -> Result,
}
unsafe impl Send for GoogleDisplayTimingFn {}
unsafe impl Sync for GoogleDisplayTimingFn {}
impl ::std::clone::Clone for GoogleDisplayTimingFn {
fn clone(&self) -> Self {
GoogleDisplayTimingFn {
get_refresh_cycle_duration_google: self.get_refresh_cycle_duration_google,
get_past_presentation_timing_google: self.get_past_presentation_timing_google,
}
}
}
impl GoogleDisplayTimingFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
GoogleDisplayTimingFn {
get_refresh_cycle_duration_google: unsafe {
extern "system" fn get_refresh_cycle_duration_google(
_device: Device,
_swapchain: SwapchainKHR,
_p_display_timing_properties: *mut RefreshCycleDurationGOOGLE,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(get_refresh_cycle_duration_google)
))
}
let raw_name = stringify!(vkGetRefreshCycleDurationGOOGLE);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_refresh_cycle_duration_google
} else {
::std::mem::transmute(val)
}
},
get_past_presentation_timing_google: unsafe {
extern "system" fn get_past_presentation_timing_google(
_device: Device,
_swapchain: SwapchainKHR,
_p_presentation_timing_count: *mut u32,
_p_presentation_timings: *mut PastPresentationTimingGOOGLE,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(get_past_presentation_timing_google)
))
}
let raw_name = stringify!(vkGetPastPresentationTimingGOOGLE);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_past_presentation_timing_google
} else {
::std::mem::transmute(val)
}
},
}
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetRefreshCycleDurationGOOGLE.html>"]
pub unsafe fn get_refresh_cycle_duration_google(
&self,
device: Device,
swapchain: SwapchainKHR,
p_display_timing_properties: *mut RefreshCycleDurationGOOGLE,
) -> Result {
(self.get_refresh_cycle_duration_google)(device, swapchain, p_display_timing_properties)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetPastPresentationTimingGOOGLE.html>"]
pub unsafe fn get_past_presentation_timing_google(
&self,
device: Device,
swapchain: SwapchainKHR,
p_presentation_timing_count: *mut u32,
p_presentation_timings: *mut PastPresentationTimingGOOGLE,
) -> Result {
(self.get_past_presentation_timing_google)(
device,
swapchain,
p_presentation_timing_count,
p_presentation_timings,
)
}
}
#[doc = "Generated from \'VK_GOOGLE_display_timing\'"]
impl StructureType {
pub const PRESENT_TIMES_INFO_GOOGLE: Self = StructureType(1000092000);
}
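// Illustrative sketch (not generated): the usual Vulkan count-then-fill pattern
// applied to GoogleDisplayTimingFn. `fns`, `device` and `swapchain` are
// hypothetical caller-owned values.
//
//     let mut count = 0u32;
//     unsafe {
//         fns.get_past_presentation_timing_google(
//             device, swapchain, &mut count, ::std::ptr::null_mut(),
//         );
//         let mut timings = vec![PastPresentationTimingGOOGLE::default(); count as usize];
//         fns.get_past_presentation_timing_google(
//             device, swapchain, &mut count, timings.as_mut_ptr(),
//         );
//     }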
impl NvSampleMaskOverrideCoverageFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_NV_sample_mask_override_coverage\0")
.expect("Wrong extension string")
}
}
pub struct NvSampleMaskOverrideCoverageFn {}
unsafe impl Send for NvSampleMaskOverrideCoverageFn {}
unsafe impl Sync for NvSampleMaskOverrideCoverageFn {}
impl ::std::clone::Clone for NvSampleMaskOverrideCoverageFn {
fn clone(&self) -> Self {
NvSampleMaskOverrideCoverageFn {}
}
}
impl NvSampleMaskOverrideCoverageFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
NvSampleMaskOverrideCoverageFn {}
}
}
impl NvGeometryShaderPassthroughFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_NV_geometry_shader_passthrough\0")
.expect("Wrong extension string")
}
}
pub struct NvGeometryShaderPassthroughFn {}
unsafe impl Send for NvGeometryShaderPassthroughFn {}
unsafe impl Sync for NvGeometryShaderPassthroughFn {}
impl ::std::clone::Clone for NvGeometryShaderPassthroughFn {
fn clone(&self) -> Self {
NvGeometryShaderPassthroughFn {}
}
}
impl NvGeometryShaderPassthroughFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
NvGeometryShaderPassthroughFn {}
}
}
impl NvViewportArray2Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_NV_viewport_array2\0")
.expect("Wrong extension string")
}
}
pub struct NvViewportArray2Fn {}
unsafe impl Send for NvViewportArray2Fn {}
unsafe impl Sync for NvViewportArray2Fn {}
impl ::std::clone::Clone for NvViewportArray2Fn {
fn clone(&self) -> Self {
NvViewportArray2Fn {}
}
}
impl NvViewportArray2Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
NvViewportArray2Fn {}
}
}
impl NvxMultiviewPerViewAttributesFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_NVX_multiview_per_view_attributes\0")
.expect("Wrong extension string")
}
}
pub struct NvxMultiviewPerViewAttributesFn {}
unsafe impl Send for NvxMultiviewPerViewAttributesFn {}
unsafe impl Sync for NvxMultiviewPerViewAttributesFn {}
impl ::std::clone::Clone for NvxMultiviewPerViewAttributesFn {
fn clone(&self) -> Self {
NvxMultiviewPerViewAttributesFn {}
}
}
impl NvxMultiviewPerViewAttributesFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
NvxMultiviewPerViewAttributesFn {}
}
}
#[doc = "Generated from \'VK_NVX_multiview_per_view_attributes\'"]
impl StructureType {
pub const PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX: Self =
StructureType(1000097000);
}
#[doc = "Generated from \'VK_NVX_multiview_per_view_attributes\'"]
impl SubpassDescriptionFlags {
pub const PER_VIEW_ATTRIBUTES_NVX: Self = SubpassDescriptionFlags(0b1);
}
#[doc = "Generated from \'VK_NVX_multiview_per_view_attributes\'"]
impl SubpassDescriptionFlags {
pub const PER_VIEW_POSITION_X_ONLY_NVX: Self = SubpassDescriptionFlags(0b10);
}
impl NvViewportSwizzleFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_NV_viewport_swizzle\0")
.expect("Wrong extension string")
}
}
pub struct NvViewportSwizzleFn {}
unsafe impl Send for NvViewportSwizzleFn {}
unsafe impl Sync for NvViewportSwizzleFn {}
impl ::std::clone::Clone for NvViewportSwizzleFn {
fn clone(&self) -> Self {
NvViewportSwizzleFn {}
}
}
impl NvViewportSwizzleFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
NvViewportSwizzleFn {}
}
}
#[doc = "Generated from \'VK_NV_viewport_swizzle\'"]
impl StructureType {
pub const PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV: Self = StructureType(1000098000);
}
impl ExtDiscardRectanglesFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_EXT_discard_rectangles\0")
.expect("Wrong extension string")
}
}
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetDiscardRectangleEXT = extern "system" fn(
command_buffer: CommandBuffer,
first_discard_rectangle: u32,
discard_rectangle_count: u32,
p_discard_rectangles: *const Rect2D,
) -> c_void;
pub struct ExtDiscardRectanglesFn {
pub cmd_set_discard_rectangle_ext: extern "system" fn(
command_buffer: CommandBuffer,
first_discard_rectangle: u32,
discard_rectangle_count: u32,
p_discard_rectangles: *const Rect2D,
) -> c_void,
}
unsafe impl Send for ExtDiscardRectanglesFn {}
unsafe impl Sync for ExtDiscardRectanglesFn {}
impl ::std::clone::Clone for ExtDiscardRectanglesFn {
fn clone(&self) -> Self {
ExtDiscardRectanglesFn {
cmd_set_discard_rectangle_ext: self.cmd_set_discard_rectangle_ext,
}
}
}
impl ExtDiscardRectanglesFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
ExtDiscardRectanglesFn {
cmd_set_discard_rectangle_ext: unsafe {
extern "system" fn cmd_set_discard_rectangle_ext(
_command_buffer: CommandBuffer,
_first_discard_rectangle: u32,
_discard_rectangle_count: u32,
_p_discard_rectangles: *const Rect2D,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(cmd_set_discard_rectangle_ext)
))
}
let raw_name = stringify!(vkCmdSetDiscardRectangleEXT);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_set_discard_rectangle_ext
} else {
::std::mem::transmute(val)
}
},
}
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdSetDiscardRectangleEXT.html>"]
pub unsafe fn cmd_set_discard_rectangle_ext(
&self,
command_buffer: CommandBuffer,
first_discard_rectangle: u32,
discard_rectangle_count: u32,
p_discard_rectangles: *const Rect2D,
) -> c_void {
(self.cmd_set_discard_rectangle_ext)(
command_buffer,
first_discard_rectangle,
discard_rectangle_count,
p_discard_rectangles,
)
}
}
#[doc = "Generated from \'VK_EXT_discard_rectangles\'"]
impl StructureType {
pub const PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT: Self = StructureType(1000099000);
}
#[doc = "Generated from \'VK_EXT_discard_rectangles\'"]
impl StructureType {
pub const PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT: Self = StructureType(1000099001);
}
#[doc = "Generated from \'VK_EXT_discard_rectangles\'"]
impl DynamicState {
pub const DISCARD_RECTANGLE_EXT: Self = DynamicState(1000099000);
}
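// Illustrative sketch (not generated): recording discard rectangles with
// ExtDiscardRectanglesFn. `fns`, `command_buffer` and `rects: &[Rect2D]` are
// hypothetical caller-owned values.
//
//     unsafe {
//         fns.cmd_set_discard_rectangle_ext(
//             command_buffer,
//             0,                  // first_discard_rectangle
//             rects.len() as u32, // discard_rectangle_count
//             rects.as_ptr(),
//         );
//     }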
impl NvExtension101Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_NV_extension_101\0")
.expect("Wrong extension string")
}
}
pub struct NvExtension101Fn {}
unsafe impl Send for NvExtension101Fn {}
unsafe impl Sync for NvExtension101Fn {}
impl ::std::clone::Clone for NvExtension101Fn {
fn clone(&self) -> Self {
NvExtension101Fn {}
}
}
impl NvExtension101Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
NvExtension101Fn {}
}
}
impl ExtConservativeRasterizationFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_EXT_conservative_rasterization\0")
.expect("Wrong extension string")
}
}
pub struct ExtConservativeRasterizationFn {}
unsafe impl Send for ExtConservativeRasterizationFn {}
unsafe impl Sync for ExtConservativeRasterizationFn {}
impl ::std::clone::Clone for ExtConservativeRasterizationFn {
fn clone(&self) -> Self {
ExtConservativeRasterizationFn {}
}
}
impl ExtConservativeRasterizationFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
ExtConservativeRasterizationFn {}
}
}
#[doc = "Generated from \'VK_EXT_conservative_rasterization\'"]
impl StructureType {
pub const PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT: Self =
StructureType(1000101000);
}
#[doc = "Generated from \'VK_EXT_conservative_rasterization\'"]
impl StructureType {
pub const PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT: Self =
StructureType(1000101001);
}
impl NvExtension103Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_NV_extension_103\0")
.expect("Wrong extension string")
}
}
pub struct NvExtension103Fn {}
unsafe impl Send for NvExtension103Fn {}
unsafe impl Sync for NvExtension103Fn {}
impl ::std::clone::Clone for NvExtension103Fn {
fn clone(&self) -> Self {
NvExtension103Fn {}
}
}
impl NvExtension103Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
NvExtension103Fn {}
}
}
impl NvExtension104Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_NV_extension_104\0")
.expect("Wrong extension string")
}
}
pub struct NvExtension104Fn {}
unsafe impl Send for NvExtension104Fn {}
unsafe impl Sync for NvExtension104Fn {}
impl ::std::clone::Clone for NvExtension104Fn {
fn clone(&self) -> Self {
NvExtension104Fn {}
}
}
impl NvExtension104Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
NvExtension104Fn {}
}
}
impl ExtSwapchainColorspaceFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_EXT_swapchain_colorspace\0")
.expect("Wrong extension string")
}
}
pub struct ExtSwapchainColorspaceFn {}
unsafe impl Send for ExtSwapchainColorspaceFn {}
unsafe impl Sync for ExtSwapchainColorspaceFn {}
impl ::std::clone::Clone for ExtSwapchainColorspaceFn {
fn clone(&self) -> Self {
ExtSwapchainColorspaceFn {}
}
}
impl ExtSwapchainColorspaceFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
ExtSwapchainColorspaceFn {}
}
}
#[doc = "Generated from \'VK_EXT_swapchain_colorspace\'"]
impl ColorSpaceKHR {
pub const DISPLAY_P3_NONLINEAR_EXT: Self = ColorSpaceKHR(1000104001);
}
#[doc = "Generated from \'VK_EXT_swapchain_colorspace\'"]
impl ColorSpaceKHR {
pub const EXTENDED_SRGB_LINEAR_EXT: Self = ColorSpaceKHR(1000104002);
}
#[doc = "Generated from \'VK_EXT_swapchain_colorspace\'"]
impl ColorSpaceKHR {
pub const DCI_P3_LINEAR_EXT: Self = ColorSpaceKHR(1000104003);
}
#[doc = "Generated from \'VK_EXT_swapchain_colorspace\'"]
impl ColorSpaceKHR {
pub const DCI_P3_NONLINEAR_EXT: Self = ColorSpaceKHR(1000104004);
}
#[doc = "Generated from \'VK_EXT_swapchain_colorspace\'"]
impl ColorSpaceKHR {
pub const BT709_LINEAR_EXT: Self = ColorSpaceKHR(1000104005);
}
#[doc = "Generated from \'VK_EXT_swapchain_colorspace\'"]
impl ColorSpaceKHR {
pub const BT709_NONLINEAR_EXT: Self = ColorSpaceKHR(1000104006);
}
#[doc = "Generated from \'VK_EXT_swapchain_colorspace\'"]
impl ColorSpaceKHR {
pub const BT2020_LINEAR_EXT: Self = ColorSpaceKHR(1000104007);
}
#[doc = "Generated from \'VK_EXT_swapchain_colorspace\'"]
impl ColorSpaceKHR {
pub const HDR10_ST2084_EXT: Self = ColorSpaceKHR(1000104008);
}
#[doc = "Generated from \'VK_EXT_swapchain_colorspace\'"]
impl ColorSpaceKHR {
pub const DOLBYVISION_EXT: Self = ColorSpaceKHR(1000104009);
}
#[doc = "Generated from \'VK_EXT_swapchain_colorspace\'"]
impl ColorSpaceKHR {
pub const HDR10_HLG_EXT: Self = ColorSpaceKHR(1000104010);
}
#[doc = "Generated from \'VK_EXT_swapchain_colorspace\'"]
impl ColorSpaceKHR {
pub const ADOBERGB_LINEAR_EXT: Self = ColorSpaceKHR(1000104011);
}
#[doc = "Generated from \'VK_EXT_swapchain_colorspace\'"]
impl ColorSpaceKHR {
pub const ADOBERGB_NONLINEAR_EXT: Self = ColorSpaceKHR(1000104012);
}
#[doc = "Generated from \'VK_EXT_swapchain_colorspace\'"]
impl ColorSpaceKHR {
pub const PASS_THROUGH_EXT: Self = ColorSpaceKHR(1000104013);
}
#[doc = "Generated from \'VK_EXT_swapchain_colorspace\'"]
impl ColorSpaceKHR {
pub const EXTENDED_SRGB_NONLINEAR_EXT: Self = ColorSpaceKHR(1000104014);
}
impl ExtHdrMetadataFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_EXT_hdr_metadata\0")
.expect("Wrong extension string")
}
}
#[allow(non_camel_case_types)]
pub type PFN_vkSetHdrMetadataEXT = extern "system" fn(
device: Device,
swapchain_count: u32,
p_swapchains: *const SwapchainKHR,
p_metadata: *const HdrMetadataEXT,
) -> c_void;
pub struct ExtHdrMetadataFn {
pub set_hdr_metadata_ext: extern "system" fn(
device: Device,
swapchain_count: u32,
p_swapchains: *const SwapchainKHR,
p_metadata: *const HdrMetadataEXT,
) -> c_void,
}
unsafe impl Send for ExtHdrMetadataFn {}
unsafe impl Sync for ExtHdrMetadataFn {}
impl ::std::clone::Clone for ExtHdrMetadataFn {
fn clone(&self) -> Self {
ExtHdrMetadataFn {
set_hdr_metadata_ext: self.set_hdr_metadata_ext,
}
}
}
impl ExtHdrMetadataFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
ExtHdrMetadataFn {
set_hdr_metadata_ext: unsafe {
extern "system" fn set_hdr_metadata_ext(
_device: Device,
_swapchain_count: u32,
_p_swapchains: *const SwapchainKHR,
_p_metadata: *const HdrMetadataEXT,
) -> c_void {
panic!(concat!("Unable to load ", stringify!(set_hdr_metadata_ext)))
}
let raw_name = stringify!(vkSetHdrMetadataEXT);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
set_hdr_metadata_ext
} else {
::std::mem::transmute(val)
}
},
}
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkSetHdrMetadataEXT.html>"]
pub unsafe fn set_hdr_metadata_ext(
&self,
device: Device,
swapchain_count: u32,
p_swapchains: *const SwapchainKHR,
p_metadata: *const HdrMetadataEXT,
) -> c_void {
(self.set_hdr_metadata_ext)(device, swapchain_count, p_swapchains, p_metadata)
}
}
#[doc = "Generated from \'VK_EXT_hdr_metadata\'"]
impl StructureType {
pub const HDR_METADATA_EXT: Self = StructureType(1000105000);
}
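// Illustrative sketch (not generated): applying HDR metadata to one or more
// swapchains. `fns`, `device`, `swapchains: &[SwapchainKHR]` and
// `metadata: &[HdrMetadataEXT]` are hypothetical caller-owned values; the two
// slices are assumed to have the same length.
//
//     unsafe {
//         fns.set_hdr_metadata_ext(
//             device,
//             swapchains.len() as u32,
//             swapchains.as_ptr(),
//             metadata.as_ptr(),
//         );
//     }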
impl ImgExtension107Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_IMG_extension_107\0")
.expect("Wrong extension string")
}
}
pub struct ImgExtension107Fn {}
unsafe impl Send for ImgExtension107Fn {}
unsafe impl Sync for ImgExtension107Fn {}
impl ::std::clone::Clone for ImgExtension107Fn {
fn clone(&self) -> Self {
ImgExtension107Fn {}
}
}
impl ImgExtension107Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
ImgExtension107Fn {}
}
}
impl ImgExtension108Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_IMG_extension_108\0")
.expect("Wrong extension string")
}
}
pub struct ImgExtension108Fn {}
unsafe impl Send for ImgExtension108Fn {}
unsafe impl Sync for ImgExtension108Fn {}
impl ::std::clone::Clone for ImgExtension108Fn {
fn clone(&self) -> Self {
ImgExtension108Fn {}
}
}
impl ImgExtension108Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
ImgExtension108Fn {}
}
}
impl ImgExtension109Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_IMG_extension_109\0")
.expect("Wrong extension string")
}
}
pub struct ImgExtension109Fn {}
unsafe impl Send for ImgExtension109Fn {}
unsafe impl Sync for ImgExtension109Fn {}
impl ::std::clone::Clone for ImgExtension109Fn {
fn clone(&self) -> Self {
ImgExtension109Fn {}
}
}
impl ImgExtension109Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
ImgExtension109Fn {}
}
}
impl KhrCreateRenderpass2Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_KHR_create_renderpass2\0")
.expect("Wrong extension string")
}
}
#[allow(non_camel_case_types)]
pub type PFN_vkCreateRenderPass2KHR = extern "system" fn(
device: Device,
p_create_info: *const RenderPassCreateInfo2KHR,
p_allocator: *const AllocationCallbacks,
p_render_pass: *mut RenderPass,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdBeginRenderPass2KHR = extern "system" fn(
command_buffer: CommandBuffer,
p_render_pass_begin: *const RenderPassBeginInfo,
p_subpass_begin_info: *const SubpassBeginInfoKHR,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdNextSubpass2KHR = extern "system" fn(
command_buffer: CommandBuffer,
p_subpass_begin_info: *const SubpassBeginInfoKHR,
p_subpass_end_info: *const SubpassEndInfoKHR,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdEndRenderPass2KHR = extern "system" fn(
command_buffer: CommandBuffer,
p_subpass_end_info: *const SubpassEndInfoKHR,
) -> c_void;
pub struct KhrCreateRenderpass2Fn {
pub create_render_pass2_khr: extern "system" fn(
device: Device,
p_create_info: *const RenderPassCreateInfo2KHR,
p_allocator: *const AllocationCallbacks,
p_render_pass: *mut RenderPass,
) -> Result,
pub cmd_begin_render_pass2_khr: extern "system" fn(
command_buffer: CommandBuffer,
p_render_pass_begin: *const RenderPassBeginInfo,
p_subpass_begin_info: *const SubpassBeginInfoKHR,
) -> c_void,
pub cmd_next_subpass2_khr: extern "system" fn(
command_buffer: CommandBuffer,
p_subpass_begin_info: *const SubpassBeginInfoKHR,
p_subpass_end_info: *const SubpassEndInfoKHR,
) -> c_void,
pub cmd_end_render_pass2_khr: extern "system" fn(
command_buffer: CommandBuffer,
p_subpass_end_info: *const SubpassEndInfoKHR,
) -> c_void,
}
unsafe impl Send for KhrCreateRenderpass2Fn {}
unsafe impl Sync for KhrCreateRenderpass2Fn {}
impl ::std::clone::Clone for KhrCreateRenderpass2Fn {
fn clone(&self) -> Self {
KhrCreateRenderpass2Fn {
create_render_pass2_khr: self.create_render_pass2_khr,
cmd_begin_render_pass2_khr: self.cmd_begin_render_pass2_khr,
cmd_next_subpass2_khr: self.cmd_next_subpass2_khr,
cmd_end_render_pass2_khr: self.cmd_end_render_pass2_khr,
}
}
}
impl KhrCreateRenderpass2Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
KhrCreateRenderpass2Fn {
create_render_pass2_khr: unsafe {
extern "system" fn create_render_pass2_khr(
_device: Device,
_p_create_info: *const RenderPassCreateInfo2KHR,
_p_allocator: *const AllocationCallbacks,
_p_render_pass: *mut RenderPass,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(create_render_pass2_khr)
))
}
let raw_name = stringify!(vkCreateRenderPass2KHR);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
create_render_pass2_khr
} else {
::std::mem::transmute(val)
}
},
cmd_begin_render_pass2_khr: unsafe {
extern "system" fn cmd_begin_render_pass2_khr(
_command_buffer: CommandBuffer,
_p_render_pass_begin: *const RenderPassBeginInfo,
_p_subpass_begin_info: *const SubpassBeginInfoKHR,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(cmd_begin_render_pass2_khr)
))
}
let raw_name = stringify!(vkCmdBeginRenderPass2KHR);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_begin_render_pass2_khr
} else {
::std::mem::transmute(val)
}
},
cmd_next_subpass2_khr: unsafe {
extern "system" fn cmd_next_subpass2_khr(
_command_buffer: CommandBuffer,
_p_subpass_begin_info: *const SubpassBeginInfoKHR,
_p_subpass_end_info: *const SubpassEndInfoKHR,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(cmd_next_subpass2_khr)
))
}
let raw_name = stringify!(vkCmdNextSubpass2KHR);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_next_subpass2_khr
} else {
::std::mem::transmute(val)
}
},
cmd_end_render_pass2_khr: unsafe {
extern "system" fn cmd_end_render_pass2_khr(
_command_buffer: CommandBuffer,
_p_subpass_end_info: *const SubpassEndInfoKHR,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(cmd_end_render_pass2_khr)
))
}
let raw_name = stringify!(vkCmdEndRenderPass2KHR);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_end_render_pass2_khr
} else {
::std::mem::transmute(val)
}
},
}
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCreateRenderPass2KHR.html>"]
pub unsafe fn create_render_pass2_khr(
&self,
device: Device,
p_create_info: *const RenderPassCreateInfo2KHR,
p_allocator: *const AllocationCallbacks,
p_render_pass: *mut RenderPass,
) -> Result {
(self.create_render_pass2_khr)(device, p_create_info, p_allocator, p_render_pass)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdBeginRenderPass2KHR.html>"]
pub unsafe fn cmd_begin_render_pass2_khr(
&self,
command_buffer: CommandBuffer,
p_render_pass_begin: *const RenderPassBeginInfo,
p_subpass_begin_info: *const SubpassBeginInfoKHR,
) -> c_void {
(self.cmd_begin_render_pass2_khr)(command_buffer, p_render_pass_begin, p_subpass_begin_info)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdNextSubpass2KHR.html>"]
pub unsafe fn cmd_next_subpass2_khr(
&self,
command_buffer: CommandBuffer,
p_subpass_begin_info: *const SubpassBeginInfoKHR,
p_subpass_end_info: *const SubpassEndInfoKHR,
) -> c_void {
(self.cmd_next_subpass2_khr)(command_buffer, p_subpass_begin_info, p_subpass_end_info)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdEndRenderPass2KHR.html>"]
pub unsafe fn cmd_end_render_pass2_khr(
&self,
command_buffer: CommandBuffer,
p_subpass_end_info: *const SubpassEndInfoKHR,
) -> c_void {
(self.cmd_end_render_pass2_khr)(command_buffer, p_subpass_end_info)
}
}
#[doc = "Generated from \'VK_KHR_create_renderpass2\'"]
impl StructureType {
pub const ATTACHMENT_DESCRIPTION_2_KHR: Self = StructureType(1000109000);
}
#[doc = "Generated from \'VK_KHR_create_renderpass2\'"]
impl StructureType {
pub const ATTACHMENT_REFERENCE_2_KHR: Self = StructureType(1000109001);
}
#[doc = "Generated from \'VK_KHR_create_renderpass2\'"]
impl StructureType {
pub const SUBPASS_DESCRIPTION_2_KHR: Self = StructureType(1000109002);
}
#[doc = "Generated from \'VK_KHR_create_renderpass2\'"]
impl StructureType {
pub const SUBPASS_DEPENDENCY_2_KHR: Self = StructureType(1000109003);
}
#[doc = "Generated from \'VK_KHR_create_renderpass2\'"]
impl StructureType {
pub const RENDER_PASS_CREATE_INFO_2_KHR: Self = StructureType(1000109004);
}
#[doc = "Generated from \'VK_KHR_create_renderpass2\'"]
impl StructureType {
pub const SUBPASS_BEGIN_INFO_KHR: Self = StructureType(1000109005);
}
#[doc = "Generated from \'VK_KHR_create_renderpass2\'"]
impl StructureType {
pub const SUBPASS_END_INFO_KHR: Self = StructureType(1000109006);
}
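// Illustrative sketch (not generated): the render-pass-2 command sequence.
// `fns`, `device`, `create_info`, `command_buffer`, `begin_info`,
// `subpass_begin` and `subpass_end` are hypothetical caller-owned values, with
// `begin_info` assumed to reference the pass created below;
// cmd_next_subpass2_khr only applies when the pass has more than one subpass.
//
//     let mut render_pass = RenderPass::default();
//     unsafe {
//         fns.create_render_pass2_khr(device, &create_info, ::std::ptr::null(), &mut render_pass);
//         fns.cmd_begin_render_pass2_khr(command_buffer, &begin_info, &subpass_begin);
//         fns.cmd_next_subpass2_khr(command_buffer, &subpass_begin, &subpass_end);
//         fns.cmd_end_render_pass2_khr(command_buffer, &subpass_end);
//     }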
impl ImgExtension111Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_IMG_extension_111\0")
.expect("Wrong extension string")
}
}
pub struct ImgExtension111Fn {}
unsafe impl Send for ImgExtension111Fn {}
unsafe impl Sync for ImgExtension111Fn {}
impl ::std::clone::Clone for ImgExtension111Fn {
fn clone(&self) -> Self {
ImgExtension111Fn {}
}
}
impl ImgExtension111Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
ImgExtension111Fn {}
}
}
impl KhrSharedPresentableImageFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_KHR_shared_presentable_image\0")
.expect("Wrong extension string")
}
}
#[allow(non_camel_case_types)]
pub type PFN_vkGetSwapchainStatusKHR =
extern "system" fn(device: Device, swapchain: SwapchainKHR) -> Result;
pub struct KhrSharedPresentableImageFn {
pub get_swapchain_status_khr:
extern "system" fn(device: Device, swapchain: SwapchainKHR) -> Result,
}
unsafe impl Send for KhrSharedPresentableImageFn {}
unsafe impl Sync for KhrSharedPresentableImageFn {}
impl ::std::clone::Clone for KhrSharedPresentableImageFn {
fn clone(&self) -> Self {
KhrSharedPresentableImageFn {
get_swapchain_status_khr: self.get_swapchain_status_khr,
}
}
}
impl KhrSharedPresentableImageFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
KhrSharedPresentableImageFn {
get_swapchain_status_khr: unsafe {
extern "system" fn get_swapchain_status_khr(
_device: Device,
_swapchain: SwapchainKHR,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(get_swapchain_status_khr)
))
}
let raw_name = stringify!(vkGetSwapchainStatusKHR);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_swapchain_status_khr
} else {
::std::mem::transmute(val)
}
},
}
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetSwapchainStatusKHR.html>"]
pub unsafe fn get_swapchain_status_khr(
&self,
device: Device,
swapchain: SwapchainKHR,
) -> Result {
(self.get_swapchain_status_khr)(device, swapchain)
}
}
#[doc = "Generated from \'VK_KHR_shared_presentable_image\'"]
impl StructureType {
pub const SHARED_PRESENT_SURFACE_CAPABILITIES_KHR: Self = StructureType(1000111000);
}
#[doc = "Generated from \'VK_KHR_shared_presentable_image\'"]
impl PresentModeKHR {
pub const SHARED_DEMAND_REFRESH: Self = PresentModeKHR(1000111000);
}
#[doc = "Generated from \'VK_KHR_shared_presentable_image\'"]
impl PresentModeKHR {
pub const SHARED_CONTINUOUS_REFRESH: Self = PresentModeKHR(1000111001);
}
#[doc = "Generated from \'VK_KHR_shared_presentable_image\'"]
impl ImageLayout {
pub const SHARED_PRESENT_KHR: Self = ImageLayout(1000111000);
}
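// Illustrative sketch (not generated): polling a shared presentable swapchain
// for its current status. `fns`, `device` and `swapchain` are hypothetical
// caller-owned values; the swapchain is assumed to use one of the shared
// present modes defined above (e.g. PresentModeKHR::SHARED_DEMAND_REFRESH).
//
//     let status = unsafe { fns.get_swapchain_status_khr(device, swapchain) };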
impl KhrExternalFenceCapabilitiesFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_KHR_external_fence_capabilities\0")
.expect("Wrong extension string")
}
}
pub struct KhrExternalFenceCapabilitiesFn {}
unsafe impl Send for KhrExternalFenceCapabilitiesFn {}
unsafe impl Sync for KhrExternalFenceCapabilitiesFn {}
impl ::std::clone::Clone for KhrExternalFenceCapabilitiesFn {
fn clone(&self) -> Self {
KhrExternalFenceCapabilitiesFn {}
}
}
impl KhrExternalFenceCapabilitiesFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
KhrExternalFenceCapabilitiesFn {}
}
}
impl KhrExternalFenceFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_KHR_external_fence\0")
.expect("Wrong extension string")
}
}
pub struct KhrExternalFenceFn {}
unsafe impl Send for KhrExternalFenceFn {}
unsafe impl Sync for KhrExternalFenceFn {}
impl ::std::clone::Clone for KhrExternalFenceFn {
fn clone(&self) -> Self {
KhrExternalFenceFn {}
}
}
impl KhrExternalFenceFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
KhrExternalFenceFn {}
}
}
impl KhrExternalFenceWin32Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_KHR_external_fence_win32\0")
.expect("Wrong extension string")
}
}
#[allow(non_camel_case_types)]
pub type PFN_vkImportFenceWin32HandleKHR = extern "system" fn(
device: Device,
p_import_fence_win32_handle_info: *const ImportFenceWin32HandleInfoKHR,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkGetFenceWin32HandleKHR = extern "system" fn(
device: Device,
p_get_win32_handle_info: *const FenceGetWin32HandleInfoKHR,
p_handle: *mut HANDLE,
) -> Result;
pub struct KhrExternalFenceWin32Fn {
pub import_fence_win32_handle_khr: extern "system" fn(
device: Device,
p_import_fence_win32_handle_info: *const ImportFenceWin32HandleInfoKHR,
) -> Result,
pub get_fence_win32_handle_khr: extern "system" fn(
device: Device,
p_get_win32_handle_info: *const FenceGetWin32HandleInfoKHR,
p_handle: *mut HANDLE,
) -> Result,
}
unsafe impl Send for KhrExternalFenceWin32Fn {}
unsafe impl Sync for KhrExternalFenceWin32Fn {}
impl ::std::clone::Clone for KhrExternalFenceWin32Fn {
fn clone(&self) -> Self {
KhrExternalFenceWin32Fn {
import_fence_win32_handle_khr: self.import_fence_win32_handle_khr,
get_fence_win32_handle_khr: self.get_fence_win32_handle_khr,
}
}
}
impl KhrExternalFenceWin32Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
KhrExternalFenceWin32Fn {
import_fence_win32_handle_khr: unsafe {
extern "system" fn import_fence_win32_handle_khr(
_device: Device,
_p_import_fence_win32_handle_info: *const ImportFenceWin32HandleInfoKHR,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(import_fence_win32_handle_khr)
))
}
let raw_name = stringify!(vkImportFenceWin32HandleKHR);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
import_fence_win32_handle_khr
} else {
::std::mem::transmute(val)
}
},
get_fence_win32_handle_khr: unsafe {
extern "system" fn get_fence_win32_handle_khr(
_device: Device,
_p_get_win32_handle_info: *const FenceGetWin32HandleInfoKHR,
_p_handle: *mut HANDLE,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(get_fence_win32_handle_khr)
))
}
let raw_name = stringify!(vkGetFenceWin32HandleKHR);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_fence_win32_handle_khr
} else {
::std::mem::transmute(val)
}
},
}
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkImportFenceWin32HandleKHR.html>"]
pub unsafe fn import_fence_win32_handle_khr(
&self,
device: Device,
p_import_fence_win32_handle_info: *const ImportFenceWin32HandleInfoKHR,
) -> Result {
(self.import_fence_win32_handle_khr)(device, p_import_fence_win32_handle_info)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetFenceWin32HandleKHR.html>"]
pub unsafe fn get_fence_win32_handle_khr(
&self,
device: Device,
p_get_win32_handle_info: *const FenceGetWin32HandleInfoKHR,
p_handle: *mut HANDLE,
) -> Result {
(self.get_fence_win32_handle_khr)(device, p_get_win32_handle_info, p_handle)
}
}
#[doc = "Generated from \'VK_KHR_external_fence_win32\'"]
impl StructureType {
pub const IMPORT_FENCE_WIN32_HANDLE_INFO_KHR: Self = StructureType(1000114000);
}
#[doc = "Generated from \'VK_KHR_external_fence_win32\'"]
impl StructureType {
pub const EXPORT_FENCE_WIN32_HANDLE_INFO_KHR: Self = StructureType(1000114001);
}
#[doc = "Generated from \'VK_KHR_external_fence_win32\'"]
impl StructureType {
pub const FENCE_GET_WIN32_HANDLE_INFO_KHR: Self = StructureType(1000114002);
}
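// Illustrative sketch (not generated): exporting a fence payload as a Win32
// handle. `fns`, `device` and `get_info: FenceGetWin32HandleInfoKHR` are
// hypothetical caller-owned values.
//
//     let mut handle: HANDLE = ::std::ptr::null_mut();
//     let result = unsafe { fns.get_fence_win32_handle_khr(device, &get_info, &mut handle) };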
impl KhrExternalFenceFdFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_KHR_external_fence_fd\0")
.expect("Wrong extension string")
}
}
#[allow(non_camel_case_types)]
pub type PFN_vkImportFenceFdKHR = extern "system" fn(
device: Device,
p_import_fence_fd_info: *const ImportFenceFdInfoKHR,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkGetFenceFdKHR = extern "system" fn(
device: Device,
p_get_fd_info: *const FenceGetFdInfoKHR,
p_fd: *mut c_int,
) -> Result;
pub struct KhrExternalFenceFdFn {
pub import_fence_fd_khr: extern "system" fn(
device: Device,
p_import_fence_fd_info: *const ImportFenceFdInfoKHR,
) -> Result,
pub get_fence_fd_khr: extern "system" fn(
device: Device,
p_get_fd_info: *const FenceGetFdInfoKHR,
p_fd: *mut c_int,
) -> Result,
}
unsafe impl Send for KhrExternalFenceFdFn {}
unsafe impl Sync for KhrExternalFenceFdFn {}
impl ::std::clone::Clone for KhrExternalFenceFdFn {
fn clone(&self) -> Self {
KhrExternalFenceFdFn {
import_fence_fd_khr: self.import_fence_fd_khr,
get_fence_fd_khr: self.get_fence_fd_khr,
}
}
}
impl KhrExternalFenceFdFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
KhrExternalFenceFdFn {
import_fence_fd_khr: unsafe {
extern "system" fn import_fence_fd_khr(
_device: Device,
_p_import_fence_fd_info: *const ImportFenceFdInfoKHR,
) -> Result {
panic!(concat!("Unable to load ", stringify!(import_fence_fd_khr)))
}
let raw_name = stringify!(vkImportFenceFdKHR);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
import_fence_fd_khr
} else {
::std::mem::transmute(val)
}
},
get_fence_fd_khr: unsafe {
extern "system" fn get_fence_fd_khr(
_device: Device,
_p_get_fd_info: *const FenceGetFdInfoKHR,
_p_fd: *mut c_int,
) -> Result {
panic!(concat!("Unable to load ", stringify!(get_fence_fd_khr)))
}
let raw_name = stringify!(vkGetFenceFdKHR);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_fence_fd_khr
} else {
::std::mem::transmute(val)
}
},
}
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkImportFenceFdKHR.html>"]
pub unsafe fn import_fence_fd_khr(
&self,
device: Device,
p_import_fence_fd_info: *const ImportFenceFdInfoKHR,
) -> Result {
(self.import_fence_fd_khr)(device, p_import_fence_fd_info)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetFenceFdKHR.html>"]
pub unsafe fn get_fence_fd_khr(
&self,
device: Device,
p_get_fd_info: *const FenceGetFdInfoKHR,
p_fd: *mut c_int,
) -> Result {
(self.get_fence_fd_khr)(device, p_get_fd_info, p_fd)
}
}
#[doc = "Generated from \'VK_KHR_external_fence_fd\'"]
impl StructureType {
pub const IMPORT_FENCE_FD_INFO_KHR: Self = StructureType(1000115000);
}
#[doc = "Generated from \'VK_KHR_external_fence_fd\'"]
impl StructureType {
pub const FENCE_GET_FD_INFO_KHR: Self = StructureType(1000115001);
}
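// Illustrative sketch (not generated): exporting a fence payload as a POSIX
// file descriptor. `fns`, `device` and `get_fd_info: FenceGetFdInfoKHR` are
// hypothetical caller-owned values.
//
//     let mut fd: c_int = -1;
//     let result = unsafe { fns.get_fence_fd_khr(device, &get_fd_info, &mut fd) };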
impl KhrExtension117Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_KHR_extension_117\0")
.expect("Wrong extension string")
}
}
pub struct KhrExtension117Fn {}
unsafe impl Send for KhrExtension117Fn {}
unsafe impl Sync for KhrExtension117Fn {}
impl ::std::clone::Clone for KhrExtension117Fn {
fn clone(&self) -> Self {
KhrExtension117Fn {}
}
}
impl KhrExtension117Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
KhrExtension117Fn {}
}
}
impl KhrMaintenance2Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_KHR_maintenance2\0")
.expect("Wrong extension string")
}
}
pub struct KhrMaintenance2Fn {}
unsafe impl Send for KhrMaintenance2Fn {}
unsafe impl Sync for KhrMaintenance2Fn {}
impl ::std::clone::Clone for KhrMaintenance2Fn {
fn clone(&self) -> Self {
KhrMaintenance2Fn {}
}
}
impl KhrMaintenance2Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
KhrMaintenance2Fn {}
}
}
impl KhrExtension119Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_KHR_extension_119\0")
.expect("Wrong extension string")
}
}
pub struct KhrExtension119Fn {}
unsafe impl Send for KhrExtension119Fn {}
unsafe impl Sync for KhrExtension119Fn {}
impl ::std::clone::Clone for KhrExtension119Fn {
fn clone(&self) -> Self {
KhrExtension119Fn {}
}
}
impl KhrExtension119Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
KhrExtension119Fn {}
}
}
impl KhrGetSurfaceCapabilities2Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_KHR_get_surface_capabilities2\0")
.expect("Wrong extension string")
}
}
#[allow(non_camel_case_types)]
pub type PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR = extern "system" fn(
physical_device: PhysicalDevice,
p_surface_info: *const PhysicalDeviceSurfaceInfo2KHR,
p_surface_capabilities: *mut SurfaceCapabilities2KHR,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkGetPhysicalDeviceSurfaceFormats2KHR = extern "system" fn(
physical_device: PhysicalDevice,
p_surface_info: *const PhysicalDeviceSurfaceInfo2KHR,
p_surface_format_count: *mut u32,
p_surface_formats: *mut SurfaceFormat2KHR,
) -> Result;
pub struct KhrGetSurfaceCapabilities2Fn {
pub get_physical_device_surface_capabilities2_khr: extern "system" fn(
physical_device: PhysicalDevice,
p_surface_info: *const PhysicalDeviceSurfaceInfo2KHR,
p_surface_capabilities: *mut SurfaceCapabilities2KHR,
) -> Result,
pub get_physical_device_surface_formats2_khr: extern "system" fn(
physical_device: PhysicalDevice,
p_surface_info: *const PhysicalDeviceSurfaceInfo2KHR,
p_surface_format_count: *mut u32,
p_surface_formats: *mut SurfaceFormat2KHR,
) -> Result,
}
unsafe impl Send for KhrGetSurfaceCapabilities2Fn {}
unsafe impl Sync for KhrGetSurfaceCapabilities2Fn {}
impl ::std::clone::Clone for KhrGetSurfaceCapabilities2Fn {
fn clone(&self) -> Self {
KhrGetSurfaceCapabilities2Fn {
get_physical_device_surface_capabilities2_khr: self
.get_physical_device_surface_capabilities2_khr,
get_physical_device_surface_formats2_khr: self.get_physical_device_surface_formats2_khr,
}
}
}
impl KhrGetSurfaceCapabilities2Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
KhrGetSurfaceCapabilities2Fn {
get_physical_device_surface_capabilities2_khr: unsafe {
extern "system" fn get_physical_device_surface_capabilities2_khr(
_physical_device: PhysicalDevice,
_p_surface_info: *const PhysicalDeviceSurfaceInfo2KHR,
_p_surface_capabilities: *mut SurfaceCapabilities2KHR,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(get_physical_device_surface_capabilities2_khr)
))
}
let raw_name = stringify!(vkGetPhysicalDeviceSurfaceCapabilities2KHR);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_physical_device_surface_capabilities2_khr
} else {
::std::mem::transmute(val)
}
},
get_physical_device_surface_formats2_khr: unsafe {
extern "system" fn get_physical_device_surface_formats2_khr(
_physical_device: PhysicalDevice,
_p_surface_info: *const PhysicalDeviceSurfaceInfo2KHR,
_p_surface_format_count: *mut u32,
_p_surface_formats: *mut SurfaceFormat2KHR,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(get_physical_device_surface_formats2_khr)
))
}
let raw_name = stringify!(vkGetPhysicalDeviceSurfaceFormats2KHR);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_physical_device_surface_formats2_khr
} else {
::std::mem::transmute(val)
}
},
}
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetPhysicalDeviceSurfaceCapabilities2KHR.html>"]
pub unsafe fn get_physical_device_surface_capabilities2_khr(
&self,
physical_device: PhysicalDevice,
p_surface_info: *const PhysicalDeviceSurfaceInfo2KHR,
p_surface_capabilities: *mut SurfaceCapabilities2KHR,
) -> Result {
(self.get_physical_device_surface_capabilities2_khr)(
physical_device,
p_surface_info,
p_surface_capabilities,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetPhysicalDeviceSurfaceFormats2KHR.html>"]
pub unsafe fn get_physical_device_surface_formats2_khr(
&self,
physical_device: PhysicalDevice,
p_surface_info: *const PhysicalDeviceSurfaceInfo2KHR,
p_surface_format_count: *mut u32,
p_surface_formats: *mut SurfaceFormat2KHR,
) -> Result {
(self.get_physical_device_surface_formats2_khr)(
physical_device,
p_surface_info,
p_surface_format_count,
p_surface_formats,
)
}
}
#[doc = "Generated from \'VK_KHR_get_surface_capabilities2\'"]
impl StructureType {
pub const PHYSICAL_DEVICE_SURFACE_INFO_2_KHR: Self = StructureType(1000119000);
}
#[doc = "Generated from \'VK_KHR_get_surface_capabilities2\'"]
impl StructureType {
pub const SURFACE_CAPABILITIES_2_KHR: Self = StructureType(1000119001);
}
#[doc = "Generated from \'VK_KHR_get_surface_capabilities2\'"]
impl StructureType {
pub const SURFACE_FORMAT_2_KHR: Self = StructureType(1000119002);
}
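// Illustrative sketch (not generated): enumerating surface formats through the
// extensible *2KHR query, using the count-then-fill pattern. `fns`,
// `physical_device` and `surface_info: PhysicalDeviceSurfaceInfo2KHR` are
// hypothetical caller-owned values.
//
//     let mut count = 0u32;
//     unsafe {
//         fns.get_physical_device_surface_formats2_khr(
//             physical_device, &surface_info, &mut count, ::std::ptr::null_mut(),
//         );
//         let mut formats = vec![SurfaceFormat2KHR::default(); count as usize];
//         fns.get_physical_device_surface_formats2_khr(
//             physical_device, &surface_info, &mut count, formats.as_mut_ptr(),
//         );
//     }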
impl KhrVariablePointersFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_KHR_variable_pointers\0")
.expect("Wrong extension string")
}
}
pub struct KhrVariablePointersFn {}
unsafe impl Send for KhrVariablePointersFn {}
unsafe impl Sync for KhrVariablePointersFn {}
impl ::std::clone::Clone for KhrVariablePointersFn {
fn clone(&self) -> Self {
KhrVariablePointersFn {}
}
}
impl KhrVariablePointersFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
KhrVariablePointersFn {}
}
}
impl KhrGetDisplayProperties2Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_KHR_get_display_properties2\0")
.expect("Wrong extension string")
}
}
#[allow(non_camel_case_types)]
pub type PFN_vkGetPhysicalDeviceDisplayProperties2KHR = extern "system" fn(
physical_device: PhysicalDevice,
p_property_count: *mut u32,
p_properties: *mut DisplayProperties2KHR,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR = extern "system" fn(
physical_device: PhysicalDevice,
p_property_count: *mut u32,
p_properties: *mut DisplayPlaneProperties2KHR,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkGetDisplayModeProperties2KHR = extern "system" fn(
physical_device: PhysicalDevice,
display: DisplayKHR,
p_property_count: *mut u32,
p_properties: *mut DisplayModeProperties2KHR,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkGetDisplayPlaneCapabilities2KHR = extern "system" fn(
physical_device: PhysicalDevice,
p_display_plane_info: *const DisplayPlaneInfo2KHR,
p_capabilities: *mut DisplayPlaneCapabilities2KHR,
) -> Result;
pub struct KhrGetDisplayProperties2Fn {
pub get_physical_device_display_properties2_khr: extern "system" fn(
physical_device: PhysicalDevice,
p_property_count: *mut u32,
p_properties: *mut DisplayProperties2KHR,
) -> Result,
pub get_physical_device_display_plane_properties2_khr: extern "system" fn(
physical_device: PhysicalDevice,
p_property_count: *mut u32,
p_properties: *mut DisplayPlaneProperties2KHR,
) -> Result,
pub get_display_mode_properties2_khr: extern "system" fn(
physical_device: PhysicalDevice,
display: DisplayKHR,
p_property_count: *mut u32,
p_properties: *mut DisplayModeProperties2KHR,
) -> Result,
pub get_display_plane_capabilities2_khr: extern "system" fn(
physical_device: PhysicalDevice,
p_display_plane_info: *const DisplayPlaneInfo2KHR,
p_capabilities: *mut DisplayPlaneCapabilities2KHR,
) -> Result,
}
unsafe impl Send for KhrGetDisplayProperties2Fn {}
unsafe impl Sync for KhrGetDisplayProperties2Fn {}
impl ::std::clone::Clone for KhrGetDisplayProperties2Fn {
fn clone(&self) -> Self {
KhrGetDisplayProperties2Fn {
get_physical_device_display_properties2_khr: self
.get_physical_device_display_properties2_khr,
get_physical_device_display_plane_properties2_khr: self
.get_physical_device_display_plane_properties2_khr,
get_display_mode_properties2_khr: self.get_display_mode_properties2_khr,
get_display_plane_capabilities2_khr: self.get_display_plane_capabilities2_khr,
}
}
}
impl KhrGetDisplayProperties2Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
KhrGetDisplayProperties2Fn {
get_physical_device_display_properties2_khr: unsafe {
extern "system" fn get_physical_device_display_properties2_khr(
_physical_device: PhysicalDevice,
_p_property_count: *mut u32,
_p_properties: *mut DisplayProperties2KHR,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(get_physical_device_display_properties2_khr)
))
}
let raw_name = stringify!(vkGetPhysicalDeviceDisplayProperties2KHR);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_physical_device_display_properties2_khr
} else {
::std::mem::transmute(val)
}
},
get_physical_device_display_plane_properties2_khr: unsafe {
extern "system" fn get_physical_device_display_plane_properties2_khr(
_physical_device: PhysicalDevice,
_p_property_count: *mut u32,
_p_properties: *mut DisplayPlaneProperties2KHR,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(get_physical_device_display_plane_properties2_khr)
))
}
let raw_name = stringify!(vkGetPhysicalDeviceDisplayPlaneProperties2KHR);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_physical_device_display_plane_properties2_khr
} else {
::std::mem::transmute(val)
}
},
get_display_mode_properties2_khr: unsafe {
extern "system" fn get_display_mode_properties2_khr(
_physical_device: PhysicalDevice,
_display: DisplayKHR,
_p_property_count: *mut u32,
_p_properties: *mut DisplayModeProperties2KHR,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(get_display_mode_properties2_khr)
))
}
let raw_name = stringify!(vkGetDisplayModeProperties2KHR);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_display_mode_properties2_khr
} else {
::std::mem::transmute(val)
}
},
get_display_plane_capabilities2_khr: unsafe {
extern "system" fn get_display_plane_capabilities2_khr(
_physical_device: PhysicalDevice,
_p_display_plane_info: *const DisplayPlaneInfo2KHR,
_p_capabilities: *mut DisplayPlaneCapabilities2KHR,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(get_display_plane_capabilities2_khr)
))
}
let raw_name = stringify!(vkGetDisplayPlaneCapabilities2KHR);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_display_plane_capabilities2_khr
} else {
::std::mem::transmute(val)
}
},
}
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetPhysicalDeviceDisplayProperties2KHR.html>"]
pub unsafe fn get_physical_device_display_properties2_khr(
&self,
physical_device: PhysicalDevice,
p_property_count: *mut u32,
p_properties: *mut DisplayProperties2KHR,
) -> Result {
(self.get_physical_device_display_properties2_khr)(
physical_device,
p_property_count,
p_properties,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetPhysicalDeviceDisplayPlaneProperties2KHR.html>"]
pub unsafe fn get_physical_device_display_plane_properties2_khr(
&self,
physical_device: PhysicalDevice,
p_property_count: *mut u32,
p_properties: *mut DisplayPlaneProperties2KHR,
) -> Result {
(self.get_physical_device_display_plane_properties2_khr)(
physical_device,
p_property_count,
p_properties,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetDisplayModeProperties2KHR.html>"]
pub unsafe fn get_display_mode_properties2_khr(
&self,
physical_device: PhysicalDevice,
display: DisplayKHR,
p_property_count: *mut u32,
p_properties: *mut DisplayModeProperties2KHR,
) -> Result {
(self.get_display_mode_properties2_khr)(
physical_device,
display,
p_property_count,
p_properties,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetDisplayPlaneCapabilities2KHR.html>"]
pub unsafe fn get_display_plane_capabilities2_khr(
&self,
physical_device: PhysicalDevice,
p_display_plane_info: *const DisplayPlaneInfo2KHR,
p_capabilities: *mut DisplayPlaneCapabilities2KHR,
) -> Result {
(self.get_display_plane_capabilities2_khr)(
physical_device,
p_display_plane_info,
p_capabilities,
)
}
}
#[doc = "Generated from \'VK_KHR_get_display_properties2\'"]
impl StructureType {
pub const DISPLAY_PROPERTIES_2_KHR: Self = StructureType(1000121000);
}
#[doc = "Generated from \'VK_KHR_get_display_properties2\'"]
impl StructureType {
pub const DISPLAY_PLANE_PROPERTIES_2_KHR: Self = StructureType(1000121001);
}
#[doc = "Generated from \'VK_KHR_get_display_properties2\'"]
impl StructureType {
pub const DISPLAY_MODE_PROPERTIES_2_KHR: Self = StructureType(1000121002);
}
#[doc = "Generated from \'VK_KHR_get_display_properties2\'"]
impl StructureType {
pub const DISPLAY_PLANE_INFO_2_KHR: Self = StructureType(1000121003);
}
#[doc = "Generated from \'VK_KHR_get_display_properties2\'"]
impl StructureType {
pub const DISPLAY_PLANE_CAPABILITIES_2_KHR: Self = StructureType(1000121004);
}
impl MvkIosSurfaceFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_MVK_ios_surface\0")
.expect("Wrong extension string")
}
}
#[allow(non_camel_case_types)]
pub type PFN_vkCreateIOSSurfaceMVK = extern "system" fn(
instance: Instance,
p_create_info: *const IOSSurfaceCreateInfoMVK,
p_allocator: *const AllocationCallbacks,
p_surface: *mut SurfaceKHR,
) -> Result;
pub struct MvkIosSurfaceFn {
pub create_ios_surface_mvk: extern "system" fn(
instance: Instance,
p_create_info: *const IOSSurfaceCreateInfoMVK,
p_allocator: *const AllocationCallbacks,
p_surface: *mut SurfaceKHR,
) -> Result,
}
unsafe impl Send for MvkIosSurfaceFn {}
unsafe impl Sync for MvkIosSurfaceFn {}
impl ::std::clone::Clone for MvkIosSurfaceFn {
fn clone(&self) -> Self {
MvkIosSurfaceFn {
create_ios_surface_mvk: self.create_ios_surface_mvk,
}
}
}
impl MvkIosSurfaceFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
MvkIosSurfaceFn {
create_ios_surface_mvk: unsafe {
extern "system" fn create_ios_surface_mvk(
_instance: Instance,
_p_create_info: *const IOSSurfaceCreateInfoMVK,
_p_allocator: *const AllocationCallbacks,
_p_surface: *mut SurfaceKHR,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(create_ios_surface_mvk)
))
}
let raw_name = stringify!(vkCreateIOSSurfaceMVK);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
create_ios_surface_mvk
} else {
::std::mem::transmute(val)
}
},
}
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCreateIOSSurfaceMVK.html>"]
pub unsafe fn create_ios_surface_mvk(
&self,
instance: Instance,
p_create_info: *const IOSSurfaceCreateInfoMVK,
p_allocator: *const AllocationCallbacks,
p_surface: *mut SurfaceKHR,
) -> Result {
(self.create_ios_surface_mvk)(instance, p_create_info, p_allocator, p_surface)
}
}
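// Illustrative sketch, not part of the generated bindings: resolving the
// VK_MVK_ios_surface entry point through a hypothetical `get_instance_proc_addr`
// closure supplied by the caller (for example, obtained from a Vulkan loader).
// Entry points that resolve to null fall back to stubs that panic when called.
#[allow(dead_code)]
fn example_load_ios_surface_fn<F>(get_instance_proc_addr: F) -> MvkIosSurfaceFn
where
    F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
    MvkIosSurfaceFn::load(get_instance_proc_addr)
}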
#[doc = "Generated from \'VK_MVK_ios_surface\'"]
impl StructureType {
pub const IOS_SURFACE_CREATE_INFO_M: Self = StructureType(1000122000);
}
impl MvkMacosSurfaceFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_MVK_macos_surface\0")
.expect("Wrong extension string")
}
}
#[allow(non_camel_case_types)]
pub type PFN_vkCreateMacOSSurfaceMVK = extern "system" fn(
instance: Instance,
p_create_info: *const MacOSSurfaceCreateInfoMVK,
p_allocator: *const AllocationCallbacks,
p_surface: *mut SurfaceKHR,
) -> Result;
pub struct MvkMacosSurfaceFn {
pub create_mac_os_surface_mvk: extern "system" fn(
instance: Instance,
p_create_info: *const MacOSSurfaceCreateInfoMVK,
p_allocator: *const AllocationCallbacks,
p_surface: *mut SurfaceKHR,
) -> Result,
}
unsafe impl Send for MvkMacosSurfaceFn {}
unsafe impl Sync for MvkMacosSurfaceFn {}
impl ::std::clone::Clone for MvkMacosSurfaceFn {
fn clone(&self) -> Self {
MvkMacosSurfaceFn {
create_mac_os_surface_mvk: self.create_mac_os_surface_mvk,
}
}
}
impl MvkMacosSurfaceFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
MvkMacosSurfaceFn {
create_mac_os_surface_mvk: unsafe {
extern "system" fn create_mac_os_surface_mvk(
_instance: Instance,
_p_create_info: *const MacOSSurfaceCreateInfoMVK,
_p_allocator: *const AllocationCallbacks,
_p_surface: *mut SurfaceKHR,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(create_mac_os_surface_mvk)
))
}
let raw_name = stringify!(vkCreateMacOSSurfaceMVK);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
create_mac_os_surface_mvk
} else {
::std::mem::transmute(val)
}
},
}
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCreateMacOSSurfaceMVK.html>"]
pub unsafe fn create_mac_os_surface_mvk(
&self,
instance: Instance,
p_create_info: *const MacOSSurfaceCreateInfoMVK,
p_allocator: *const AllocationCallbacks,
p_surface: *mut SurfaceKHR,
) -> Result {
(self.create_mac_os_surface_mvk)(instance, p_create_info, p_allocator, p_surface)
}
}
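// Illustrative sketch, not part of the generated bindings: invoking a loaded
// vkCreateMacOSSurfaceMVK entry point. `fp`, `instance`, `create_info`, and
// `surface` are assumed to be supplied by the caller; passing a null allocator
// pointer selects the implementation's default allocation callbacks.
#[allow(dead_code)]
unsafe fn example_create_macos_surface(
    fp: &MvkMacosSurfaceFn,
    instance: Instance,
    create_info: *const MacOSSurfaceCreateInfoMVK,
    surface: *mut SurfaceKHR,
) -> Result {
    fp.create_mac_os_surface_mvk(instance, create_info, ::std::ptr::null(), surface)
}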
#[doc = "Generated from \'VK_MVK_macos_surface\'"]
impl StructureType {
pub const MACOS_SURFACE_CREATE_INFO_M: Self = StructureType(1000123000);
}
impl MvkMoltenvkFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_MVK_moltenvk\0").expect("Wrong extension string")
}
}
pub struct MvkMoltenvkFn {}
unsafe impl Send for MvkMoltenvkFn {}
unsafe impl Sync for MvkMoltenvkFn {}
impl ::std::clone::Clone for MvkMoltenvkFn {
fn clone(&self) -> Self {
MvkMoltenvkFn {}
}
}
impl MvkMoltenvkFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
MvkMoltenvkFn {}
}
}
impl ExtExternalMemoryDmaBufFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_EXT_external_memory_dma_buf\0")
.expect("Wrong extension string")
}
}
pub struct ExtExternalMemoryDmaBufFn {}
unsafe impl Send for ExtExternalMemoryDmaBufFn {}
unsafe impl Sync for ExtExternalMemoryDmaBufFn {}
impl ::std::clone::Clone for ExtExternalMemoryDmaBufFn {
fn clone(&self) -> Self {
ExtExternalMemoryDmaBufFn {}
}
}
impl ExtExternalMemoryDmaBufFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
ExtExternalMemoryDmaBufFn {}
}
}
#[doc = "Generated from \'VK_EXT_external_memory_dma_buf\'"]
impl ExternalMemoryHandleTypeFlags {
pub const EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF: Self =
ExternalMemoryHandleTypeFlags(0b1000000000);
}
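// Illustrative sketch, not part of the generated bindings: testing whether an
// external-memory handle-type mask includes the dma-buf bit via the `contains`
// helper of the flags wrapper. `supported` is a hypothetical value that would
// normally come from an external-memory properties query.
#[allow(dead_code)]
fn example_supports_dma_buf(supported: ExternalMemoryHandleTypeFlags) -> bool {
    supported.contains(ExternalMemoryHandleTypeFlags::EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF)
}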
impl ExtQueueFamilyForeignFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_EXT_queue_family_foreign\0")
.expect("Wrong extension string")
}
}
pub struct ExtQueueFamilyForeignFn {}
unsafe impl Send for ExtQueueFamilyForeignFn {}
unsafe impl Sync for ExtQueueFamilyForeignFn {}
impl ::std::clone::Clone for ExtQueueFamilyForeignFn {
fn clone(&self) -> Self {
ExtQueueFamilyForeignFn {}
}
}
impl ExtQueueFamilyForeignFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
ExtQueueFamilyForeignFn {}
}
}
impl KhrDedicatedAllocationFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_KHR_dedicated_allocation\0")
.expect("Wrong extension string")
}
}
pub struct KhrDedicatedAllocationFn {}
unsafe impl Send for KhrDedicatedAllocationFn {}
unsafe impl Sync for KhrDedicatedAllocationFn {}
impl ::std::clone::Clone for KhrDedicatedAllocationFn {
fn clone(&self) -> Self {
KhrDedicatedAllocationFn {}
}
}
impl KhrDedicatedAllocationFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
KhrDedicatedAllocationFn {}
}
}
impl ExtDebugUtilsFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_EXT_debug_utils\0")
.expect("Wrong extension string")
}
}
#[allow(non_camel_case_types)]
pub type PFN_vkSetDebugUtilsObjectNameEXT =
extern "system" fn(device: Device, p_name_info: *const DebugUtilsObjectNameInfoEXT) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkSetDebugUtilsObjectTagEXT =
extern "system" fn(device: Device, p_tag_info: *const DebugUtilsObjectTagInfoEXT) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkQueueBeginDebugUtilsLabelEXT =
extern "system" fn(queue: Queue, p_label_info: *const DebugUtilsLabelEXT) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkQueueEndDebugUtilsLabelEXT = extern "system" fn(queue: Queue) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkQueueInsertDebugUtilsLabelEXT =
extern "system" fn(queue: Queue, p_label_info: *const DebugUtilsLabelEXT) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdBeginDebugUtilsLabelEXT = extern "system" fn(
command_buffer: CommandBuffer,
p_label_info: *const DebugUtilsLabelEXT,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdEndDebugUtilsLabelEXT =
extern "system" fn(command_buffer: CommandBuffer) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdInsertDebugUtilsLabelEXT = extern "system" fn(
command_buffer: CommandBuffer,
p_label_info: *const DebugUtilsLabelEXT,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCreateDebugUtilsMessengerEXT = extern "system" fn(
instance: Instance,
p_create_info: *const DebugUtilsMessengerCreateInfoEXT,
p_allocator: *const AllocationCallbacks,
p_messenger: *mut DebugUtilsMessengerEXT,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkDestroyDebugUtilsMessengerEXT = extern "system" fn(
instance: Instance,
messenger: DebugUtilsMessengerEXT,
p_allocator: *const AllocationCallbacks,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkSubmitDebugUtilsMessageEXT = extern "system" fn(
instance: Instance,
message_severity: DebugUtilsMessageSeverityFlagsEXT,
message_types: DebugUtilsMessageTypeFlagsEXT,
p_callback_data: *const DebugUtilsMessengerCallbackDataEXT,
) -> c_void;
pub struct ExtDebugUtilsFn {
pub set_debug_utils_object_name_ext: extern "system" fn(
device: Device,
p_name_info: *const DebugUtilsObjectNameInfoEXT,
) -> Result,
pub set_debug_utils_object_tag_ext:
extern "system" fn(device: Device, p_tag_info: *const DebugUtilsObjectTagInfoEXT) -> Result,
pub queue_begin_debug_utils_label_ext:
extern "system" fn(queue: Queue, p_label_info: *const DebugUtilsLabelEXT) -> c_void,
pub queue_end_debug_utils_label_ext: extern "system" fn(queue: Queue) -> c_void,
pub queue_insert_debug_utils_label_ext:
extern "system" fn(queue: Queue, p_label_info: *const DebugUtilsLabelEXT) -> c_void,
pub cmd_begin_debug_utils_label_ext: extern "system" fn(
command_buffer: CommandBuffer,
p_label_info: *const DebugUtilsLabelEXT,
) -> c_void,
pub cmd_end_debug_utils_label_ext: extern "system" fn(command_buffer: CommandBuffer) -> c_void,
pub cmd_insert_debug_utils_label_ext: extern "system" fn(
command_buffer: CommandBuffer,
p_label_info: *const DebugUtilsLabelEXT,
) -> c_void,
pub create_debug_utils_messenger_ext: extern "system" fn(
instance: Instance,
p_create_info: *const DebugUtilsMessengerCreateInfoEXT,
p_allocator: *const AllocationCallbacks,
p_messenger: *mut DebugUtilsMessengerEXT,
) -> Result,
pub destroy_debug_utils_messenger_ext: extern "system" fn(
instance: Instance,
messenger: DebugUtilsMessengerEXT,
p_allocator: *const AllocationCallbacks,
) -> c_void,
pub submit_debug_utils_message_ext: extern "system" fn(
instance: Instance,
message_severity: DebugUtilsMessageSeverityFlagsEXT,
message_types: DebugUtilsMessageTypeFlagsEXT,
p_callback_data: *const DebugUtilsMessengerCallbackDataEXT,
) -> c_void,
}
unsafe impl Send for ExtDebugUtilsFn {}
unsafe impl Sync for ExtDebugUtilsFn {}
impl ::std::clone::Clone for ExtDebugUtilsFn {
fn clone(&self) -> Self {
ExtDebugUtilsFn {
set_debug_utils_object_name_ext: self.set_debug_utils_object_name_ext,
set_debug_utils_object_tag_ext: self.set_debug_utils_object_tag_ext,
queue_begin_debug_utils_label_ext: self.queue_begin_debug_utils_label_ext,
queue_end_debug_utils_label_ext: self.queue_end_debug_utils_label_ext,
queue_insert_debug_utils_label_ext: self.queue_insert_debug_utils_label_ext,
cmd_begin_debug_utils_label_ext: self.cmd_begin_debug_utils_label_ext,
cmd_end_debug_utils_label_ext: self.cmd_end_debug_utils_label_ext,
cmd_insert_debug_utils_label_ext: self.cmd_insert_debug_utils_label_ext,
create_debug_utils_messenger_ext: self.create_debug_utils_messenger_ext,
destroy_debug_utils_messenger_ext: self.destroy_debug_utils_messenger_ext,
submit_debug_utils_message_ext: self.submit_debug_utils_message_ext,
}
}
}
impl ExtDebugUtilsFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
ExtDebugUtilsFn {
set_debug_utils_object_name_ext: unsafe {
extern "system" fn set_debug_utils_object_name_ext(
_device: Device,
_p_name_info: *const DebugUtilsObjectNameInfoEXT,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(set_debug_utils_object_name_ext)
))
}
let raw_name = stringify!(vkSetDebugUtilsObjectNameEXT);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
set_debug_utils_object_name_ext
} else {
::std::mem::transmute(val)
}
},
set_debug_utils_object_tag_ext: unsafe {
extern "system" fn set_debug_utils_object_tag_ext(
_device: Device,
_p_tag_info: *const DebugUtilsObjectTagInfoEXT,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(set_debug_utils_object_tag_ext)
))
}
let raw_name = stringify!(vkSetDebugUtilsObjectTagEXT);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
set_debug_utils_object_tag_ext
} else {
::std::mem::transmute(val)
}
},
queue_begin_debug_utils_label_ext: unsafe {
extern "system" fn queue_begin_debug_utils_label_ext(
_queue: Queue,
_p_label_info: *const DebugUtilsLabelEXT,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(queue_begin_debug_utils_label_ext)
))
}
let raw_name = stringify!(vkQueueBeginDebugUtilsLabelEXT);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
queue_begin_debug_utils_label_ext
} else {
::std::mem::transmute(val)
}
},
queue_end_debug_utils_label_ext: unsafe {
extern "system" fn queue_end_debug_utils_label_ext(_queue: Queue) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(queue_end_debug_utils_label_ext)
))
}
let raw_name = stringify!(vkQueueEndDebugUtilsLabelEXT);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
queue_end_debug_utils_label_ext
} else {
::std::mem::transmute(val)
}
},
queue_insert_debug_utils_label_ext: unsafe {
extern "system" fn queue_insert_debug_utils_label_ext(
_queue: Queue,
_p_label_info: *const DebugUtilsLabelEXT,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(queue_insert_debug_utils_label_ext)
))
}
let raw_name = stringify!(vkQueueInsertDebugUtilsLabelEXT);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
queue_insert_debug_utils_label_ext
} else {
::std::mem::transmute(val)
}
},
cmd_begin_debug_utils_label_ext: unsafe {
extern "system" fn cmd_begin_debug_utils_label_ext(
_command_buffer: CommandBuffer,
_p_label_info: *const DebugUtilsLabelEXT,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(cmd_begin_debug_utils_label_ext)
))
}
let raw_name = stringify!(vkCmdBeginDebugUtilsLabelEXT);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_begin_debug_utils_label_ext
} else {
::std::mem::transmute(val)
}
},
cmd_end_debug_utils_label_ext: unsafe {
extern "system" fn cmd_end_debug_utils_label_ext(
_command_buffer: CommandBuffer,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(cmd_end_debug_utils_label_ext)
))
}
let raw_name = stringify!(vkCmdEndDebugUtilsLabelEXT);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_end_debug_utils_label_ext
} else {
::std::mem::transmute(val)
}
},
cmd_insert_debug_utils_label_ext: unsafe {
extern "system" fn cmd_insert_debug_utils_label_ext(
_command_buffer: CommandBuffer,
_p_label_info: *const DebugUtilsLabelEXT,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(cmd_insert_debug_utils_label_ext)
))
}
let raw_name = stringify!(vkCmdInsertDebugUtilsLabelEXT);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_insert_debug_utils_label_ext
} else {
::std::mem::transmute(val)
}
},
create_debug_utils_messenger_ext: unsafe {
extern "system" fn create_debug_utils_messenger_ext(
_instance: Instance,
_p_create_info: *const DebugUtilsMessengerCreateInfoEXT,
_p_allocator: *const AllocationCallbacks,
_p_messenger: *mut DebugUtilsMessengerEXT,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(create_debug_utils_messenger_ext)
))
}
let raw_name = stringify!(vkCreateDebugUtilsMessengerEXT);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
create_debug_utils_messenger_ext
} else {
::std::mem::transmute(val)
}
},
destroy_debug_utils_messenger_ext: unsafe {
extern "system" fn destroy_debug_utils_messenger_ext(
_instance: Instance,
_messenger: DebugUtilsMessengerEXT,
_p_allocator: *const AllocationCallbacks,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(destroy_debug_utils_messenger_ext)
))
}
let raw_name = stringify!(vkDestroyDebugUtilsMessengerEXT);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
destroy_debug_utils_messenger_ext
} else {
::std::mem::transmute(val)
}
},
submit_debug_utils_message_ext: unsafe {
extern "system" fn submit_debug_utils_message_ext(
_instance: Instance,
_message_severity: DebugUtilsMessageSeverityFlagsEXT,
_message_types: DebugUtilsMessageTypeFlagsEXT,
_p_callback_data: *const DebugUtilsMessengerCallbackDataEXT,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(submit_debug_utils_message_ext)
))
}
let raw_name = stringify!(vkSubmitDebugUtilsMessageEXT);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
submit_debug_utils_message_ext
} else {
::std::mem::transmute(val)
}
},
}
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkSetDebugUtilsObjectNameEXT.html>"]
pub unsafe fn set_debug_utils_object_name_ext(
&self,
device: Device,
p_name_info: *const DebugUtilsObjectNameInfoEXT,
) -> Result {
(self.set_debug_utils_object_name_ext)(device, p_name_info)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkSetDebugUtilsObjectTagEXT.html>"]
pub unsafe fn set_debug_utils_object_tag_ext(
&self,
device: Device,
p_tag_info: *const DebugUtilsObjectTagInfoEXT,
) -> Result {
(self.set_debug_utils_object_tag_ext)(device, p_tag_info)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkQueueBeginDebugUtilsLabelEXT.html>"]
pub unsafe fn queue_begin_debug_utils_label_ext(
&self,
queue: Queue,
p_label_info: *const DebugUtilsLabelEXT,
) -> c_void {
(self.queue_begin_debug_utils_label_ext)(queue, p_label_info)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkQueueEndDebugUtilsLabelEXT.html>"]
pub unsafe fn queue_end_debug_utils_label_ext(&self, queue: Queue) -> c_void {
(self.queue_end_debug_utils_label_ext)(queue)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkQueueInsertDebugUtilsLabelEXT.html>"]
pub unsafe fn queue_insert_debug_utils_label_ext(
&self,
queue: Queue,
p_label_info: *const DebugUtilsLabelEXT,
) -> c_void {
(self.queue_insert_debug_utils_label_ext)(queue, p_label_info)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdBeginDebugUtilsLabelEXT.html>"]
pub unsafe fn cmd_begin_debug_utils_label_ext(
&self,
command_buffer: CommandBuffer,
p_label_info: *const DebugUtilsLabelEXT,
) -> c_void {
(self.cmd_begin_debug_utils_label_ext)(command_buffer, p_label_info)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdEndDebugUtilsLabelEXT.html>"]
pub unsafe fn cmd_end_debug_utils_label_ext(&self, command_buffer: CommandBuffer) -> c_void {
(self.cmd_end_debug_utils_label_ext)(command_buffer)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdInsertDebugUtilsLabelEXT.html>"]
pub unsafe fn cmd_insert_debug_utils_label_ext(
&self,
command_buffer: CommandBuffer,
p_label_info: *const DebugUtilsLabelEXT,
) -> c_void {
(self.cmd_insert_debug_utils_label_ext)(command_buffer, p_label_info)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCreateDebugUtilsMessengerEXT.html>"]
pub unsafe fn create_debug_utils_messenger_ext(
&self,
instance: Instance,
p_create_info: *const DebugUtilsMessengerCreateInfoEXT,
p_allocator: *const AllocationCallbacks,
p_messenger: *mut DebugUtilsMessengerEXT,
) -> Result {
(self.create_debug_utils_messenger_ext)(instance, p_create_info, p_allocator, p_messenger)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkDestroyDebugUtilsMessengerEXT.html>"]
pub unsafe fn destroy_debug_utils_messenger_ext(
&self,
instance: Instance,
messenger: DebugUtilsMessengerEXT,
p_allocator: *const AllocationCallbacks,
) -> c_void {
(self.destroy_debug_utils_messenger_ext)(instance, messenger, p_allocator)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkSubmitDebugUtilsMessageEXT.html>"]
pub unsafe fn submit_debug_utils_message_ext(
&self,
instance: Instance,
message_severity: DebugUtilsMessageSeverityFlagsEXT,
message_types: DebugUtilsMessageTypeFlagsEXT,
p_callback_data: *const DebugUtilsMessengerCallbackDataEXT,
) -> c_void {
(self.submit_debug_utils_message_ext)(
instance,
message_severity,
message_types,
p_callback_data,
)
}
}
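// Illustrative sketch, not part of the generated bindings: ExtDebugUtilsFn::load
// resolves every vk* entry point through the provided closure, and any slot for
// which the closure returns null keeps a stub that panics with an
// "Unable to load ..." message when invoked. A closure that always returns null
// therefore yields a table that is safe to construct but must only be called if
// VK_EXT_debug_utils was actually enabled and resolved.
#[allow(dead_code)]
fn example_debug_utils_stub_table() -> ExtDebugUtilsFn {
    ExtDebugUtilsFn::load(|_name| ::std::ptr::null())
}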
#[doc = "Generated from \'VK_EXT_debug_utils\'"]
impl StructureType {
pub const DEBUG_UTILS_OBJECT_NAME_INFO_EXT: Self = StructureType(1000128000);
}
#[doc = "Generated from \'VK_EXT_debug_utils\'"]
impl StructureType {
pub const DEBUG_UTILS_OBJECT_TAG_INFO_EXT: Self = StructureType(1000128001);
}
#[doc = "Generated from \'VK_EXT_debug_utils\'"]
impl StructureType {
pub const DEBUG_UTILS_LABEL_EXT: Self = StructureType(1000128002);
}
#[doc = "Generated from \'VK_EXT_debug_utils\'"]
impl StructureType {
pub const DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT: Self = StructureType(1000128003);
}
#[doc = "Generated from \'VK_EXT_debug_utils\'"]
impl StructureType {
pub const DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT: Self = StructureType(1000128004);
}
#[doc = "Generated from \'VK_EXT_debug_utils\'"]
impl ObjectType {
pub const DEBUG_UTILS_MESSENGER_EXT: Self = ObjectType(1000128000);
}
impl AndroidExternalMemoryAndroidHardwareBufferFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(
b"VK_ANDROID_external_memory_android_hardware_buffer\0",
)
.expect("Wrong extension string")
}
}
#[allow(non_camel_case_types)]
pub type PFN_vkGetAndroidHardwareBufferPropertiesANDROID = extern "system" fn(
device: Device,
buffer: *const AHardwareBuffer,
p_properties: *mut AndroidHardwareBufferPropertiesANDROID,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkGetMemoryAndroidHardwareBufferANDROID = extern "system" fn(
device: Device,
p_info: *const MemoryGetAndroidHardwareBufferInfoANDROID,
p_buffer: *mut *mut AHardwareBuffer,
) -> Result;
pub struct AndroidExternalMemoryAndroidHardwareBufferFn {
pub get_android_hardware_buffer_properties_android: extern "system" fn(
device: Device,
buffer: *const AHardwareBuffer,
p_properties: *mut AndroidHardwareBufferPropertiesANDROID,
) -> Result,
pub get_memory_android_hardware_buffer_android: extern "system" fn(
device: Device,
p_info: *const MemoryGetAndroidHardwareBufferInfoANDROID,
p_buffer: *mut *mut AHardwareBuffer,
) -> Result,
}
unsafe impl Send for AndroidExternalMemoryAndroidHardwareBufferFn {}
unsafe impl Sync for AndroidExternalMemoryAndroidHardwareBufferFn {}
impl ::std::clone::Clone for AndroidExternalMemoryAndroidHardwareBufferFn {
fn clone(&self) -> Self {
AndroidExternalMemoryAndroidHardwareBufferFn {
get_android_hardware_buffer_properties_android: self
.get_android_hardware_buffer_properties_android,
get_memory_android_hardware_buffer_android: self
.get_memory_android_hardware_buffer_android,
}
}
}
impl AndroidExternalMemoryAndroidHardwareBufferFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
AndroidExternalMemoryAndroidHardwareBufferFn {
get_android_hardware_buffer_properties_android: unsafe {
extern "system" fn get_android_hardware_buffer_properties_android(
_device: Device,
_buffer: *const AHardwareBuffer,
_p_properties: *mut AndroidHardwareBufferPropertiesANDROID,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(get_android_hardware_buffer_properties_android)
))
}
let raw_name = stringify!(vkGetAndroidHardwareBufferPropertiesANDROID);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_android_hardware_buffer_properties_android
} else {
::std::mem::transmute(val)
}
},
get_memory_android_hardware_buffer_android: unsafe {
extern "system" fn get_memory_android_hardware_buffer_android(
_device: Device,
_p_info: *const MemoryGetAndroidHardwareBufferInfoANDROID,
_p_buffer: *mut *mut AHardwareBuffer,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(get_memory_android_hardware_buffer_android)
))
}
let raw_name = stringify!(vkGetMemoryAndroidHardwareBufferANDROID);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_memory_android_hardware_buffer_android
} else {
::std::mem::transmute(val)
}
},
}
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetAndroidHardwareBufferPropertiesANDROID.html>"]
pub unsafe fn get_android_hardware_buffer_properties_android(
&self,
device: Device,
buffer: *const AHardwareBuffer,
p_properties: *mut AndroidHardwareBufferPropertiesANDROID,
) -> Result {
(self.get_android_hardware_buffer_properties_android)(device, buffer, p_properties)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetMemoryAndroidHardwareBufferANDROID.html>"]
pub unsafe fn get_memory_android_hardware_buffer_android(
&self,
device: Device,
p_info: *const MemoryGetAndroidHardwareBufferInfoANDROID,
p_buffer: *mut *mut AHardwareBuffer,
) -> Result {
(self.get_memory_android_hardware_buffer_android)(device, p_info, p_buffer)
}
}
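// Illustrative sketch, not part of the generated bindings: a thin pass-through
// around the loaded vkGetAndroidHardwareBufferPropertiesANDROID entry point.
// `fp`, `device`, the NDK AHardwareBuffer pointer, and the output structure
// (with its `s_type` already initialized) are assumed to be supplied by the caller.
#[allow(dead_code)]
unsafe fn example_query_android_hardware_buffer_properties(
    fp: &AndroidExternalMemoryAndroidHardwareBufferFn,
    device: Device,
    buffer: *const AHardwareBuffer,
    properties: *mut AndroidHardwareBufferPropertiesANDROID,
) -> Result {
    fp.get_android_hardware_buffer_properties_android(device, buffer, properties)
}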
#[doc = "Generated from \'VK_ANDROID_external_memory_android_hardware_buffer\'"]
impl ExternalMemoryHandleTypeFlags {
pub const EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_ANDROID: Self =
ExternalMemoryHandleTypeFlags(0b10000000000);
}
#[doc = "Generated from \'VK_ANDROID_external_memory_android_hardware_buffer\'"]
impl StructureType {
pub const ANDROID_HARDWARE_BUFFER_USAGE_ANDROID: Self = StructureType(1000129000);
}
#[doc = "Generated from \'VK_ANDROID_external_memory_android_hardware_buffer\'"]
impl StructureType {
pub const ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID: Self = StructureType(1000129001);
}
#[doc = "Generated from \'VK_ANDROID_external_memory_android_hardware_buffer\'"]
impl StructureType {
pub const ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID: Self = StructureType(1000129002);
}
#[doc = "Generated from \'VK_ANDROID_external_memory_android_hardware_buffer\'"]
impl StructureType {
pub const IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID: Self = StructureType(1000129003);
}
#[doc = "Generated from \'VK_ANDROID_external_memory_android_hardware_buffer\'"]
impl StructureType {
pub const MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID: Self = StructureType(1000129004);
}
#[doc = "Generated from \'VK_ANDROID_external_memory_android_hardware_buffer\'"]
impl StructureType {
pub const EXTERNAL_FORMAT_ANDROID: Self = StructureType(1000129005);
}
impl ExtSamplerFilterMinmaxFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_EXT_sampler_filter_minmax\0")
.expect("Wrong extension string")
}
}
pub struct ExtSamplerFilterMinmaxFn {}
unsafe impl Send for ExtSamplerFilterMinmaxFn {}
unsafe impl Sync for ExtSamplerFilterMinmaxFn {}
impl ::std::clone::Clone for ExtSamplerFilterMinmaxFn {
fn clone(&self) -> Self {
ExtSamplerFilterMinmaxFn {}
}
}
impl ExtSamplerFilterMinmaxFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
ExtSamplerFilterMinmaxFn {}
}
}
#[doc = "Generated from \'VK_EXT_sampler_filter_minmax\'"]
impl StructureType {
pub const PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT: Self =
StructureType(1000130000);
}
#[doc = "Generated from \'VK_EXT_sampler_filter_minmax\'"]
impl StructureType {
pub const SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT: Self = StructureType(1000130001);
}
#[doc = "Generated from \'VK_EXT_sampler_filter_minmax\'"]
impl FormatFeatureFlags {
pub const SAMPLED_IMAGE_FILTER_MINMAX_EXT: Self = FormatFeatureFlags(0b10000000000000000);
}
impl KhrStorageBufferStorageClassFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_KHR_storage_buffer_storage_class\0")
.expect("Wrong extension string")
}
}
pub struct KhrStorageBufferStorageClassFn {}
unsafe impl Send for KhrStorageBufferStorageClassFn {}
unsafe impl Sync for KhrStorageBufferStorageClassFn {}
impl ::std::clone::Clone for KhrStorageBufferStorageClassFn {
fn clone(&self) -> Self {
KhrStorageBufferStorageClassFn {}
}
}
impl KhrStorageBufferStorageClassFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
KhrStorageBufferStorageClassFn {}
}
}
impl AmdGpuShaderInt16Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_AMD_gpu_shader_int16\0")
.expect("Wrong extension string")
}
}
pub struct AmdGpuShaderInt16Fn {}
unsafe impl Send for AmdGpuShaderInt16Fn {}
unsafe impl Sync for AmdGpuShaderInt16Fn {}
impl ::std::clone::Clone for AmdGpuShaderInt16Fn {
fn clone(&self) -> Self {
AmdGpuShaderInt16Fn {}
}
}
impl AmdGpuShaderInt16Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
AmdGpuShaderInt16Fn {}
}
}
impl AmdExtension134Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_AMD_extension_134\0")
.expect("Wrong extension string")
}
}
pub struct AmdExtension134Fn {}
unsafe impl Send for AmdExtension134Fn {}
unsafe impl Sync for AmdExtension134Fn {}
impl ::std::clone::Clone for AmdExtension134Fn {
fn clone(&self) -> Self {
AmdExtension134Fn {}
}
}
impl AmdExtension134Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
AmdExtension134Fn {}
}
}
impl AmdExtension135Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_AMD_extension_135\0")
.expect("Wrong extension string")
}
}
pub struct AmdExtension135Fn {}
unsafe impl Send for AmdExtension135Fn {}
unsafe impl Sync for AmdExtension135Fn {}
impl ::std::clone::Clone for AmdExtension135Fn {
fn clone(&self) -> Self {
AmdExtension135Fn {}
}
}
impl AmdExtension135Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
AmdExtension135Fn {}
}
}
impl AmdExtension136Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_AMD_extension_136\0")
.expect("Wrong extension string")
}
}
pub struct AmdExtension136Fn {}
unsafe impl Send for AmdExtension136Fn {}
unsafe impl Sync for AmdExtension136Fn {}
impl ::std::clone::Clone for AmdExtension136Fn {
fn clone(&self) -> Self {
AmdExtension136Fn {}
}
}
impl AmdExtension136Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
AmdExtension136Fn {}
}
}
impl AmdMixedAttachmentSamplesFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_AMD_mixed_attachment_samples\0")
.expect("Wrong extension string")
}
}
pub struct AmdMixedAttachmentSamplesFn {}
unsafe impl Send for AmdMixedAttachmentSamplesFn {}
unsafe impl Sync for AmdMixedAttachmentSamplesFn {}
impl ::std::clone::Clone for AmdMixedAttachmentSamplesFn {
fn clone(&self) -> Self {
AmdMixedAttachmentSamplesFn {}
}
}
impl AmdMixedAttachmentSamplesFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
AmdMixedAttachmentSamplesFn {}
}
}
impl AmdShaderFragmentMaskFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_AMD_shader_fragment_mask\0")
.expect("Wrong extension string")
}
}
pub struct AmdShaderFragmentMaskFn {}
unsafe impl Send for AmdShaderFragmentMaskFn {}
unsafe impl Sync for AmdShaderFragmentMaskFn {}
impl ::std::clone::Clone for AmdShaderFragmentMaskFn {
fn clone(&self) -> Self {
AmdShaderFragmentMaskFn {}
}
}
impl AmdShaderFragmentMaskFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
AmdShaderFragmentMaskFn {}
}
}
impl ExtInlineUniformBlockFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_EXT_inline_uniform_block\0")
.expect("Wrong extension string")
}
}
pub struct ExtInlineUniformBlockFn {}
unsafe impl Send for ExtInlineUniformBlockFn {}
unsafe impl Sync for ExtInlineUniformBlockFn {}
impl ::std::clone::Clone for ExtInlineUniformBlockFn {
fn clone(&self) -> Self {
ExtInlineUniformBlockFn {}
}
}
impl ExtInlineUniformBlockFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
ExtInlineUniformBlockFn {}
}
}
#[doc = "Generated from \'VK_EXT_inline_uniform_block\'"]
impl DescriptorType {
pub const INLINE_UNIFORM_BLOCK_EXT: Self = DescriptorType(1000138000);
}
#[doc = "Generated from \'VK_EXT_inline_uniform_block\'"]
impl StructureType {
pub const PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT: Self = StructureType(1000138000);
}
#[doc = "Generated from \'VK_EXT_inline_uniform_block\'"]
impl StructureType {
pub const PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT: Self = StructureType(1000138001);
}
#[doc = "Generated from \'VK_EXT_inline_uniform_block\'"]
impl StructureType {
pub const WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT: Self = StructureType(1000138002);
}
#[doc = "Generated from \'VK_EXT_inline_uniform_block\'"]
impl StructureType {
pub const DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO_EXT: Self =
StructureType(1000138003);
}
impl AmdExtension140Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_AMD_extension_140\0")
.expect("Wrong extension string")
}
}
pub struct AmdExtension140Fn {}
unsafe impl Send for AmdExtension140Fn {}
unsafe impl Sync for AmdExtension140Fn {}
impl ::std::clone::Clone for AmdExtension140Fn {
fn clone(&self) -> Self {
AmdExtension140Fn {}
}
}
impl AmdExtension140Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
AmdExtension140Fn {}
}
}
impl ExtShaderStencilExportFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_EXT_shader_stencil_export\0")
.expect("Wrong extension string")
}
}
pub struct ExtShaderStencilExportFn {}
unsafe impl Send for ExtShaderStencilExportFn {}
unsafe impl Sync for ExtShaderStencilExportFn {}
impl ::std::clone::Clone for ExtShaderStencilExportFn {
fn clone(&self) -> Self {
ExtShaderStencilExportFn {}
}
}
impl ExtShaderStencilExportFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
ExtShaderStencilExportFn {}
}
}
impl AmdExtension142Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_AMD_extension_142\0")
.expect("Wrong extension string")
}
}
pub struct AmdExtension142Fn {}
unsafe impl Send for AmdExtension142Fn {}
unsafe impl Sync for AmdExtension142Fn {}
impl ::std::clone::Clone for AmdExtension142Fn {
fn clone(&self) -> Self {
AmdExtension142Fn {}
}
}
impl AmdExtension142Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
AmdExtension142Fn {}
}
}
impl AmdExtension143Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_AMD_extension_143\0")
.expect("Wrong extension string")
}
}
pub struct AmdExtension143Fn {}
unsafe impl Send for AmdExtension143Fn {}
unsafe impl Sync for AmdExtension143Fn {}
impl ::std::clone::Clone for AmdExtension143Fn {
fn clone(&self) -> Self {
AmdExtension143Fn {}
}
}
impl AmdExtension143Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
AmdExtension143Fn {}
}
}
impl ExtSampleLocationsFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_EXT_sample_locations\0")
.expect("Wrong extension string")
}
}
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetSampleLocationsEXT = extern "system" fn(
command_buffer: CommandBuffer,
p_sample_locations_info: *const SampleLocationsInfoEXT,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT = extern "system" fn(
physical_device: PhysicalDevice,
samples: SampleCountFlags,
p_multisample_properties: *mut MultisamplePropertiesEXT,
) -> c_void;
pub struct ExtSampleLocationsFn {
pub cmd_set_sample_locations_ext: extern "system" fn(
command_buffer: CommandBuffer,
p_sample_locations_info: *const SampleLocationsInfoEXT,
) -> c_void,
pub get_physical_device_multisample_properties_ext: extern "system" fn(
physical_device: PhysicalDevice,
samples: SampleCountFlags,
p_multisample_properties: *mut MultisamplePropertiesEXT,
) -> c_void,
}
unsafe impl Send for ExtSampleLocationsFn {}
unsafe impl Sync for ExtSampleLocationsFn {}
impl ::std::clone::Clone for ExtSampleLocationsFn {
fn clone(&self) -> Self {
ExtSampleLocationsFn {
cmd_set_sample_locations_ext: self.cmd_set_sample_locations_ext,
get_physical_device_multisample_properties_ext: self
.get_physical_device_multisample_properties_ext,
}
}
}
impl ExtSampleLocationsFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
ExtSampleLocationsFn {
cmd_set_sample_locations_ext: unsafe {
extern "system" fn cmd_set_sample_locations_ext(
_command_buffer: CommandBuffer,
_p_sample_locations_info: *const SampleLocationsInfoEXT,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(cmd_set_sample_locations_ext)
))
}
let raw_name = stringify!(vkCmdSetSampleLocationsEXT);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_set_sample_locations_ext
} else {
::std::mem::transmute(val)
}
},
get_physical_device_multisample_properties_ext: unsafe {
extern "system" fn get_physical_device_multisample_properties_ext(
_physical_device: PhysicalDevice,
_samples: SampleCountFlags,
_p_multisample_properties: *mut MultisamplePropertiesEXT,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(get_physical_device_multisample_properties_ext)
))
}
let raw_name = stringify!(vkGetPhysicalDeviceMultisamplePropertiesEXT);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_physical_device_multisample_properties_ext
} else {
::std::mem::transmute(val)
}
},
}
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdSetSampleLocationsEXT.html>"]
pub unsafe fn cmd_set_sample_locations_ext(
&self,
command_buffer: CommandBuffer,
p_sample_locations_info: *const SampleLocationsInfoEXT,
) -> c_void {
(self.cmd_set_sample_locations_ext)(command_buffer, p_sample_locations_info)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetPhysicalDeviceMultisamplePropertiesEXT.html>"]
pub unsafe fn get_physical_device_multisample_properties_ext(
&self,
physical_device: PhysicalDevice,
samples: SampleCountFlags,
p_multisample_properties: *mut MultisamplePropertiesEXT,
) -> c_void {
(self.get_physical_device_multisample_properties_ext)(
physical_device,
samples,
p_multisample_properties,
)
}
}
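// Illustrative sketch, not part of the generated bindings: querying the sample
// location grid reported for a given sample count. `fp`, `physical_device`,
// `samples`, and the output structure (with its `s_type` already initialized)
// are assumed to be supplied by the caller.
#[allow(dead_code)]
unsafe fn example_query_multisample_properties(
    fp: &ExtSampleLocationsFn,
    physical_device: PhysicalDevice,
    samples: SampleCountFlags,
    properties: *mut MultisamplePropertiesEXT,
) {
    fp.get_physical_device_multisample_properties_ext(physical_device, samples, properties);
}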
#[doc = "Generated from \'VK_EXT_sample_locations\'"]
impl ImageCreateFlags {
pub const SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_EXT: Self = ImageCreateFlags(0b1000000000000);
}
#[doc = "Generated from \'VK_EXT_sample_locations\'"]
impl StructureType {
pub const SAMPLE_LOCATIONS_INFO_EXT: Self = StructureType(1000143000);
}
#[doc = "Generated from \'VK_EXT_sample_locations\'"]
impl StructureType {
pub const RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT: Self = StructureType(1000143001);
}
#[doc = "Generated from \'VK_EXT_sample_locations\'"]
impl StructureType {
pub const PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT: Self = StructureType(1000143002);
}
#[doc = "Generated from \'VK_EXT_sample_locations\'"]
impl StructureType {
pub const PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT: Self = StructureType(1000143003);
}
#[doc = "Generated from \'VK_EXT_sample_locations\'"]
impl StructureType {
pub const MULTISAMPLE_PROPERTIES_EXT: Self = StructureType(1000143004);
}
#[doc = "Generated from \'VK_EXT_sample_locations\'"]
impl DynamicState {
pub const SAMPLE_LOCATIONS_EXT: Self = DynamicState(1000143000);
}
impl KhrRelaxedBlockLayoutFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_KHR_relaxed_block_layout\0")
.expect("Wrong extension string")
}
}
pub struct KhrRelaxedBlockLayoutFn {}
unsafe impl Send for KhrRelaxedBlockLayoutFn {}
unsafe impl Sync for KhrRelaxedBlockLayoutFn {}
impl ::std::clone::Clone for KhrRelaxedBlockLayoutFn {
fn clone(&self) -> Self {
KhrRelaxedBlockLayoutFn {}
}
}
impl KhrRelaxedBlockLayoutFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
KhrRelaxedBlockLayoutFn {}
}
}
impl KhrGetMemoryRequirements2Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_KHR_get_memory_requirements2\0")
.expect("Wrong extension string")
}
}
pub struct KhrGetMemoryRequirements2Fn {}
unsafe impl Send for KhrGetMemoryRequirements2Fn {}
unsafe impl Sync for KhrGetMemoryRequirements2Fn {}
impl ::std::clone::Clone for KhrGetMemoryRequirements2Fn {
fn clone(&self) -> Self {
KhrGetMemoryRequirements2Fn {}
}
}
impl KhrGetMemoryRequirements2Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
KhrGetMemoryRequirements2Fn {}
}
}
impl KhrImageFormatListFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_KHR_image_format_list\0")
.expect("Wrong extension string")
}
}
pub struct KhrImageFormatListFn {}
unsafe impl Send for KhrImageFormatListFn {}
unsafe impl Sync for KhrImageFormatListFn {}
impl ::std::clone::Clone for KhrImageFormatListFn {
fn clone(&self) -> Self {
KhrImageFormatListFn {}
}
}
impl KhrImageFormatListFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
KhrImageFormatListFn {}
}
}
#[doc = "Generated from \'VK_KHR_image_format_list\'"]
impl StructureType {
pub const IMAGE_FORMAT_LIST_CREATE_INFO_KHR: Self = StructureType(1000147000);
}
impl ExtBlendOperationAdvancedFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_EXT_blend_operation_advanced\0")
.expect("Wrong extension string")
}
}
pub struct ExtBlendOperationAdvancedFn {}
unsafe impl Send for ExtBlendOperationAdvancedFn {}
unsafe impl Sync for ExtBlendOperationAdvancedFn {}
impl ::std::clone::Clone for ExtBlendOperationAdvancedFn {
fn clone(&self) -> Self {
ExtBlendOperationAdvancedFn {}
}
}
impl ExtBlendOperationAdvancedFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
ExtBlendOperationAdvancedFn {}
}
}
#[doc = "Generated from \'VK_EXT_blend_operation_advanced\'"]
impl StructureType {
pub const PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT: Self =
StructureType(1000148000);
}
#[doc = "Generated from \'VK_EXT_blend_operation_advanced\'"]
impl StructureType {
pub const PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT: Self =
StructureType(1000148001);
}
#[doc = "Generated from \'VK_EXT_blend_operation_advanced\'"]
impl StructureType {
pub const PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT: Self = StructureType(1000148002);
}
#[doc = "Generated from \'VK_EXT_blend_operation_advanced\'"]
impl BlendOp {
pub const ZERO_EXT: Self = BlendOp(1000148000);
}
#[doc = "Generated from \'VK_EXT_blend_operation_advanced\'"]
impl BlendOp {
pub const SRC_EXT: Self = BlendOp(1000148001);
}
#[doc = "Generated from \'VK_EXT_blend_operation_advanced\'"]
impl BlendOp {
pub const DST_EXT: Self = BlendOp(1000148002);
}
#[doc = "Generated from \'VK_EXT_blend_operation_advanced\'"]
impl BlendOp {
pub const SRC_OVER_EXT: Self = BlendOp(1000148003);
}
#[doc = "Generated from \'VK_EXT_blend_operation_advanced\'"]
impl BlendOp {
pub const DST_OVER_EXT: Self = BlendOp(1000148004);
}
#[doc = "Generated from \'VK_EXT_blend_operation_advanced\'"]
impl BlendOp {
pub const SRC_IN_EXT: Self = BlendOp(1000148005);
}
#[doc = "Generated from \'VK_EXT_blend_operation_advanced\'"]
impl BlendOp {
pub const DST_IN_EXT: Self = BlendOp(1000148006);
}
#[doc = "Generated from \'VK_EXT_blend_operation_advanced\'"]
impl BlendOp {
pub const SRC_OUT_EXT: Self = BlendOp(1000148007);
}
#[doc = "Generated from \'VK_EXT_blend_operation_advanced\'"]
impl BlendOp {
pub const DST_OUT_EXT: Self = BlendOp(1000148008);
}
#[doc = "Generated from \'VK_EXT_blend_operation_advanced\'"]
impl BlendOp {
pub const SRC_ATOP_EXT: Self = BlendOp(1000148009);
}
#[doc = "Generated from \'VK_EXT_blend_operation_advanced\'"]
impl BlendOp {
pub const DST_ATOP_EXT: Self = BlendOp(1000148010);
}
#[doc = "Generated from \'VK_EXT_blend_operation_advanced\'"]
impl BlendOp {
pub const XOR_EXT: Self = BlendOp(1000148011);
}
#[doc = "Generated from \'VK_EXT_blend_operation_advanced\'"]
impl BlendOp {
pub const MULTIPLY_EXT: Self = BlendOp(1000148012);
}
#[doc = "Generated from \'VK_EXT_blend_operation_advanced\'"]
impl BlendOp {
pub const SCREEN_EXT: Self = BlendOp(1000148013);
}
#[doc = "Generated from \'VK_EXT_blend_operation_advanced\'"]
impl BlendOp {
pub const OVERLAY_EXT: Self = BlendOp(1000148014);
}
#[doc = "Generated from \'VK_EXT_blend_operation_advanced\'"]
impl BlendOp {
pub const DARKEN_EXT: Self = BlendOp(1000148015);
}
#[doc = "Generated from \'VK_EXT_blend_operation_advanced\'"]
impl BlendOp {
pub const LIGHTEN_EXT: Self = BlendOp(1000148016);
}
#[doc = "Generated from \'VK_EXT_blend_operation_advanced\'"]
impl BlendOp {
pub const COLORDODGE_EXT: Self = BlendOp(1000148017);
}
#[doc = "Generated from \'VK_EXT_blend_operation_advanced\'"]
impl BlendOp {
pub const COLORBURN_EXT: Self = BlendOp(1000148018);
}
#[doc = "Generated from \'VK_EXT_blend_operation_advanced\'"]
impl BlendOp {
pub const HARDLIGHT_EXT: Self = BlendOp(1000148019);
}
#[doc = "Generated from \'VK_EXT_blend_operation_advanced\'"]
impl BlendOp {
pub const SOFTLIGHT_EXT: Self = BlendOp(1000148020);
}
#[doc = "Generated from \'VK_EXT_blend_operation_advanced\'"]
impl BlendOp {
pub const DIFFERENCE_EXT: Self = BlendOp(1000148021);
}
#[doc = "Generated from \'VK_EXT_blend_operation_advanced\'"]
impl BlendOp {
pub const EXCLUSION_EXT: Self = BlendOp(1000148022);
}
#[doc = "Generated from \'VK_EXT_blend_operation_advanced\'"]
impl BlendOp {
pub const INVERT_EXT: Self = BlendOp(1000148023);
}
#[doc = "Generated from \'VK_EXT_blend_operation_advanced\'"]
impl BlendOp {
pub const INVERT_RGB_EXT: Self = BlendOp(1000148024);
}
#[doc = "Generated from \'VK_EXT_blend_operation_advanced\'"]
impl BlendOp {
pub const LINEARDODGE_EXT: Self = BlendOp(1000148025);
}
#[doc = "Generated from \'VK_EXT_blend_operation_advanced\'"]
impl BlendOp {
pub const LINEARBURN_EXT: Self = BlendOp(1000148026);
}
#[doc = "Generated from \'VK_EXT_blend_operation_advanced\'"]
impl BlendOp {
pub const VIVIDLIGHT_EXT: Self = BlendOp(1000148027);
}
#[doc = "Generated from \'VK_EXT_blend_operation_advanced\'"]
impl BlendOp {
pub const LINEARLIGHT_EXT: Self = BlendOp(1000148028);
}
#[doc = "Generated from \'VK_EXT_blend_operation_advanced\'"]
impl BlendOp {
pub const PINLIGHT_EXT: Self = BlendOp(1000148029);
}
#[doc = "Generated from \'VK_EXT_blend_operation_advanced\'"]
impl BlendOp {
pub const HARDMIX_EXT: Self = BlendOp(1000148030);
}
#[doc = "Generated from \'VK_EXT_blend_operation_advanced\'"]
impl BlendOp {
pub const HSL_HUE_EXT: Self = BlendOp(1000148031);
}
#[doc = "Generated from \'VK_EXT_blend_operation_advanced\'"]
impl BlendOp {
pub const HSL_SATURATION_EXT: Self = BlendOp(1000148032);
}
#[doc = "Generated from \'VK_EXT_blend_operation_advanced\'"]
impl BlendOp {
pub const HSL_COLOR_EXT: Self = BlendOp(1000148033);
}
#[doc = "Generated from \'VK_EXT_blend_operation_advanced\'"]
impl BlendOp {
pub const HSL_LUMINOSITY_EXT: Self = BlendOp(1000148034);
}
#[doc = "Generated from \'VK_EXT_blend_operation_advanced\'"]
impl BlendOp {
pub const PLUS_EXT: Self = BlendOp(1000148035);
}
#[doc = "Generated from \'VK_EXT_blend_operation_advanced\'"]
impl BlendOp {
pub const PLUS_CLAMPED_EXT: Self = BlendOp(1000148036);
}
#[doc = "Generated from \'VK_EXT_blend_operation_advanced\'"]
impl BlendOp {
pub const PLUS_CLAMPED_ALPHA_EXT: Self = BlendOp(1000148037);
}
#[doc = "Generated from \'VK_EXT_blend_operation_advanced\'"]
impl BlendOp {
pub const PLUS_DARKER_EXT: Self = BlendOp(1000148038);
}
#[doc = "Generated from \'VK_EXT_blend_operation_advanced\'"]
impl BlendOp {
pub const MINUS_EXT: Self = BlendOp(1000148039);
}
#[doc = "Generated from \'VK_EXT_blend_operation_advanced\'"]
impl BlendOp {
pub const MINUS_CLAMPED_EXT: Self = BlendOp(1000148040);
}
#[doc = "Generated from \'VK_EXT_blend_operation_advanced\'"]
impl BlendOp {
pub const CONTRAST_EXT: Self = BlendOp(1000148041);
}
#[doc = "Generated from \'VK_EXT_blend_operation_advanced\'"]
impl BlendOp {
pub const INVERT_OVG_EXT: Self = BlendOp(1000148042);
}
#[doc = "Generated from \'VK_EXT_blend_operation_advanced\'"]
impl BlendOp {
pub const RED_EXT: Self = BlendOp(1000148043);
}
#[doc = "Generated from \'VK_EXT_blend_operation_advanced\'"]
impl BlendOp {
pub const GREEN_EXT: Self = BlendOp(1000148044);
}
#[doc = "Generated from \'VK_EXT_blend_operation_advanced\'"]
impl BlendOp {
pub const BLUE_EXT: Self = BlendOp(1000148045);
}
#[doc = "Generated from \'VK_EXT_blend_operation_advanced\'"]
impl AccessFlags {
pub const COLOR_ATTACHMENT_READ_NONCOHERENT_EXT: Self = AccessFlags(0b10000000000000000000);
}
impl NvFragmentCoverageToColorFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_NV_fragment_coverage_to_color\0")
.expect("Wrong extension string")
}
}
pub struct NvFragmentCoverageToColorFn {}
unsafe impl Send for NvFragmentCoverageToColorFn {}
unsafe impl Sync for NvFragmentCoverageToColorFn {}
impl ::std::clone::Clone for NvFragmentCoverageToColorFn {
fn clone(&self) -> Self {
NvFragmentCoverageToColorFn {}
}
}
impl NvFragmentCoverageToColorFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
NvFragmentCoverageToColorFn {}
}
}
#[doc = "Generated from \'VK_NV_fragment_coverage_to_color\'"]
impl StructureType {
pub const PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV: Self = StructureType(1000149000);
}
impl NvExtension151Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_NV_extension_151\0")
.expect("Wrong extension string")
}
}
pub struct NvExtension151Fn {}
unsafe impl Send for NvExtension151Fn {}
unsafe impl Sync for NvExtension151Fn {}
impl ::std::clone::Clone for NvExtension151Fn {
fn clone(&self) -> Self {
NvExtension151Fn {}
}
}
impl NvExtension151Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
NvExtension151Fn {}
}
}
impl NvExtension152Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_NV_extension_152\0")
.expect("Wrong extension string")
}
}
pub struct NvExtension152Fn {}
unsafe impl Send for NvExtension152Fn {}
unsafe impl Sync for NvExtension152Fn {}
impl ::std::clone::Clone for NvExtension152Fn {
fn clone(&self) -> Self {
NvExtension152Fn {}
}
}
impl NvExtension152Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
NvExtension152Fn {}
}
}
impl NvFramebufferMixedSamplesFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_NV_framebuffer_mixed_samples\0")
.expect("Wrong extension string")
}
}
pub struct NvFramebufferMixedSamplesFn {}
unsafe impl Send for NvFramebufferMixedSamplesFn {}
unsafe impl Sync for NvFramebufferMixedSamplesFn {}
impl ::std::clone::Clone for NvFramebufferMixedSamplesFn {
fn clone(&self) -> Self {
NvFramebufferMixedSamplesFn {}
}
}
impl NvFramebufferMixedSamplesFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
NvFramebufferMixedSamplesFn {}
}
}
#[doc = "Generated from \'VK_NV_framebuffer_mixed_samples\'"]
impl StructureType {
pub const PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV: Self = StructureType(1000152000);
}
impl NvFillRectangleFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_NV_fill_rectangle\0")
.expect("Wrong extension string")
}
}
pub struct NvFillRectangleFn {}
unsafe impl Send for NvFillRectangleFn {}
unsafe impl Sync for NvFillRectangleFn {}
impl ::std::clone::Clone for NvFillRectangleFn {
fn clone(&self) -> Self {
NvFillRectangleFn {}
}
}
impl NvFillRectangleFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
NvFillRectangleFn {}
}
}
#[doc = "Generated from \'VK_NV_fill_rectangle\'"]
impl PolygonMode {
pub const FILL_RECTANGLE_NV: Self = PolygonMode(1000153000);
}
impl NvExtension155Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_NV_extension_155\0")
.expect("Wrong extension string")
}
}
pub struct NvExtension155Fn {}
unsafe impl Send for NvExtension155Fn {}
unsafe impl Sync for NvExtension155Fn {}
impl ::std::clone::Clone for NvExtension155Fn {
fn clone(&self) -> Self {
NvExtension155Fn {}
}
}
impl NvExtension155Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
NvExtension155Fn {}
}
}
impl ExtPostDepthCoverageFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_EXT_post_depth_coverage\0")
.expect("Wrong extension string")
}
}
pub struct ExtPostDepthCoverageFn {}
unsafe impl Send for ExtPostDepthCoverageFn {}
unsafe impl Sync for ExtPostDepthCoverageFn {}
impl ::std::clone::Clone for ExtPostDepthCoverageFn {
fn clone(&self) -> Self {
ExtPostDepthCoverageFn {}
}
}
impl ExtPostDepthCoverageFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
ExtPostDepthCoverageFn {}
}
}
impl KhrSamplerYcbcrConversionFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_KHR_sampler_ycbcr_conversion\0")
.expect("Wrong extension string")
}
}
pub struct KhrSamplerYcbcrConversionFn {}
unsafe impl Send for KhrSamplerYcbcrConversionFn {}
unsafe impl Sync for KhrSamplerYcbcrConversionFn {}
impl ::std::clone::Clone for KhrSamplerYcbcrConversionFn {
fn clone(&self) -> Self {
KhrSamplerYcbcrConversionFn {}
}
}
impl KhrSamplerYcbcrConversionFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
KhrSamplerYcbcrConversionFn {}
}
}
impl KhrBindMemory2Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_KHR_bind_memory2\0")
.expect("Wrong extension string")
}
}
pub struct KhrBindMemory2Fn {}
unsafe impl Send for KhrBindMemory2Fn {}
unsafe impl Sync for KhrBindMemory2Fn {}
impl ::std::clone::Clone for KhrBindMemory2Fn {
fn clone(&self) -> Self {
KhrBindMemory2Fn {}
}
}
impl KhrBindMemory2Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
KhrBindMemory2Fn {}
}
}
impl ExtImageDrmFormatModifierFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_EXT_image_drm_format_modifier\0")
.expect("Wrong extension string")
}
}
#[allow(non_camel_case_types)]
pub type PFN_vkGetImageDrmFormatModifierPropertiesEXT = extern "system" fn(
device: Device,
image: Image,
p_properties: *mut ImageDrmFormatModifierPropertiesEXT,
) -> Result;
pub struct ExtImageDrmFormatModifierFn {
pub get_image_drm_format_modifier_properties_ext: extern "system" fn(
device: Device,
image: Image,
p_properties: *mut ImageDrmFormatModifierPropertiesEXT,
) -> Result,
}
unsafe impl Send for ExtImageDrmFormatModifierFn {}
unsafe impl Sync for ExtImageDrmFormatModifierFn {}
impl ::std::clone::Clone for ExtImageDrmFormatModifierFn {
fn clone(&self) -> Self {
ExtImageDrmFormatModifierFn {
get_image_drm_format_modifier_properties_ext: self
.get_image_drm_format_modifier_properties_ext,
}
}
}
impl ExtImageDrmFormatModifierFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
ExtImageDrmFormatModifierFn {
get_image_drm_format_modifier_properties_ext: unsafe {
extern "system" fn get_image_drm_format_modifier_properties_ext(
_device: Device,
_image: Image,
_p_properties: *mut ImageDrmFormatModifierPropertiesEXT,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(get_image_drm_format_modifier_properties_ext)
))
}
let raw_name = stringify!(vkGetImageDrmFormatModifierPropertiesEXT);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_image_drm_format_modifier_properties_ext
} else {
::std::mem::transmute(val)
}
},
}
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetImageDrmFormatModifierPropertiesEXT.html>"]
pub unsafe fn get_image_drm_format_modifier_properties_ext(
&self,
device: Device,
image: Image,
p_properties: *mut ImageDrmFormatModifierPropertiesEXT,
) -> Result {
(self.get_image_drm_format_modifier_properties_ext)(device, image, p_properties)
}
}
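// Illustrative sketch, not part of the generated bindings: `load` expects a
// `vkGetDeviceProcAddr`-style closure supplied by the caller. The names
// `get_device_proc_addr`, `device` and `image` below are assumptions for the
// example, not items defined in this module.
//
//     let drm_fn = ExtImageDrmFormatModifierFn::load(|name| unsafe {
//         get_device_proc_addr(device, name.as_ptr()) as *const c_void
//     });
//     let mut props = ImageDrmFormatModifierPropertiesEXT::default();
//     let result = unsafe {
//         drm_fn.get_image_drm_format_modifier_properties_ext(device, image, &mut props)
//     };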
#[doc = "Generated from \'VK_EXT_image_drm_format_modifier\'"]
impl Result {
pub const ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT: Self = Result(-1000158000);
}
#[doc = "Generated from \'VK_EXT_image_drm_format_modifier\'"]
impl StructureType {
pub const DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT: Self = StructureType(1000158000);
}
#[doc = "Generated from \'VK_EXT_image_drm_format_modifier\'"]
impl StructureType {
pub const DRM_FORMAT_MODIFIER_PROPERTIES_EXT: Self = StructureType(1000158001);
}
#[doc = "Generated from \'VK_EXT_image_drm_format_modifier\'"]
impl StructureType {
pub const PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT: Self = StructureType(1000158002);
}
#[doc = "Generated from \'VK_EXT_image_drm_format_modifier\'"]
impl StructureType {
pub const IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT: Self = StructureType(1000158003);
}
#[doc = "Generated from \'VK_EXT_image_drm_format_modifier\'"]
impl StructureType {
pub const IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT: Self = StructureType(1000158004);
}
#[doc = "Generated from \'VK_EXT_image_drm_format_modifier\'"]
impl StructureType {
pub const IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT: Self = StructureType(1000158005);
}
#[doc = "Generated from \'VK_EXT_image_drm_format_modifier\'"]
impl ImageTiling {
pub const DRM_FORMAT_MODIFIER_EXT: Self = ImageTiling(1000158000);
}
#[doc = "Generated from \'VK_EXT_image_drm_format_modifier\'"]
impl ImageAspectFlags {
pub const MEMORY_PLANE_0_EXT: Self = ImageAspectFlags(0b10000000);
}
#[doc = "Generated from \'VK_EXT_image_drm_format_modifier\'"]
impl ImageAspectFlags {
pub const MEMORY_PLANE_1_EXT: Self = ImageAspectFlags(0b100000000);
}
#[doc = "Generated from \'VK_EXT_image_drm_format_modifier\'"]
impl ImageAspectFlags {
pub const MEMORY_PLANE_2_EXT: Self = ImageAspectFlags(0b1000000000);
}
#[doc = "Generated from \'VK_EXT_image_drm_format_modifier\'"]
impl ImageAspectFlags {
pub const MEMORY_PLANE_3_EXT: Self = ImageAspectFlags(0b10000000000);
}
impl ExtExtension160Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_EXT_extension_160\0")
.expect("Wrong extension string")
}
}
pub struct ExtExtension160Fn {}
unsafe impl Send for ExtExtension160Fn {}
unsafe impl Sync for ExtExtension160Fn {}
impl ::std::clone::Clone for ExtExtension160Fn {
fn clone(&self) -> Self {
ExtExtension160Fn {}
}
}
impl ExtExtension160Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
ExtExtension160Fn {}
}
}
impl ExtValidationCacheFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_EXT_validation_cache\0")
.expect("Wrong extension string")
}
}
#[allow(non_camel_case_types)]
pub type PFN_vkCreateValidationCacheEXT = extern "system" fn(
device: Device,
p_create_info: *const ValidationCacheCreateInfoEXT,
p_allocator: *const AllocationCallbacks,
p_validation_cache: *mut ValidationCacheEXT,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkDestroyValidationCacheEXT = extern "system" fn(
device: Device,
validation_cache: ValidationCacheEXT,
p_allocator: *const AllocationCallbacks,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkMergeValidationCachesEXT = extern "system" fn(
device: Device,
dst_cache: ValidationCacheEXT,
src_cache_count: u32,
p_src_caches: *const ValidationCacheEXT,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkGetValidationCacheDataEXT = extern "system" fn(
device: Device,
validation_cache: ValidationCacheEXT,
p_data_size: *mut usize,
p_data: *mut c_void,
) -> Result;
pub struct ExtValidationCacheFn {
pub create_validation_cache_ext: extern "system" fn(
device: Device,
p_create_info: *const ValidationCacheCreateInfoEXT,
p_allocator: *const AllocationCallbacks,
p_validation_cache: *mut ValidationCacheEXT,
) -> Result,
pub destroy_validation_cache_ext: extern "system" fn(
device: Device,
validation_cache: ValidationCacheEXT,
p_allocator: *const AllocationCallbacks,
) -> c_void,
pub merge_validation_caches_ext: extern "system" fn(
device: Device,
dst_cache: ValidationCacheEXT,
src_cache_count: u32,
p_src_caches: *const ValidationCacheEXT,
) -> Result,
pub get_validation_cache_data_ext: extern "system" fn(
device: Device,
validation_cache: ValidationCacheEXT,
p_data_size: *mut usize,
p_data: *mut c_void,
) -> Result,
}
unsafe impl Send for ExtValidationCacheFn {}
unsafe impl Sync for ExtValidationCacheFn {}
impl ::std::clone::Clone for ExtValidationCacheFn {
fn clone(&self) -> Self {
ExtValidationCacheFn {
create_validation_cache_ext: self.create_validation_cache_ext,
destroy_validation_cache_ext: self.destroy_validation_cache_ext,
merge_validation_caches_ext: self.merge_validation_caches_ext,
get_validation_cache_data_ext: self.get_validation_cache_data_ext,
}
}
}
impl ExtValidationCacheFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
ExtValidationCacheFn {
create_validation_cache_ext: unsafe {
extern "system" fn create_validation_cache_ext(
_device: Device,
_p_create_info: *const ValidationCacheCreateInfoEXT,
_p_allocator: *const AllocationCallbacks,
_p_validation_cache: *mut ValidationCacheEXT,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(create_validation_cache_ext)
))
}
let raw_name = stringify!(vkCreateValidationCacheEXT);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
create_validation_cache_ext
} else {
::std::mem::transmute(val)
}
},
destroy_validation_cache_ext: unsafe {
extern "system" fn destroy_validation_cache_ext(
_device: Device,
_validation_cache: ValidationCacheEXT,
_p_allocator: *const AllocationCallbacks,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(destroy_validation_cache_ext)
))
}
let raw_name = stringify!(vkDestroyValidationCacheEXT);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
destroy_validation_cache_ext
} else {
::std::mem::transmute(val)
}
},
merge_validation_caches_ext: unsafe {
extern "system" fn merge_validation_caches_ext(
_device: Device,
_dst_cache: ValidationCacheEXT,
_src_cache_count: u32,
_p_src_caches: *const ValidationCacheEXT,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(merge_validation_caches_ext)
))
}
let raw_name = stringify!(vkMergeValidationCachesEXT);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
merge_validation_caches_ext
} else {
::std::mem::transmute(val)
}
},
get_validation_cache_data_ext: unsafe {
extern "system" fn get_validation_cache_data_ext(
_device: Device,
_validation_cache: ValidationCacheEXT,
_p_data_size: *mut usize,
_p_data: *mut c_void,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(get_validation_cache_data_ext)
))
}
let raw_name = stringify!(vkGetValidationCacheDataEXT);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_validation_cache_data_ext
} else {
::std::mem::transmute(val)
}
},
}
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCreateValidationCacheEXT.html>"]
pub unsafe fn create_validation_cache_ext(
&self,
device: Device,
p_create_info: *const ValidationCacheCreateInfoEXT,
p_allocator: *const AllocationCallbacks,
p_validation_cache: *mut ValidationCacheEXT,
) -> Result {
(self.create_validation_cache_ext)(device, p_create_info, p_allocator, p_validation_cache)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkDestroyValidationCacheEXT.html>"]
pub unsafe fn destroy_validation_cache_ext(
&self,
device: Device,
validation_cache: ValidationCacheEXT,
p_allocator: *const AllocationCallbacks,
) -> c_void {
(self.destroy_validation_cache_ext)(device, validation_cache, p_allocator)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkMergeValidationCachesEXT.html>"]
pub unsafe fn merge_validation_caches_ext(
&self,
device: Device,
dst_cache: ValidationCacheEXT,
src_cache_count: u32,
p_src_caches: *const ValidationCacheEXT,
) -> Result {
(self.merge_validation_caches_ext)(device, dst_cache, src_cache_count, p_src_caches)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetValidationCacheDataEXT.html>"]
pub unsafe fn get_validation_cache_data_ext(
&self,
device: Device,
validation_cache: ValidationCacheEXT,
p_data_size: *mut usize,
p_data: *mut c_void,
) -> Result {
(self.get_validation_cache_data_ext)(device, validation_cache, p_data_size, p_data)
}
}
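// Illustrative sketch, not part of the generated bindings: retrieving validation
// cache contents follows the usual Vulkan two-call pattern (query the size with a
// null data pointer, then fetch into a buffer of that size). `ext`, `device` and
// `cache` are assumed to exist in the caller's context.
//
//     let mut size = 0usize;
//     unsafe {
//         ext.get_validation_cache_data_ext(device, cache, &mut size, ::std::ptr::null_mut());
//         let mut data = vec![0u8; size];
//         ext.get_validation_cache_data_ext(
//             device,
//             cache,
//             &mut size,
//             data.as_mut_ptr() as *mut c_void,
//         );
//     }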
#[doc = "Generated from \'VK_EXT_validation_cache\'"]
impl StructureType {
pub const VALIDATION_CACHE_CREATE_INFO_EXT: Self = StructureType(1000160000);
}
#[doc = "Generated from \'VK_EXT_validation_cache\'"]
impl StructureType {
pub const SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT: Self = StructureType(1000160001);
}
#[doc = "Generated from \'VK_EXT_validation_cache\'"]
impl ObjectType {
pub const VALIDATION_CACHE_EXT: Self = ObjectType(1000160000);
}
impl ExtDescriptorIndexingFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_EXT_descriptor_indexing\0")
.expect("Wrong extension string")
}
}
pub struct ExtDescriptorIndexingFn {}
unsafe impl Send for ExtDescriptorIndexingFn {}
unsafe impl Sync for ExtDescriptorIndexingFn {}
impl ::std::clone::Clone for ExtDescriptorIndexingFn {
fn clone(&self) -> Self {
ExtDescriptorIndexingFn {}
}
}
impl ExtDescriptorIndexingFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
ExtDescriptorIndexingFn {}
}
}
#[doc = "Generated from \'VK_EXT_descriptor_indexing\'"]
impl StructureType {
pub const DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT: Self = StructureType(1000161000);
}
#[doc = "Generated from \'VK_EXT_descriptor_indexing\'"]
impl StructureType {
pub const PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT: Self = StructureType(1000161001);
}
#[doc = "Generated from \'VK_EXT_descriptor_indexing\'"]
impl StructureType {
pub const PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT: Self = StructureType(1000161002);
}
#[doc = "Generated from \'VK_EXT_descriptor_indexing\'"]
impl StructureType {
pub const DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT: Self =
StructureType(1000161003);
}
#[doc = "Generated from \'VK_EXT_descriptor_indexing\'"]
impl StructureType {
pub const DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT: Self =
StructureType(1000161004);
}
#[doc = "Generated from \'VK_EXT_descriptor_indexing\'"]
impl DescriptorPoolCreateFlags {
pub const UPDATE_AFTER_BIND_EXT: Self = DescriptorPoolCreateFlags(0b10);
}
#[doc = "Generated from \'VK_EXT_descriptor_indexing\'"]
impl DescriptorSetLayoutCreateFlags {
pub const UPDATE_AFTER_BIND_POOL_EXT: Self = DescriptorSetLayoutCreateFlags(0b10);
}
#[doc = "Generated from \'VK_EXT_descriptor_indexing\'"]
impl Result {
pub const ERROR_FRAGMENTATION_EXT: Self = Result(-1000161000);
}
impl ExtShaderViewportIndexLayerFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_EXT_shader_viewport_index_layer\0")
.expect("Wrong extension string")
}
}
pub struct ExtShaderViewportIndexLayerFn {}
unsafe impl Send for ExtShaderViewportIndexLayerFn {}
unsafe impl Sync for ExtShaderViewportIndexLayerFn {}
impl ::std::clone::Clone for ExtShaderViewportIndexLayerFn {
fn clone(&self) -> Self {
ExtShaderViewportIndexLayerFn {}
}
}
impl ExtShaderViewportIndexLayerFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
ExtShaderViewportIndexLayerFn {}
}
}
impl NvExtension164Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_NV_extension_164\0")
.expect("Wrong extension string")
}
}
pub struct NvExtension164Fn {}
unsafe impl Send for NvExtension164Fn {}
unsafe impl Sync for NvExtension164Fn {}
impl ::std::clone::Clone for NvExtension164Fn {
fn clone(&self) -> Self {
NvExtension164Fn {}
}
}
impl NvExtension164Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
NvExtension164Fn {}
}
}
impl NvShadingRateImageFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_NV_shading_rate_image\0")
.expect("Wrong extension string")
}
}
#[allow(non_camel_case_types)]
pub type PFN_vkCmdBindShadingRateImageNV = extern "system" fn(
command_buffer: CommandBuffer,
image_view: ImageView,
image_layout: ImageLayout,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetViewportShadingRatePaletteNV = extern "system" fn(
command_buffer: CommandBuffer,
first_viewport: u32,
viewport_count: u32,
p_shading_rate_palettes: *const ShadingRatePaletteNV,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetCoarseSampleOrderNV = extern "system" fn(
command_buffer: CommandBuffer,
sample_order_type: CoarseSampleOrderTypeNV,
custom_sample_order_count: u32,
p_custom_sample_orders: *const CoarseSampleOrderCustomNV,
) -> c_void;
pub struct NvShadingRateImageFn {
pub cmd_bind_shading_rate_image_nv: extern "system" fn(
command_buffer: CommandBuffer,
image_view: ImageView,
image_layout: ImageLayout,
) -> c_void,
pub cmd_set_viewport_shading_rate_palette_nv: extern "system" fn(
command_buffer: CommandBuffer,
first_viewport: u32,
viewport_count: u32,
p_shading_rate_palettes: *const ShadingRatePaletteNV,
) -> c_void,
pub cmd_set_coarse_sample_order_nv: extern "system" fn(
command_buffer: CommandBuffer,
sample_order_type: CoarseSampleOrderTypeNV,
custom_sample_order_count: u32,
p_custom_sample_orders: *const CoarseSampleOrderCustomNV,
) -> c_void,
}
unsafe impl Send for NvShadingRateImageFn {}
unsafe impl Sync for NvShadingRateImageFn {}
impl ::std::clone::Clone for NvShadingRateImageFn {
fn clone(&self) -> Self {
NvShadingRateImageFn {
cmd_bind_shading_rate_image_nv: self.cmd_bind_shading_rate_image_nv,
cmd_set_viewport_shading_rate_palette_nv: self.cmd_set_viewport_shading_rate_palette_nv,
cmd_set_coarse_sample_order_nv: self.cmd_set_coarse_sample_order_nv,
}
}
}
impl NvShadingRateImageFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
NvShadingRateImageFn {
cmd_bind_shading_rate_image_nv: unsafe {
extern "system" fn cmd_bind_shading_rate_image_nv(
_command_buffer: CommandBuffer,
_image_view: ImageView,
_image_layout: ImageLayout,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(cmd_bind_shading_rate_image_nv)
))
}
let raw_name = stringify!(vkCmdBindShadingRateImageNV);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_bind_shading_rate_image_nv
} else {
::std::mem::transmute(val)
}
},
cmd_set_viewport_shading_rate_palette_nv: unsafe {
extern "system" fn cmd_set_viewport_shading_rate_palette_nv(
_command_buffer: CommandBuffer,
_first_viewport: u32,
_viewport_count: u32,
_p_shading_rate_palettes: *const ShadingRatePaletteNV,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(cmd_set_viewport_shading_rate_palette_nv)
))
}
let raw_name = stringify!(vkCmdSetViewportShadingRatePaletteNV);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_set_viewport_shading_rate_palette_nv
} else {
::std::mem::transmute(val)
}
},
cmd_set_coarse_sample_order_nv: unsafe {
extern "system" fn cmd_set_coarse_sample_order_nv(
_command_buffer: CommandBuffer,
_sample_order_type: CoarseSampleOrderTypeNV,
_custom_sample_order_count: u32,
_p_custom_sample_orders: *const CoarseSampleOrderCustomNV,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(cmd_set_coarse_sample_order_nv)
))
}
let raw_name = stringify!(vkCmdSetCoarseSampleOrderNV);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_set_coarse_sample_order_nv
} else {
::std::mem::transmute(val)
}
},
}
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdBindShadingRateImageNV.html>"]
pub unsafe fn cmd_bind_shading_rate_image_nv(
&self,
command_buffer: CommandBuffer,
image_view: ImageView,
image_layout: ImageLayout,
) -> c_void {
(self.cmd_bind_shading_rate_image_nv)(command_buffer, image_view, image_layout)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdSetViewportShadingRatePaletteNV.html>"]
pub unsafe fn cmd_set_viewport_shading_rate_palette_nv(
&self,
command_buffer: CommandBuffer,
first_viewport: u32,
viewport_count: u32,
p_shading_rate_palettes: *const ShadingRatePaletteNV,
) -> c_void {
(self.cmd_set_viewport_shading_rate_palette_nv)(
command_buffer,
first_viewport,
viewport_count,
p_shading_rate_palettes,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdSetCoarseSampleOrderNV.html>"]
pub unsafe fn cmd_set_coarse_sample_order_nv(
&self,
command_buffer: CommandBuffer,
sample_order_type: CoarseSampleOrderTypeNV,
custom_sample_order_count: u32,
p_custom_sample_orders: *const CoarseSampleOrderCustomNV,
) -> c_void {
(self.cmd_set_coarse_sample_order_nv)(
command_buffer,
sample_order_type,
custom_sample_order_count,
p_custom_sample_orders,
)
}
}
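// Illustrative sketch, not part of the generated bindings: the commands above are
// recorded into a command buffer, pairing a shading-rate image view with
// per-viewport palettes. `fns`, `cmd`, `view` and `palettes`
// (a slice of ShadingRatePaletteNV) are assumptions for the example.
//
//     unsafe {
//         fns.cmd_bind_shading_rate_image_nv(cmd, view, ImageLayout::SHADING_RATE_OPTIMAL_NV);
//         fns.cmd_set_viewport_shading_rate_palette_nv(
//             cmd,
//             0,
//             palettes.len() as u32,
//             palettes.as_ptr(),
//         );
//     }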
#[doc = "Generated from \'VK_NV_shading_rate_image\'"]
impl StructureType {
pub const PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV: Self =
StructureType(1000164000);
}
#[doc = "Generated from \'VK_NV_shading_rate_image\'"]
impl StructureType {
pub const PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV: Self = StructureType(1000164001);
}
#[doc = "Generated from \'VK_NV_shading_rate_image\'"]
impl StructureType {
pub const PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV: Self = StructureType(1000164002);
}
#[doc = "Generated from \'VK_NV_shading_rate_image\'"]
impl ImageLayout {
pub const SHADING_RATE_OPTIMAL_NV: Self = ImageLayout(1000164003);
}
#[doc = "Generated from \'VK_NV_shading_rate_image\'"]
impl DynamicState {
pub const VIEWPORT_SHADING_RATE_PALETTE_NV: Self = DynamicState(1000164004);
}
#[doc = "Generated from \'VK_NV_shading_rate_image\'"]
impl AccessFlags {
pub const SHADING_RATE_IMAGE_READ_NV: Self = AccessFlags(0b100000000000000000000000);
}
#[doc = "Generated from \'VK_NV_shading_rate_image\'"]
impl ImageUsageFlags {
pub const SHADING_RATE_IMAGE_NV: Self = ImageUsageFlags(0b100000000);
}
#[doc = "Generated from \'VK_NV_shading_rate_image\'"]
impl PipelineStageFlags {
pub const SHADING_RATE_IMAGE_NV: Self = PipelineStageFlags(0b10000000000000000000000);
}
#[doc = "Generated from \'VK_NV_shading_rate_image\'"]
impl StructureType {
pub const PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV: Self =
StructureType(1000164005);
}
#[doc = "Generated from \'VK_NV_shading_rate_image\'"]
impl DynamicState {
pub const VIEWPORT_COARSE_SAMPLE_ORDER_NV: Self = DynamicState(1000164006);
}
impl NvRayTracingFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_NV_ray_tracing\0")
.expect("Wrong extension string")
}
}
#[allow(non_camel_case_types)]
pub type PFN_vkCreateAccelerationStructureNV = extern "system" fn(
device: Device,
p_create_info: *const AccelerationStructureCreateInfoNV,
p_allocator: *const AllocationCallbacks,
p_acceleration_structure: *mut AccelerationStructureNV,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkDestroyAccelerationStructureNV = extern "system" fn(
device: Device,
acceleration_structure: AccelerationStructureNV,
p_allocator: *const AllocationCallbacks,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkGetAccelerationStructureMemoryRequirementsNV = extern "system" fn(
device: Device,
p_info: *const AccelerationStructureMemoryRequirementsInfoNV,
p_memory_requirements: *mut MemoryRequirements2KHR,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkBindAccelerationStructureMemoryNV = extern "system" fn(
device: Device,
bind_info_count: u32,
p_bind_infos: *const BindAccelerationStructureMemoryInfoNV,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdBuildAccelerationStructureNV = extern "system" fn(
command_buffer: CommandBuffer,
p_info: *const AccelerationStructureInfoNV,
instance_data: Buffer,
instance_offset: DeviceSize,
update: Bool32,
dst: AccelerationStructureNV,
src: AccelerationStructureNV,
scratch: Buffer,
scratch_offset: DeviceSize,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdCopyAccelerationStructureNV = extern "system" fn(
command_buffer: CommandBuffer,
dst: AccelerationStructureNV,
src: AccelerationStructureNV,
mode: CopyAccelerationStructureModeNV,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdTraceRaysNV = extern "system" fn(
command_buffer: CommandBuffer,
raygen_shader_binding_table_buffer: Buffer,
raygen_shader_binding_offset: DeviceSize,
miss_shader_binding_table_buffer: Buffer,
miss_shader_binding_offset: DeviceSize,
miss_shader_binding_stride: DeviceSize,
hit_shader_binding_table_buffer: Buffer,
hit_shader_binding_offset: DeviceSize,
hit_shader_binding_stride: DeviceSize,
callable_shader_binding_table_buffer: Buffer,
callable_shader_binding_offset: DeviceSize,
callable_shader_binding_stride: DeviceSize,
width: u32,
height: u32,
depth: u32,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCreateRayTracingPipelinesNV = extern "system" fn(
device: Device,
pipeline_cache: PipelineCache,
create_info_count: u32,
p_create_infos: *const RayTracingPipelineCreateInfoNV,
p_allocator: *const AllocationCallbacks,
p_pipelines: *mut Pipeline,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkGetRayTracingShaderGroupHandlesNV = extern "system" fn(
device: Device,
pipeline: Pipeline,
first_group: u32,
group_count: u32,
data_size: usize,
p_data: *mut c_void,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkGetAccelerationStructureHandleNV = extern "system" fn(
device: Device,
acceleration_structure: AccelerationStructureNV,
data_size: usize,
p_data: *mut c_void,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdWriteAccelerationStructuresPropertiesNV = extern "system" fn(
command_buffer: CommandBuffer,
acceleration_structure_count: u32,
p_acceleration_structures: *const AccelerationStructureNV,
query_type: QueryType,
query_pool: QueryPool,
first_query: u32,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCompileDeferredNV =
extern "system" fn(device: Device, pipeline: Pipeline, shader: u32) -> Result;
pub struct NvRayTracingFn {
pub create_acceleration_structure_nv: extern "system" fn(
device: Device,
p_create_info: *const AccelerationStructureCreateInfoNV,
p_allocator: *const AllocationCallbacks,
p_acceleration_structure: *mut AccelerationStructureNV,
) -> Result,
pub destroy_acceleration_structure_nv: extern "system" fn(
device: Device,
acceleration_structure: AccelerationStructureNV,
p_allocator: *const AllocationCallbacks,
) -> c_void,
pub get_acceleration_structure_memory_requirements_nv: extern "system" fn(
device: Device,
p_info: *const AccelerationStructureMemoryRequirementsInfoNV,
p_memory_requirements: *mut MemoryRequirements2KHR,
) -> c_void,
pub bind_acceleration_structure_memory_nv: extern "system" fn(
device: Device,
bind_info_count: u32,
p_bind_infos: *const BindAccelerationStructureMemoryInfoNV,
) -> Result,
pub cmd_build_acceleration_structure_nv: extern "system" fn(
command_buffer: CommandBuffer,
p_info: *const AccelerationStructureInfoNV,
instance_data: Buffer,
instance_offset: DeviceSize,
update: Bool32,
dst: AccelerationStructureNV,
src: AccelerationStructureNV,
scratch: Buffer,
scratch_offset: DeviceSize,
) -> c_void,
pub cmd_copy_acceleration_structure_nv: extern "system" fn(
command_buffer: CommandBuffer,
dst: AccelerationStructureNV,
src: AccelerationStructureNV,
mode: CopyAccelerationStructureModeNV,
) -> c_void,
pub cmd_trace_rays_nv: extern "system" fn(
command_buffer: CommandBuffer,
raygen_shader_binding_table_buffer: Buffer,
raygen_shader_binding_offset: DeviceSize,
miss_shader_binding_table_buffer: Buffer,
miss_shader_binding_offset: DeviceSize,
miss_shader_binding_stride: DeviceSize,
hit_shader_binding_table_buffer: Buffer,
hit_shader_binding_offset: DeviceSize,
hit_shader_binding_stride: DeviceSize,
callable_shader_binding_table_buffer: Buffer,
callable_shader_binding_offset: DeviceSize,
callable_shader_binding_stride: DeviceSize,
width: u32,
height: u32,
depth: u32,
) -> c_void,
pub create_ray_tracing_pipelines_nv: extern "system" fn(
device: Device,
pipeline_cache: PipelineCache,
create_info_count: u32,
p_create_infos: *const RayTracingPipelineCreateInfoNV,
p_allocator: *const AllocationCallbacks,
p_pipelines: *mut Pipeline,
) -> Result,
pub get_ray_tracing_shader_group_handles_nv: extern "system" fn(
device: Device,
pipeline: Pipeline,
first_group: u32,
group_count: u32,
data_size: usize,
p_data: *mut c_void,
) -> Result,
pub get_acceleration_structure_handle_nv: extern "system" fn(
device: Device,
acceleration_structure: AccelerationStructureNV,
data_size: usize,
p_data: *mut c_void,
) -> Result,
pub cmd_write_acceleration_structures_properties_nv: extern "system" fn(
command_buffer: CommandBuffer,
acceleration_structure_count: u32,
p_acceleration_structures: *const AccelerationStructureNV,
query_type: QueryType,
query_pool: QueryPool,
first_query: u32,
) -> c_void,
pub compile_deferred_nv:
extern "system" fn(device: Device, pipeline: Pipeline, shader: u32) -> Result,
}
unsafe impl Send for NvRayTracingFn {}
unsafe impl Sync for NvRayTracingFn {}
impl ::std::clone::Clone for NvRayTracingFn {
fn clone(&self) -> Self {
NvRayTracingFn {
create_acceleration_structure_nv: self.create_acceleration_structure_nv,
destroy_acceleration_structure_nv: self.destroy_acceleration_structure_nv,
get_acceleration_structure_memory_requirements_nv: self
.get_acceleration_structure_memory_requirements_nv,
bind_acceleration_structure_memory_nv: self.bind_acceleration_structure_memory_nv,
cmd_build_acceleration_structure_nv: self.cmd_build_acceleration_structure_nv,
cmd_copy_acceleration_structure_nv: self.cmd_copy_acceleration_structure_nv,
cmd_trace_rays_nv: self.cmd_trace_rays_nv,
create_ray_tracing_pipelines_nv: self.create_ray_tracing_pipelines_nv,
get_ray_tracing_shader_group_handles_nv: self.get_ray_tracing_shader_group_handles_nv,
get_acceleration_structure_handle_nv: self.get_acceleration_structure_handle_nv,
cmd_write_acceleration_structures_properties_nv: self
.cmd_write_acceleration_structures_properties_nv,
compile_deferred_nv: self.compile_deferred_nv,
}
}
}
impl NvRayTracingFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
NvRayTracingFn {
create_acceleration_structure_nv: unsafe {
extern "system" fn create_acceleration_structure_nv(
_device: Device,
_p_create_info: *const AccelerationStructureCreateInfoNV,
_p_allocator: *const AllocationCallbacks,
_p_acceleration_structure: *mut AccelerationStructureNV,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(create_acceleration_structure_nv)
))
}
let raw_name = stringify!(vkCreateAccelerationStructureNV);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
create_acceleration_structure_nv
} else {
::std::mem::transmute(val)
}
},
destroy_acceleration_structure_nv: unsafe {
extern "system" fn destroy_acceleration_structure_nv(
_device: Device,
_acceleration_structure: AccelerationStructureNV,
_p_allocator: *const AllocationCallbacks,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(destroy_acceleration_structure_nv)
))
}
let raw_name = stringify!(vkDestroyAccelerationStructureNV);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
destroy_acceleration_structure_nv
} else {
::std::mem::transmute(val)
}
},
get_acceleration_structure_memory_requirements_nv: unsafe {
extern "system" fn get_acceleration_structure_memory_requirements_nv(
_device: Device,
_p_info: *const AccelerationStructureMemoryRequirementsInfoNV,
_p_memory_requirements: *mut MemoryRequirements2KHR,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(get_acceleration_structure_memory_requirements_nv)
))
}
let raw_name = stringify!(vkGetAccelerationStructureMemoryRequirementsNV);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_acceleration_structure_memory_requirements_nv
} else {
::std::mem::transmute(val)
}
},
bind_acceleration_structure_memory_nv: unsafe {
extern "system" fn bind_acceleration_structure_memory_nv(
_device: Device,
_bind_info_count: u32,
_p_bind_infos: *const BindAccelerationStructureMemoryInfoNV,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(bind_acceleration_structure_memory_nv)
))
}
let raw_name = stringify!(vkBindAccelerationStructureMemoryNV);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
bind_acceleration_structure_memory_nv
} else {
::std::mem::transmute(val)
}
},
cmd_build_acceleration_structure_nv: unsafe {
extern "system" fn cmd_build_acceleration_structure_nv(
_command_buffer: CommandBuffer,
_p_info: *const AccelerationStructureInfoNV,
_instance_data: Buffer,
_instance_offset: DeviceSize,
_update: Bool32,
_dst: AccelerationStructureNV,
_src: AccelerationStructureNV,
_scratch: Buffer,
_scratch_offset: DeviceSize,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(cmd_build_acceleration_structure_nv)
))
}
let raw_name = stringify!(vkCmdBuildAccelerationStructureNV);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_build_acceleration_structure_nv
} else {
::std::mem::transmute(val)
}
},
cmd_copy_acceleration_structure_nv: unsafe {
extern "system" fn cmd_copy_acceleration_structure_nv(
_command_buffer: CommandBuffer,
_dst: AccelerationStructureNV,
_src: AccelerationStructureNV,
_mode: CopyAccelerationStructureModeNV,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(cmd_copy_acceleration_structure_nv)
))
}
let raw_name = stringify!(vkCmdCopyAccelerationStructureNV);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_copy_acceleration_structure_nv
} else {
::std::mem::transmute(val)
}
},
cmd_trace_rays_nv: unsafe {
extern "system" fn cmd_trace_rays_nv(
_command_buffer: CommandBuffer,
_raygen_shader_binding_table_buffer: Buffer,
_raygen_shader_binding_offset: DeviceSize,
_miss_shader_binding_table_buffer: Buffer,
_miss_shader_binding_offset: DeviceSize,
_miss_shader_binding_stride: DeviceSize,
_hit_shader_binding_table_buffer: Buffer,
_hit_shader_binding_offset: DeviceSize,
_hit_shader_binding_stride: DeviceSize,
_callable_shader_binding_table_buffer: Buffer,
_callable_shader_binding_offset: DeviceSize,
_callable_shader_binding_stride: DeviceSize,
_width: u32,
_height: u32,
_depth: u32,
) -> c_void {
panic!(concat!("Unable to load ", stringify!(cmd_trace_rays_nv)))
}
let raw_name = stringify!(vkCmdTraceRaysNV);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_trace_rays_nv
} else {
::std::mem::transmute(val)
}
},
create_ray_tracing_pipelines_nv: unsafe {
extern "system" fn create_ray_tracing_pipelines_nv(
_device: Device,
_pipeline_cache: PipelineCache,
_create_info_count: u32,
_p_create_infos: *const RayTracingPipelineCreateInfoNV,
_p_allocator: *const AllocationCallbacks,
_p_pipelines: *mut Pipeline,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(create_ray_tracing_pipelines_nv)
))
}
let raw_name = stringify!(vkCreateRayTracingPipelinesNV);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
create_ray_tracing_pipelines_nv
} else {
::std::mem::transmute(val)
}
},
get_ray_tracing_shader_group_handles_nv: unsafe {
extern "system" fn get_ray_tracing_shader_group_handles_nv(
_device: Device,
_pipeline: Pipeline,
_first_group: u32,
_group_count: u32,
_data_size: usize,
_p_data: *mut c_void,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(get_ray_tracing_shader_group_handles_nv)
))
}
let raw_name = stringify!(vkGetRayTracingShaderGroupHandlesNV);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_ray_tracing_shader_group_handles_nv
} else {
::std::mem::transmute(val)
}
},
get_acceleration_structure_handle_nv: unsafe {
extern "system" fn get_acceleration_structure_handle_nv(
_device: Device,
_acceleration_structure: AccelerationStructureNV,
_data_size: usize,
_p_data: *mut c_void,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(get_acceleration_structure_handle_nv)
))
}
let raw_name = stringify!(vkGetAccelerationStructureHandleNV);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_acceleration_structure_handle_nv
} else {
::std::mem::transmute(val)
}
},
cmd_write_acceleration_structures_properties_nv: unsafe {
extern "system" fn cmd_write_acceleration_structures_properties_nv(
_command_buffer: CommandBuffer,
_acceleration_structure_count: u32,
_p_acceleration_structures: *const AccelerationStructureNV,
_query_type: QueryType,
_query_pool: QueryPool,
_first_query: u32,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(cmd_write_acceleration_structures_properties_nv)
))
}
let raw_name = stringify!(vkCmdWriteAccelerationStructuresPropertiesNV);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_write_acceleration_structures_properties_nv
} else {
::std::mem::transmute(val)
}
},
compile_deferred_nv: unsafe {
extern "system" fn compile_deferred_nv(
_device: Device,
_pipeline: Pipeline,
_shader: u32,
) -> Result {
panic!(concat!("Unable to load ", stringify!(compile_deferred_nv)))
}
let raw_name = stringify!(vkCompileDeferredNV);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
compile_deferred_nv
} else {
::std::mem::transmute(val)
}
},
}
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCreateAccelerationStructureNV.html>"]
pub unsafe fn create_acceleration_structure_nv(
&self,
device: Device,
p_create_info: *const AccelerationStructureCreateInfoNV,
p_allocator: *const AllocationCallbacks,
p_acceleration_structure: *mut AccelerationStructureNV,
) -> Result {
(self.create_acceleration_structure_nv)(
device,
p_create_info,
p_allocator,
p_acceleration_structure,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkDestroyAccelerationStructureNV.html>"]
pub unsafe fn destroy_acceleration_structure_nv(
&self,
device: Device,
acceleration_structure: AccelerationStructureNV,
p_allocator: *const AllocationCallbacks,
) -> c_void {
(self.destroy_acceleration_structure_nv)(device, acceleration_structure, p_allocator)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetAccelerationStructureMemoryRequirementsNV.html>"]
pub unsafe fn get_acceleration_structure_memory_requirements_nv(
&self,
device: Device,
p_info: *const AccelerationStructureMemoryRequirementsInfoNV,
p_memory_requirements: *mut MemoryRequirements2KHR,
) -> c_void {
(self.get_acceleration_structure_memory_requirements_nv)(
device,
p_info,
p_memory_requirements,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkBindAccelerationStructureMemoryNV.html>"]
pub unsafe fn bind_acceleration_structure_memory_nv(
&self,
device: Device,
bind_info_count: u32,
p_bind_infos: *const BindAccelerationStructureMemoryInfoNV,
) -> Result {
(self.bind_acceleration_structure_memory_nv)(device, bind_info_count, p_bind_infos)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdBuildAccelerationStructureNV.html>"]
pub unsafe fn cmd_build_acceleration_structure_nv(
&self,
command_buffer: CommandBuffer,
p_info: *const AccelerationStructureInfoNV,
instance_data: Buffer,
instance_offset: DeviceSize,
update: Bool32,
dst: AccelerationStructureNV,
src: AccelerationStructureNV,
scratch: Buffer,
scratch_offset: DeviceSize,
) -> c_void {
(self.cmd_build_acceleration_structure_nv)(
command_buffer,
p_info,
instance_data,
instance_offset,
update,
dst,
src,
scratch,
scratch_offset,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdCopyAccelerationStructureNV.html>"]
pub unsafe fn cmd_copy_acceleration_structure_nv(
&self,
command_buffer: CommandBuffer,
dst: AccelerationStructureNV,
src: AccelerationStructureNV,
mode: CopyAccelerationStructureModeNV,
) -> c_void {
(self.cmd_copy_acceleration_structure_nv)(command_buffer, dst, src, mode)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdTraceRaysNV.html>"]
pub unsafe fn cmd_trace_rays_nv(
&self,
command_buffer: CommandBuffer,
raygen_shader_binding_table_buffer: Buffer,
raygen_shader_binding_offset: DeviceSize,
miss_shader_binding_table_buffer: Buffer,
miss_shader_binding_offset: DeviceSize,
miss_shader_binding_stride: DeviceSize,
hit_shader_binding_table_buffer: Buffer,
hit_shader_binding_offset: DeviceSize,
hit_shader_binding_stride: DeviceSize,
callable_shader_binding_table_buffer: Buffer,
callable_shader_binding_offset: DeviceSize,
callable_shader_binding_stride: DeviceSize,
width: u32,
height: u32,
depth: u32,
) -> c_void {
(self.cmd_trace_rays_nv)(
command_buffer,
raygen_shader_binding_table_buffer,
raygen_shader_binding_offset,
miss_shader_binding_table_buffer,
miss_shader_binding_offset,
miss_shader_binding_stride,
hit_shader_binding_table_buffer,
hit_shader_binding_offset,
hit_shader_binding_stride,
callable_shader_binding_table_buffer,
callable_shader_binding_offset,
callable_shader_binding_stride,
width,
height,
depth,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCreateRayTracingPipelinesNV.html>"]
pub unsafe fn create_ray_tracing_pipelines_nv(
&self,
device: Device,
pipeline_cache: PipelineCache,
create_info_count: u32,
p_create_infos: *const RayTracingPipelineCreateInfoNV,
p_allocator: *const AllocationCallbacks,
p_pipelines: *mut Pipeline,
) -> Result {
(self.create_ray_tracing_pipelines_nv)(
device,
pipeline_cache,
create_info_count,
p_create_infos,
p_allocator,
p_pipelines,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetRayTracingShaderGroupHandlesNV.html>"]
pub unsafe fn get_ray_tracing_shader_group_handles_nv(
&self,
device: Device,
pipeline: Pipeline,
first_group: u32,
group_count: u32,
data_size: usize,
p_data: *mut c_void,
) -> Result {
(self.get_ray_tracing_shader_group_handles_nv)(
device,
pipeline,
first_group,
group_count,
data_size,
p_data,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetAccelerationStructureHandleNV.html>"]
pub unsafe fn get_acceleration_structure_handle_nv(
&self,
device: Device,
acceleration_structure: AccelerationStructureNV,
data_size: usize,
p_data: *mut c_void,
) -> Result {
(self.get_acceleration_structure_handle_nv)(
device,
acceleration_structure,
data_size,
p_data,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdWriteAccelerationStructuresPropertiesNV.html>"]
pub unsafe fn cmd_write_acceleration_structures_properties_nv(
&self,
command_buffer: CommandBuffer,
acceleration_structure_count: u32,
p_acceleration_structures: *const AccelerationStructureNV,
query_type: QueryType,
query_pool: QueryPool,
first_query: u32,
) -> c_void {
(self.cmd_write_acceleration_structures_properties_nv)(
command_buffer,
acceleration_structure_count,
p_acceleration_structures,
query_type,
query_pool,
first_query,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCompileDeferredNV.html>"]
pub unsafe fn compile_deferred_nv(
&self,
device: Device,
pipeline: Pipeline,
shader: u32,
) -> Result {
(self.compile_deferred_nv)(device, pipeline, shader)
}
}
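// Illustrative sketch, not part of the generated bindings: shader group handles are
// copied into a caller-sized byte buffer whose length is group_count *
// shaderGroupHandleSize (the latter reported via PhysicalDeviceRayTracingPropertiesNV).
// `rt`, `device`, `pipeline`, `group_count` and `handle_size` are assumptions for
// the example.
//
//     let mut handles = vec![0u8; (group_count * handle_size) as usize];
//     let result = unsafe {
//         rt.get_ray_tracing_shader_group_handles_nv(
//             device,
//             pipeline,
//             0,
//             group_count,
//             handles.len(),
//             handles.as_mut_ptr() as *mut c_void,
//         )
//     };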
#[doc = "Generated from \'VK_NV_ray_tracing\'"]
impl StructureType {
pub const RAY_TRACING_PIPELINE_CREATE_INFO_NV: Self = StructureType(1000165000);
}
#[doc = "Generated from \'VK_NV_ray_tracing\'"]
impl StructureType {
pub const ACCELERATION_STRUCTURE_CREATE_INFO_NV: Self = StructureType(1000165001);
}
#[doc = "Generated from \'VK_NV_ray_tracing\'"]
impl StructureType {
pub const GEOMETRY_NV: Self = StructureType(1000165003);
}
#[doc = "Generated from \'VK_NV_ray_tracing\'"]
impl StructureType {
pub const GEOMETRY_TRIANGLES_NV: Self = StructureType(1000165004);
}
#[doc = "Generated from \'VK_NV_ray_tracing\'"]
impl StructureType {
pub const GEOMETRY_AABB_NV: Self = StructureType(1000165005);
}
#[doc = "Generated from \'VK_NV_ray_tracing\'"]
impl StructureType {
pub const BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV: Self = StructureType(1000165006);
}
#[doc = "Generated from \'VK_NV_ray_tracing\'"]
impl StructureType {
pub const WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV: Self = StructureType(1000165007);
}
#[doc = "Generated from \'VK_NV_ray_tracing\'"]
impl StructureType {
pub const ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV: Self = StructureType(1000165008);
}
#[doc = "Generated from \'VK_NV_ray_tracing\'"]
impl StructureType {
pub const PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV: Self = StructureType(1000165009);
}
#[doc = "Generated from \'VK_NV_ray_tracing\'"]
impl StructureType {
pub const RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV: Self = StructureType(1000165011);
}
#[doc = "Generated from \'VK_NV_ray_tracing\'"]
impl StructureType {
pub const ACCELERATION_STRUCTURE_INFO_NV: Self = StructureType(1000165012);
}
#[doc = "Generated from \'VK_NV_ray_tracing\'"]
impl ShaderStageFlags {
pub const RAYGEN_NV: Self = ShaderStageFlags(0b100000000);
}
#[doc = "Generated from \'VK_NV_ray_tracing\'"]
impl ShaderStageFlags {
pub const ANY_HIT_NV: Self = ShaderStageFlags(0b1000000000);
}
#[doc = "Generated from \'VK_NV_ray_tracing\'"]
impl ShaderStageFlags {
pub const CLOSEST_HIT_NV: Self = ShaderStageFlags(0b10000000000);
}
#[doc = "Generated from \'VK_NV_ray_tracing\'"]
impl ShaderStageFlags {
pub const MISS_NV: Self = ShaderStageFlags(0b100000000000);
}
#[doc = "Generated from \'VK_NV_ray_tracing\'"]
impl ShaderStageFlags {
pub const INTERSECTION_NV: Self = ShaderStageFlags(0b1000000000000);
}
#[doc = "Generated from \'VK_NV_ray_tracing\'"]
impl ShaderStageFlags {
pub const CALLABLE_NV: Self = ShaderStageFlags(0b10000000000000);
}
#[doc = "Generated from \'VK_NV_ray_tracing\'"]
impl PipelineStageFlags {
pub const RAY_TRACING_SHADER_NV: Self = PipelineStageFlags(0b1000000000000000000000);
}
#[doc = "Generated from \'VK_NV_ray_tracing\'"]
impl PipelineStageFlags {
pub const ACCELERATION_STRUCTURE_BUILD_NV: Self =
PipelineStageFlags(0b10000000000000000000000000);
}
#[doc = "Generated from \'VK_NV_ray_tracing\'"]
impl BufferUsageFlags {
pub const RAY_TRACING_NV: Self = BufferUsageFlags(0b10000000000);
}
#[doc = "Generated from \'VK_NV_ray_tracing\'"]
impl PipelineBindPoint {
pub const RAY_TRACING_NV: Self = PipelineBindPoint(1000165000);
}
#[doc = "Generated from \'VK_NV_ray_tracing\'"]
impl DescriptorType {
pub const ACCELERATION_STRUCTURE_NV: Self = DescriptorType(1000165000);
}
#[doc = "Generated from \'VK_NV_ray_tracing\'"]
impl AccessFlags {
pub const ACCELERATION_STRUCTURE_READ_NV: Self = AccessFlags(0b1000000000000000000000);
}
#[doc = "Generated from \'VK_NV_ray_tracing\'"]
impl AccessFlags {
pub const ACCELERATION_STRUCTURE_WRITE_NV: Self = AccessFlags(0b10000000000000000000000);
}
#[doc = "Generated from \'VK_NV_ray_tracing\'"]
impl QueryType {
pub const ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV: Self = QueryType(1000165000);
}
#[doc = "Generated from \'VK_NV_ray_tracing\'"]
impl PipelineCreateFlags {
pub const DEFER_COMPILE_NV: Self = PipelineCreateFlags(0b100000);
}
#[doc = "Generated from \'VK_NV_ray_tracing\'"]
impl ObjectType {
pub const ACCELERATION_STRUCTURE_NV: Self = ObjectType(1000165000);
}
#[doc = "Generated from \'VK_NV_ray_tracing\'"]
impl DebugReportObjectTypeEXT {
pub const ACCELERATION_STRUCTURE_NV: Self = DebugReportObjectTypeEXT(1000165000);
}
#[doc = "Generated from \'VK_NV_ray_tracing\'"]
impl IndexType {
pub const NONE_NV: Self = IndexType(1000165000);
}
impl NvRepresentativeFragmentTestFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_NV_representative_fragment_test\0")
.expect("Wrong extension string")
}
}
pub struct NvRepresentativeFragmentTestFn {}
unsafe impl Send for NvRepresentativeFragmentTestFn {}
unsafe impl Sync for NvRepresentativeFragmentTestFn {}
impl ::std::clone::Clone for NvRepresentativeFragmentTestFn {
fn clone(&self) -> Self {
NvRepresentativeFragmentTestFn {}
}
}
impl NvRepresentativeFragmentTestFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
NvRepresentativeFragmentTestFn {}
}
}
#[doc = "Generated from \'VK_NV_representative_fragment_test\'"]
impl StructureType {
pub const PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV: Self =
StructureType(1000166000);
}
#[doc = "Generated from \'VK_NV_representative_fragment_test\'"]
impl StructureType {
pub const PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV: Self =
StructureType(1000166001);
}
impl NvExtension168Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_NV_extension_168\0")
.expect("Wrong extension string")
}
}
pub struct NvExtension168Fn {}
unsafe impl Send for NvExtension168Fn {}
unsafe impl Sync for NvExtension168Fn {}
impl ::std::clone::Clone for NvExtension168Fn {
fn clone(&self) -> Self {
NvExtension168Fn {}
}
}
impl NvExtension168Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
NvExtension168Fn {}
}
}
impl KhrMaintenance3Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_KHR_maintenance3\0")
.expect("Wrong extension string")
}
}
pub struct KhrMaintenance3Fn {}
unsafe impl Send for KhrMaintenance3Fn {}
unsafe impl Sync for KhrMaintenance3Fn {}
impl ::std::clone::Clone for KhrMaintenance3Fn {
fn clone(&self) -> Self {
KhrMaintenance3Fn {}
}
}
impl KhrMaintenance3Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
KhrMaintenance3Fn {}
}
}
impl KhrDrawIndirectCountFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_KHR_draw_indirect_count\0")
.expect("Wrong extension string")
}
}
#[allow(non_camel_case_types)]
pub type PFN_vkCmdDrawIndirectCountKHR = extern "system" fn(
command_buffer: CommandBuffer,
buffer: Buffer,
offset: DeviceSize,
count_buffer: Buffer,
count_buffer_offset: DeviceSize,
max_draw_count: u32,
stride: u32,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdDrawIndexedIndirectCountKHR = extern "system" fn(
command_buffer: CommandBuffer,
buffer: Buffer,
offset: DeviceSize,
count_buffer: Buffer,
count_buffer_offset: DeviceSize,
max_draw_count: u32,
stride: u32,
) -> c_void;
pub struct KhrDrawIndirectCountFn {
pub cmd_draw_indirect_count_khr: extern "system" fn(
command_buffer: CommandBuffer,
buffer: Buffer,
offset: DeviceSize,
count_buffer: Buffer,
count_buffer_offset: DeviceSize,
max_draw_count: u32,
stride: u32,
) -> c_void,
pub cmd_draw_indexed_indirect_count_khr: extern "system" fn(
command_buffer: CommandBuffer,
buffer: Buffer,
offset: DeviceSize,
count_buffer: Buffer,
count_buffer_offset: DeviceSize,
max_draw_count: u32,
stride: u32,
) -> c_void,
}
unsafe impl Send for KhrDrawIndirectCountFn {}
unsafe impl Sync for KhrDrawIndirectCountFn {}
impl ::std::clone::Clone for KhrDrawIndirectCountFn {
fn clone(&self) -> Self {
KhrDrawIndirectCountFn {
cmd_draw_indirect_count_khr: self.cmd_draw_indirect_count_khr,
cmd_draw_indexed_indirect_count_khr: self.cmd_draw_indexed_indirect_count_khr,
}
}
}
impl KhrDrawIndirectCountFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
KhrDrawIndirectCountFn {
cmd_draw_indirect_count_khr: unsafe {
extern "system" fn cmd_draw_indirect_count_khr(
_command_buffer: CommandBuffer,
_buffer: Buffer,
_offset: DeviceSize,
_count_buffer: Buffer,
_count_buffer_offset: DeviceSize,
_max_draw_count: u32,
_stride: u32,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(cmd_draw_indirect_count_khr)
))
}
let raw_name = stringify!(vkCmdDrawIndirectCountKHR);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_draw_indirect_count_khr
} else {
::std::mem::transmute(val)
}
},
cmd_draw_indexed_indirect_count_khr: unsafe {
extern "system" fn cmd_draw_indexed_indirect_count_khr(
_command_buffer: CommandBuffer,
_buffer: Buffer,
_offset: DeviceSize,
_count_buffer: Buffer,
_count_buffer_offset: DeviceSize,
_max_draw_count: u32,
_stride: u32,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(cmd_draw_indexed_indirect_count_khr)
))
}
let raw_name = stringify!(vkCmdDrawIndexedIndirectCountKHR);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_draw_indexed_indirect_count_khr
} else {
::std::mem::transmute(val)
}
},
}
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdDrawIndirectCountKHR.html>"]
pub unsafe fn cmd_draw_indirect_count_khr(
&self,
command_buffer: CommandBuffer,
buffer: Buffer,
offset: DeviceSize,
count_buffer: Buffer,
count_buffer_offset: DeviceSize,
max_draw_count: u32,
stride: u32,
) -> c_void {
(self.cmd_draw_indirect_count_khr)(
command_buffer,
buffer,
offset,
count_buffer,
count_buffer_offset,
max_draw_count,
stride,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdDrawIndexedIndirectCountKHR.html>"]
pub unsafe fn cmd_draw_indexed_indirect_count_khr(
&self,
command_buffer: CommandBuffer,
buffer: Buffer,
offset: DeviceSize,
count_buffer: Buffer,
count_buffer_offset: DeviceSize,
max_draw_count: u32,
stride: u32,
) -> c_void {
(self.cmd_draw_indexed_indirect_count_khr)(
command_buffer,
buffer,
offset,
count_buffer,
count_buffer_offset,
max_draw_count,
stride,
)
}
}
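// Illustrative sketch, not part of the generated bindings: the draw count is read
// by the device from `count_buffer` at `count_buffer_offset` and clamped to
// `max_draw_count`, while `buffer`, `offset` and `stride` describe the array of
// indirect draw records. `fns`, `cmd`, `args_buffer`, `count_buffer`, `max_draws`
// and `stride` are assumptions for the example.
//
//     unsafe {
//         fns.cmd_draw_indirect_count_khr(
//             cmd,
//             args_buffer,
//             0,
//             count_buffer,
//             0,
//             max_draws,
//             stride,
//         );
//     }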
impl QcomExtension171Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_QCOM_extension_171\0")
.expect("Wrong extension string")
}
}
pub struct QcomExtension171Fn {}
unsafe impl Send for QcomExtension171Fn {}
unsafe impl Sync for QcomExtension171Fn {}
impl ::std::clone::Clone for QcomExtension171Fn {
fn clone(&self) -> Self {
QcomExtension171Fn {}
}
}
impl QcomExtension171Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
QcomExtension171Fn {}
}
}
impl QcomExtension172Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_QCOM_extension_172\0")
.expect("Wrong extension string")
}
}
pub struct QcomExtension172Fn {}
unsafe impl Send for QcomExtension172Fn {}
unsafe impl Sync for QcomExtension172Fn {}
impl ::std::clone::Clone for QcomExtension172Fn {
fn clone(&self) -> Self {
QcomExtension172Fn {}
}
}
impl QcomExtension172Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
QcomExtension172Fn {}
}
}
impl QcomExtension173Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_QCOM_extension_173\0")
.expect("Wrong extension string")
}
}
pub struct QcomExtension173Fn {}
unsafe impl Send for QcomExtension173Fn {}
unsafe impl Sync for QcomExtension173Fn {}
impl ::std::clone::Clone for QcomExtension173Fn {
fn clone(&self) -> Self {
QcomExtension173Fn {}
}
}
impl QcomExtension173Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
QcomExtension173Fn {}
}
}
impl QcomExtension174Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_QCOM_extension_174\0")
.expect("Wrong extension string")
}
}
pub struct QcomExtension174Fn {}
unsafe impl Send for QcomExtension174Fn {}
unsafe impl Sync for QcomExtension174Fn {}
impl ::std::clone::Clone for QcomExtension174Fn {
fn clone(&self) -> Self {
QcomExtension174Fn {}
}
}
impl QcomExtension174Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
QcomExtension174Fn {}
}
}
impl ExtGlobalPriorityFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_EXT_global_priority\0")
.expect("Wrong extension string")
}
}
pub struct ExtGlobalPriorityFn {}
unsafe impl Send for ExtGlobalPriorityFn {}
unsafe impl Sync for ExtGlobalPriorityFn {}
impl ::std::clone::Clone for ExtGlobalPriorityFn {
fn clone(&self) -> Self {
ExtGlobalPriorityFn {}
}
}
impl ExtGlobalPriorityFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
ExtGlobalPriorityFn {}
}
}
#[doc = "Generated from \'VK_EXT_global_priority\'"]
impl StructureType {
pub const DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT: Self = StructureType(1000174000);
}
#[doc = "Generated from \'VK_EXT_global_priority\'"]
impl Result {
pub const ERROR_NOT_PERMITTED_EXT: Self = Result(-1000174001);
}
impl ExtExtension176Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_EXT_extension_176\0")
.expect("Wrong extension string")
}
}
pub struct ExtExtension176Fn {}
unsafe impl Send for ExtExtension176Fn {}
unsafe impl Sync for ExtExtension176Fn {}
impl ::std::clone::Clone for ExtExtension176Fn {
fn clone(&self) -> Self {
ExtExtension176Fn {}
}
}
impl ExtExtension176Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
ExtExtension176Fn {}
}
}
impl ExtExtension177Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_EXT_extension_177\0")
.expect("Wrong extension string")
}
}
pub struct ExtExtension177Fn {}
unsafe impl Send for ExtExtension177Fn {}
unsafe impl Sync for ExtExtension177Fn {}
impl ::std::clone::Clone for ExtExtension177Fn {
fn clone(&self) -> Self {
ExtExtension177Fn {}
}
}
impl ExtExtension177Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
ExtExtension177Fn {}
}
}
impl Khr8bitStorageFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_KHR_8bit_storage\0")
.expect("Wrong extension string")
}
}
pub struct Khr8bitStorageFn {}
unsafe impl Send for Khr8bitStorageFn {}
unsafe impl Sync for Khr8bitStorageFn {}
impl ::std::clone::Clone for Khr8bitStorageFn {
fn clone(&self) -> Self {
Khr8bitStorageFn {}
}
}
impl Khr8bitStorageFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
Khr8bitStorageFn {}
}
}
#[doc = "Generated from \'VK_KHR_8bit_storage\'"]
impl StructureType {
pub const PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR: Self = StructureType(1000177000);
}
impl ExtExternalMemoryHostFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_EXT_external_memory_host\0")
.expect("Wrong extension string")
}
}
#[allow(non_camel_case_types)]
pub type PFN_vkGetMemoryHostPointerPropertiesEXT = extern "system" fn(
device: Device,
handle_type: ExternalMemoryHandleTypeFlags,
p_host_pointer: *const c_void,
p_memory_host_pointer_properties: *mut MemoryHostPointerPropertiesEXT,
) -> Result;
pub struct ExtExternalMemoryHostFn {
pub get_memory_host_pointer_properties_ext: extern "system" fn(
device: Device,
handle_type: ExternalMemoryHandleTypeFlags,
p_host_pointer: *const c_void,
p_memory_host_pointer_properties: *mut MemoryHostPointerPropertiesEXT,
) -> Result,
}
unsafe impl Send for ExtExternalMemoryHostFn {}
unsafe impl Sync for ExtExternalMemoryHostFn {}
impl ::std::clone::Clone for ExtExternalMemoryHostFn {
fn clone(&self) -> Self {
ExtExternalMemoryHostFn {
get_memory_host_pointer_properties_ext: self.get_memory_host_pointer_properties_ext,
}
}
}
impl ExtExternalMemoryHostFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
ExtExternalMemoryHostFn {
get_memory_host_pointer_properties_ext: unsafe {
extern "system" fn get_memory_host_pointer_properties_ext(
_device: Device,
_handle_type: ExternalMemoryHandleTypeFlags,
_p_host_pointer: *const c_void,
_p_memory_host_pointer_properties: *mut MemoryHostPointerPropertiesEXT,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(get_memory_host_pointer_properties_ext)
))
}
let raw_name = stringify!(vkGetMemoryHostPointerPropertiesEXT);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_memory_host_pointer_properties_ext
} else {
::std::mem::transmute(val)
}
},
}
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetMemoryHostPointerPropertiesEXT.html>"]
pub unsafe fn get_memory_host_pointer_properties_ext(
&self,
device: Device,
handle_type: ExternalMemoryHandleTypeFlags,
p_host_pointer: *const c_void,
p_memory_host_pointer_properties: *mut MemoryHostPointerPropertiesEXT,
) -> Result {
(self.get_memory_host_pointer_properties_ext)(
device,
handle_type,
p_host_pointer,
p_memory_host_pointer_properties,
)
}
}
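// Illustrative sketch (added for clarity, not emitted by the Vulkan registry generator):
// fill the `ExtExternalMemoryHostFn` table through any symbol-lookup closure and query
// which memory types can import a host allocation. The lookup closure and the validity
// of `host_ptr` are assumptions left to the caller; entries that resolve to null keep
// the panicking stub installed by `load` above.
#[allow(dead_code)]
unsafe fn example_query_host_pointer_properties<F>(
    lookup: F,
    device: Device,
    host_ptr: *const c_void,
) -> MemoryHostPointerPropertiesEXT
where
    F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
    // Resolve the extension's entry points; unresolved names fall back to panicking stubs.
    let fp = ExtExternalMemoryHostFn::load(lookup);
    // Output structure; `Default` provides the correct `s_type` and a null `p_next`.
    let mut props = MemoryHostPointerPropertiesEXT::default();
    let _ = fp.get_memory_host_pointer_properties_ext(
        device,
        ExternalMemoryHandleTypeFlags::EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION,
        host_ptr,
        &mut props,
    );
    props
}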
#[doc = "Generated from \'VK_EXT_external_memory_host\'"]
impl StructureType {
pub const IMPORT_MEMORY_HOST_POINTER_INFO_EXT: Self = StructureType(1000178000);
}
#[doc = "Generated from \'VK_EXT_external_memory_host\'"]
impl StructureType {
pub const MEMORY_HOST_POINTER_PROPERTIES_EXT: Self = StructureType(1000178001);
}
#[doc = "Generated from \'VK_EXT_external_memory_host\'"]
impl StructureType {
pub const PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT: Self = StructureType(1000178002);
}
#[doc = "Generated from \'VK_EXT_external_memory_host\'"]
impl ExternalMemoryHandleTypeFlags {
pub const EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION: Self =
ExternalMemoryHandleTypeFlags(0b10000000);
}
#[doc = "Generated from \'VK_EXT_external_memory_host\'"]
impl ExternalMemoryHandleTypeFlags {
pub const EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY: Self =
ExternalMemoryHandleTypeFlags(0b100000000);
}
impl AmdBufferMarkerFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_AMD_buffer_marker\0")
.expect("Wrong extension string")
}
}
#[allow(non_camel_case_types)]
pub type PFN_vkCmdWriteBufferMarkerAMD = extern "system" fn(
command_buffer: CommandBuffer,
pipeline_stage: PipelineStageFlags,
dst_buffer: Buffer,
dst_offset: DeviceSize,
marker: u32,
) -> c_void;
pub struct AmdBufferMarkerFn {
pub cmd_write_buffer_marker_amd: extern "system" fn(
command_buffer: CommandBuffer,
pipeline_stage: PipelineStageFlags,
dst_buffer: Buffer,
dst_offset: DeviceSize,
marker: u32,
) -> c_void,
}
unsafe impl Send for AmdBufferMarkerFn {}
unsafe impl Sync for AmdBufferMarkerFn {}
impl ::std::clone::Clone for AmdBufferMarkerFn {
fn clone(&self) -> Self {
AmdBufferMarkerFn {
cmd_write_buffer_marker_amd: self.cmd_write_buffer_marker_amd,
}
}
}
impl AmdBufferMarkerFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
AmdBufferMarkerFn {
cmd_write_buffer_marker_amd: unsafe {
extern "system" fn cmd_write_buffer_marker_amd(
_command_buffer: CommandBuffer,
_pipeline_stage: PipelineStageFlags,
_dst_buffer: Buffer,
_dst_offset: DeviceSize,
_marker: u32,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(cmd_write_buffer_marker_amd)
))
}
let raw_name = stringify!(vkCmdWriteBufferMarkerAMD);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_write_buffer_marker_amd
} else {
::std::mem::transmute(val)
}
},
}
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdWriteBufferMarkerAMD.html>"]
pub unsafe fn cmd_write_buffer_marker_amd(
&self,
command_buffer: CommandBuffer,
pipeline_stage: PipelineStageFlags,
dst_buffer: Buffer,
dst_offset: DeviceSize,
marker: u32,
) -> c_void {
(self.cmd_write_buffer_marker_amd)(
command_buffer,
pipeline_stage,
dst_buffer,
dst_offset,
marker,
)
}
}
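// Illustrative sketch (not generated from the registry): record a buffer-marker write
// that lands once all prior commands have reached the bottom of the pipe. The command
// buffer, destination buffer, and offset are assumed to come from the caller's recording
// state and must satisfy the usual VK_AMD_buffer_marker validity rules.
#[allow(dead_code)]
unsafe fn example_cmd_write_buffer_marker(
    fp: &AmdBufferMarkerFn,
    command_buffer: CommandBuffer,
    dst_buffer: Buffer,
    dst_offset: DeviceSize,
    marker: u32,
) {
    fp.cmd_write_buffer_marker_amd(
        command_buffer,
        PipelineStageFlags::BOTTOM_OF_PIPE,
        dst_buffer,
        dst_offset,
        marker,
    );
}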
impl KhrShaderAtomicInt64Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_KHR_shader_atomic_int64\0")
.expect("Wrong extension string")
}
}
pub struct KhrShaderAtomicInt64Fn {}
unsafe impl Send for KhrShaderAtomicInt64Fn {}
unsafe impl Sync for KhrShaderAtomicInt64Fn {}
impl ::std::clone::Clone for KhrShaderAtomicInt64Fn {
fn clone(&self) -> Self {
KhrShaderAtomicInt64Fn {}
}
}
impl KhrShaderAtomicInt64Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
KhrShaderAtomicInt64Fn {}
}
}
#[doc = "Generated from \'VK_KHR_shader_atomic_int64\'"]
impl StructureType {
pub const PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR: Self = StructureType(1000180000);
}
impl AmdExtension182Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_AMD_extension_182\0")
.expect("Wrong extension string")
}
}
pub struct AmdExtension182Fn {}
unsafe impl Send for AmdExtension182Fn {}
unsafe impl Sync for AmdExtension182Fn {}
impl ::std::clone::Clone for AmdExtension182Fn {
fn clone(&self) -> Self {
AmdExtension182Fn {}
}
}
impl AmdExtension182Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
AmdExtension182Fn {}
}
}
impl AmdExtension183Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_AMD_extension_183\0")
.expect("Wrong extension string")
}
}
pub struct AmdExtension183Fn {}
unsafe impl Send for AmdExtension183Fn {}
unsafe impl Sync for AmdExtension183Fn {}
impl ::std::clone::Clone for AmdExtension183Fn {
fn clone(&self) -> Self {
AmdExtension183Fn {}
}
}
impl AmdExtension183Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
AmdExtension183Fn {}
}
}
impl AmdExtension184Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_AMD_extension_184\0")
.expect("Wrong extension string")
}
}
pub struct AmdExtension184Fn {}
unsafe impl Send for AmdExtension184Fn {}
unsafe impl Sync for AmdExtension184Fn {}
impl ::std::clone::Clone for AmdExtension184Fn {
fn clone(&self) -> Self {
AmdExtension184Fn {}
}
}
impl AmdExtension184Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
AmdExtension184Fn {}
}
}
impl ExtCalibratedTimestampsFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_EXT_calibrated_timestamps\0")
.expect("Wrong extension string")
}
}
#[allow(non_camel_case_types)]
pub type PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = extern "system" fn(
physical_device: PhysicalDevice,
p_time_domain_count: *mut u32,
p_time_domains: *mut TimeDomainEXT,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkGetCalibratedTimestampsEXT = extern "system" fn(
device: Device,
timestamp_count: u32,
p_timestamp_infos: *const CalibratedTimestampInfoEXT,
p_timestamps: *mut u64,
p_max_deviation: *mut u64,
) -> Result;
pub struct ExtCalibratedTimestampsFn {
pub get_physical_device_calibrateable_time_domains_ext: extern "system" fn(
physical_device: PhysicalDevice,
p_time_domain_count: *mut u32,
p_time_domains: *mut TimeDomainEXT,
) -> Result,
pub get_calibrated_timestamps_ext: extern "system" fn(
device: Device,
timestamp_count: u32,
p_timestamp_infos: *const CalibratedTimestampInfoEXT,
p_timestamps: *mut u64,
p_max_deviation: *mut u64,
) -> Result,
}
unsafe impl Send for ExtCalibratedTimestampsFn {}
unsafe impl Sync for ExtCalibratedTimestampsFn {}
impl ::std::clone::Clone for ExtCalibratedTimestampsFn {
fn clone(&self) -> Self {
ExtCalibratedTimestampsFn {
get_physical_device_calibrateable_time_domains_ext: self
.get_physical_device_calibrateable_time_domains_ext,
get_calibrated_timestamps_ext: self.get_calibrated_timestamps_ext,
}
}
}
impl ExtCalibratedTimestampsFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
ExtCalibratedTimestampsFn {
get_physical_device_calibrateable_time_domains_ext: unsafe {
extern "system" fn get_physical_device_calibrateable_time_domains_ext(
_physical_device: PhysicalDevice,
_p_time_domain_count: *mut u32,
_p_time_domains: *mut TimeDomainEXT,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(get_physical_device_calibrateable_time_domains_ext)
))
}
let raw_name = stringify!(vkGetPhysicalDeviceCalibrateableTimeDomainsEXT);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_physical_device_calibrateable_time_domains_ext
} else {
::std::mem::transmute(val)
}
},
get_calibrated_timestamps_ext: unsafe {
extern "system" fn get_calibrated_timestamps_ext(
_device: Device,
_timestamp_count: u32,
_p_timestamp_infos: *const CalibratedTimestampInfoEXT,
_p_timestamps: *mut u64,
_p_max_deviation: *mut u64,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(get_calibrated_timestamps_ext)
))
}
let raw_name = stringify!(vkGetCalibratedTimestampsEXT);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_calibrated_timestamps_ext
} else {
::std::mem::transmute(val)
}
},
}
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetPhysicalDeviceCalibrateableTimeDomainsEXT.html>"]
pub unsafe fn get_physical_device_calibrateable_time_domains_ext(
&self,
physical_device: PhysicalDevice,
p_time_domain_count: *mut u32,
p_time_domains: *mut TimeDomainEXT,
) -> Result {
(self.get_physical_device_calibrateable_time_domains_ext)(
physical_device,
p_time_domain_count,
p_time_domains,
)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetCalibratedTimestampsEXT.html>"]
pub unsafe fn get_calibrated_timestamps_ext(
&self,
device: Device,
timestamp_count: u32,
p_timestamp_infos: *const CalibratedTimestampInfoEXT,
p_timestamps: *mut u64,
p_max_deviation: *mut u64,
) -> Result {
(self.get_calibrated_timestamps_ext)(
device,
timestamp_count,
p_timestamp_infos,
p_timestamps,
p_max_deviation,
)
}
}
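// Illustrative sketch (not generated from the registry): the usual two-call enumeration
// pattern for vkGetPhysicalDeviceCalibrateableTimeDomainsEXT - first query the count with
// a null output pointer, then fill a buffer of exactly that length. Result codes are
// discarded here for brevity; a real caller should check them.
#[allow(dead_code)]
unsafe fn example_enumerate_time_domains(
    fp: &ExtCalibratedTimestampsFn,
    physical_device: PhysicalDevice,
) -> Vec<TimeDomainEXT> {
    let mut count = 0u32;
    // First call: p_time_domains is null, so only the count is written.
    let _ = fp.get_physical_device_calibrateable_time_domains_ext(
        physical_device,
        &mut count,
        ::std::ptr::null_mut(),
    );
    let mut domains = Vec::with_capacity(count as usize);
    // Second call: the driver writes `count` elements into the spare capacity.
    let _ = fp.get_physical_device_calibrateable_time_domains_ext(
        physical_device,
        &mut count,
        domains.as_mut_ptr(),
    );
    domains.set_len(count as usize);
    domains
}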
#[doc = "Generated from \'VK_EXT_calibrated_timestamps\'"]
impl StructureType {
pub const CALIBRATED_TIMESTAMP_INFO_EXT: Self = StructureType(1000184000);
}
impl AmdShaderCorePropertiesFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_AMD_shader_core_properties\0")
.expect("Wrong extension string")
}
}
pub struct AmdShaderCorePropertiesFn {}
unsafe impl Send for AmdShaderCorePropertiesFn {}
unsafe impl Sync for AmdShaderCorePropertiesFn {}
impl ::std::clone::Clone for AmdShaderCorePropertiesFn {
fn clone(&self) -> Self {
AmdShaderCorePropertiesFn {}
}
}
impl AmdShaderCorePropertiesFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
AmdShaderCorePropertiesFn {}
}
}
#[doc = "Generated from \'VK_AMD_shader_core_properties\'"]
impl StructureType {
pub const PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD: Self = StructureType(1000185000);
}
impl AmdExtension187Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_AMD_extension_187\0")
.expect("Wrong extension string")
}
}
pub struct AmdExtension187Fn {}
unsafe impl Send for AmdExtension187Fn {}
unsafe impl Sync for AmdExtension187Fn {}
impl ::std::clone::Clone for AmdExtension187Fn {
fn clone(&self) -> Self {
AmdExtension187Fn {}
}
}
impl AmdExtension187Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
AmdExtension187Fn {}
}
}
impl AmdExtension188Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_AMD_extension_188\0")
.expect("Wrong extension string")
}
}
pub struct AmdExtension188Fn {}
unsafe impl Send for AmdExtension188Fn {}
unsafe impl Sync for AmdExtension188Fn {}
impl ::std::clone::Clone for AmdExtension188Fn {
fn clone(&self) -> Self {
AmdExtension188Fn {}
}
}
impl AmdExtension188Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
AmdExtension188Fn {}
}
}
impl AmdExtension189Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_AMD_extension_189\0")
.expect("Wrong extension string")
}
}
pub struct AmdExtension189Fn {}
unsafe impl Send for AmdExtension189Fn {}
unsafe impl Sync for AmdExtension189Fn {}
impl ::std::clone::Clone for AmdExtension189Fn {
fn clone(&self) -> Self {
AmdExtension189Fn {}
}
}
impl AmdExtension189Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
AmdExtension189Fn {}
}
}
impl AmdMemoryOverallocationBehaviorFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_AMD_memory_overallocation_behavior\0")
.expect("Wrong extension string")
}
}
pub struct AmdMemoryOverallocationBehaviorFn {}
unsafe impl Send for AmdMemoryOverallocationBehaviorFn {}
unsafe impl Sync for AmdMemoryOverallocationBehaviorFn {}
impl ::std::clone::Clone for AmdMemoryOverallocationBehaviorFn {
fn clone(&self) -> Self {
AmdMemoryOverallocationBehaviorFn {}
}
}
impl AmdMemoryOverallocationBehaviorFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
AmdMemoryOverallocationBehaviorFn {}
}
}
#[doc = "Generated from \'VK_AMD_memory_overallocation_behavior\'"]
impl StructureType {
pub const DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD: Self = StructureType(1000189000);
}
impl ExtVertexAttributeDivisorFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_EXT_vertex_attribute_divisor\0")
.expect("Wrong extension string")
}
}
pub struct ExtVertexAttributeDivisorFn {}
unsafe impl Send for ExtVertexAttributeDivisorFn {}
unsafe impl Sync for ExtVertexAttributeDivisorFn {}
impl ::std::clone::Clone for ExtVertexAttributeDivisorFn {
fn clone(&self) -> Self {
ExtVertexAttributeDivisorFn {}
}
}
impl ExtVertexAttributeDivisorFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
ExtVertexAttributeDivisorFn {}
}
}
#[doc = "Generated from \'VK_EXT_vertex_attribute_divisor\'"]
impl StructureType {
pub const PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT: Self =
StructureType(1000190000);
}
#[doc = "Generated from \'VK_EXT_vertex_attribute_divisor\'"]
impl StructureType {
pub const PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT: Self = StructureType(1000190001);
}
#[doc = "Generated from \'VK_EXT_vertex_attribute_divisor\'"]
impl StructureType {
pub const PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT: Self =
StructureType(1000190002);
}
impl GoogleExtension192Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_GOOGLE_extension_192\0")
.expect("Wrong extension string")
}
}
pub struct GoogleExtension192Fn {}
unsafe impl Send for GoogleExtension192Fn {}
unsafe impl Sync for GoogleExtension192Fn {}
impl ::std::clone::Clone for GoogleExtension192Fn {
fn clone(&self) -> Self {
GoogleExtension192Fn {}
}
}
impl GoogleExtension192Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
GoogleExtension192Fn {}
}
}
impl GoogleExtension193Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_GOOGLE_extension_193\0")
.expect("Wrong extension string")
}
}
pub struct GoogleExtension193Fn {}
unsafe impl Send for GoogleExtension193Fn {}
unsafe impl Sync for GoogleExtension193Fn {}
impl ::std::clone::Clone for GoogleExtension193Fn {
fn clone(&self) -> Self {
GoogleExtension193Fn {}
}
}
impl GoogleExtension193Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
GoogleExtension193Fn {}
}
}
impl GoogleExtension194Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_GOOGLE_extension_194\0")
.expect("Wrong extension string")
}
}
pub struct GoogleExtension194Fn {}
unsafe impl Send for GoogleExtension194Fn {}
unsafe impl Sync for GoogleExtension194Fn {}
impl ::std::clone::Clone for GoogleExtension194Fn {
fn clone(&self) -> Self {
GoogleExtension194Fn {}
}
}
impl GoogleExtension194Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
GoogleExtension194Fn {}
}
}
impl GoogleExtension195Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_GOOGLE_extension_195\0")
.expect("Wrong extension string")
}
}
pub struct GoogleExtension195Fn {}
unsafe impl Send for GoogleExtension195Fn {}
unsafe impl Sync for GoogleExtension195Fn {}
impl ::std::clone::Clone for GoogleExtension195Fn {
fn clone(&self) -> Self {
GoogleExtension195Fn {}
}
}
impl GoogleExtension195Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
GoogleExtension195Fn {}
}
}
impl GoogleExtension196Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_GOOGLE_extension_196\0")
.expect("Wrong extension string")
}
}
pub struct GoogleExtension196Fn {}
unsafe impl Send for GoogleExtension196Fn {}
unsafe impl Sync for GoogleExtension196Fn {}
impl ::std::clone::Clone for GoogleExtension196Fn {
fn clone(&self) -> Self {
GoogleExtension196Fn {}
}
}
impl GoogleExtension196Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
GoogleExtension196Fn {}
}
}
impl KhrDriverPropertiesFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_KHR_driver_properties\0")
.expect("Wrong extension string")
}
}
pub struct KhrDriverPropertiesFn {}
unsafe impl Send for KhrDriverPropertiesFn {}
unsafe impl Sync for KhrDriverPropertiesFn {}
impl ::std::clone::Clone for KhrDriverPropertiesFn {
fn clone(&self) -> Self {
KhrDriverPropertiesFn {}
}
}
impl KhrDriverPropertiesFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
KhrDriverPropertiesFn {}
}
}
#[doc = "Generated from \'VK_KHR_driver_properties\'"]
impl StructureType {
pub const PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR: Self = StructureType(1000196000);
}
impl KhrShaderFloatControlsFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_KHR_shader_float_controls\0")
.expect("Wrong extension string")
}
}
pub struct KhrShaderFloatControlsFn {}
unsafe impl Send for KhrShaderFloatControlsFn {}
unsafe impl Sync for KhrShaderFloatControlsFn {}
impl ::std::clone::Clone for KhrShaderFloatControlsFn {
fn clone(&self) -> Self {
KhrShaderFloatControlsFn {}
}
}
impl KhrShaderFloatControlsFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
KhrShaderFloatControlsFn {}
}
}
#[doc = "Generated from \'VK_KHR_shader_float_controls\'"]
impl StructureType {
pub const PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR: Self = StructureType(1000197000);
}
impl NvShaderSubgroupPartitionedFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_NV_shader_subgroup_partitioned\0")
.expect("Wrong extension string")
}
}
pub struct NvShaderSubgroupPartitionedFn {}
unsafe impl Send for NvShaderSubgroupPartitionedFn {}
unsafe impl Sync for NvShaderSubgroupPartitionedFn {}
impl ::std::clone::Clone for NvShaderSubgroupPartitionedFn {
fn clone(&self) -> Self {
NvShaderSubgroupPartitionedFn {}
}
}
impl NvShaderSubgroupPartitionedFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
NvShaderSubgroupPartitionedFn {}
}
}
#[doc = "Generated from \'VK_NV_shader_subgroup_partitioned\'"]
impl SubgroupFeatureFlags {
pub const PARTITIONED_NV: Self = SubgroupFeatureFlags(0b100000000);
}
impl KhrDepthStencilResolveFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_KHR_depth_stencil_resolve\0")
.expect("Wrong extension string")
}
}
pub struct KhrDepthStencilResolveFn {}
unsafe impl Send for KhrDepthStencilResolveFn {}
unsafe impl Sync for KhrDepthStencilResolveFn {}
impl ::std::clone::Clone for KhrDepthStencilResolveFn {
fn clone(&self) -> Self {
KhrDepthStencilResolveFn {}
}
}
impl KhrDepthStencilResolveFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
KhrDepthStencilResolveFn {}
}
}
#[doc = "Generated from \'VK_KHR_depth_stencil_resolve\'"]
impl StructureType {
pub const PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR: Self =
StructureType(1000199000);
}
#[doc = "Generated from \'VK_KHR_depth_stencil_resolve\'"]
impl StructureType {
pub const SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR: Self = StructureType(1000199001);
}
impl KhrSwapchainMutableFormatFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_KHR_swapchain_mutable_format\0")
.expect("Wrong extension string")
}
}
pub struct KhrSwapchainMutableFormatFn {}
unsafe impl Send for KhrSwapchainMutableFormatFn {}
unsafe impl Sync for KhrSwapchainMutableFormatFn {}
impl ::std::clone::Clone for KhrSwapchainMutableFormatFn {
fn clone(&self) -> Self {
KhrSwapchainMutableFormatFn {}
}
}
impl KhrSwapchainMutableFormatFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
KhrSwapchainMutableFormatFn {}
}
}
#[doc = "Generated from \'VK_KHR_swapchain_mutable_format\'"]
impl SwapchainCreateFlagsKHR {
pub const MUTABLE_FORMAT: Self = SwapchainCreateFlagsKHR(0b100);
}
impl NvComputeShaderDerivativesFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_NV_compute_shader_derivatives\0")
.expect("Wrong extension string")
}
}
pub struct NvComputeShaderDerivativesFn {}
unsafe impl Send for NvComputeShaderDerivativesFn {}
unsafe impl Sync for NvComputeShaderDerivativesFn {}
impl ::std::clone::Clone for NvComputeShaderDerivativesFn {
fn clone(&self) -> Self {
NvComputeShaderDerivativesFn {}
}
}
impl NvComputeShaderDerivativesFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
NvComputeShaderDerivativesFn {}
}
}
#[doc = "Generated from \'VK_NV_compute_shader_derivatives\'"]
impl StructureType {
pub const PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV: Self =
StructureType(1000201000);
}
impl NvMeshShaderFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_NV_mesh_shader\0")
.expect("Wrong extension string")
}
}
#[allow(non_camel_case_types)]
pub type PFN_vkCmdDrawMeshTasksNV =
extern "system" fn(command_buffer: CommandBuffer, task_count: u32, first_task: u32) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdDrawMeshTasksIndirectNV = extern "system" fn(
command_buffer: CommandBuffer,
buffer: Buffer,
offset: DeviceSize,
draw_count: u32,
stride: u32,
) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdDrawMeshTasksIndirectCountNV = extern "system" fn(
command_buffer: CommandBuffer,
buffer: Buffer,
offset: DeviceSize,
count_buffer: Buffer,
count_buffer_offset: DeviceSize,
max_draw_count: u32,
stride: u32,
) -> c_void;
pub struct NvMeshShaderFn {
pub cmd_draw_mesh_tasks_nv: extern "system" fn(
command_buffer: CommandBuffer,
task_count: u32,
first_task: u32,
) -> c_void,
pub cmd_draw_mesh_tasks_indirect_nv: extern "system" fn(
command_buffer: CommandBuffer,
buffer: Buffer,
offset: DeviceSize,
draw_count: u32,
stride: u32,
) -> c_void,
pub cmd_draw_mesh_tasks_indirect_count_nv: extern "system" fn(
command_buffer: CommandBuffer,
buffer: Buffer,
offset: DeviceSize,
count_buffer: Buffer,
count_buffer_offset: DeviceSize,
max_draw_count: u32,
stride: u32,
) -> c_void,
}
unsafe impl Send for NvMeshShaderFn {}
unsafe impl Sync for NvMeshShaderFn {}
impl ::std::clone::Clone for NvMeshShaderFn {
fn clone(&self) -> Self {
NvMeshShaderFn {
cmd_draw_mesh_tasks_nv: self.cmd_draw_mesh_tasks_nv,
cmd_draw_mesh_tasks_indirect_nv: self.cmd_draw_mesh_tasks_indirect_nv,
cmd_draw_mesh_tasks_indirect_count_nv: self.cmd_draw_mesh_tasks_indirect_count_nv,
}
}
}
impl NvMeshShaderFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
NvMeshShaderFn {
cmd_draw_mesh_tasks_nv: unsafe {
extern "system" fn cmd_draw_mesh_tasks_nv(
_command_buffer: CommandBuffer,
_task_count: u32,
_first_task: u32,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(cmd_draw_mesh_tasks_nv)
))
}
let raw_name = stringify!(vkCmdDrawMeshTasksNV);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_draw_mesh_tasks_nv
} else {
::std::mem::transmute(val)
}
},
cmd_draw_mesh_tasks_indirect_nv: unsafe {
extern "system" fn cmd_draw_mesh_tasks_indirect_nv(
_command_buffer: CommandBuffer,
_buffer: Buffer,
_offset: DeviceSize,
_draw_count: u32,
_stride: u32,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(cmd_draw_mesh_tasks_indirect_nv)
))
}
let raw_name = stringify!(vkCmdDrawMeshTasksIndirectNV);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_draw_mesh_tasks_indirect_nv
} else {
::std::mem::transmute(val)
}
},
cmd_draw_mesh_tasks_indirect_count_nv: unsafe {
extern "system" fn cmd_draw_mesh_tasks_indirect_count_nv(
_command_buffer: CommandBuffer,
_buffer: Buffer,
_offset: DeviceSize,
_count_buffer: Buffer,
_count_buffer_offset: DeviceSize,
_max_draw_count: u32,
_stride: u32,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(cmd_draw_mesh_tasks_indirect_count_nv)
))
}
let raw_name = stringify!(vkCmdDrawMeshTasksIndirectCountNV);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_draw_mesh_tasks_indirect_count_nv
} else {
::std::mem::transmute(val)
}
},
}
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdDrawMeshTasksNV.html>"]
pub unsafe fn cmd_draw_mesh_tasks_nv(
&self,
command_buffer: CommandBuffer,
task_count: u32,
first_task: u32,
) -> c_void {
(self.cmd_draw_mesh_tasks_nv)(command_buffer, task_count, first_task)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdDrawMeshTasksIndirectNV.html>"]
pub unsafe fn cmd_draw_mesh_tasks_indirect_nv(
&self,
command_buffer: CommandBuffer,
buffer: Buffer,
offset: DeviceSize,
draw_count: u32,
stride: u32,
) -> c_void {
(self.cmd_draw_mesh_tasks_indirect_nv)(command_buffer, buffer, offset, draw_count, stride)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdDrawMeshTasksIndirectCountNV.html>"]
pub unsafe fn cmd_draw_mesh_tasks_indirect_count_nv(
&self,
command_buffer: CommandBuffer,
buffer: Buffer,
offset: DeviceSize,
count_buffer: Buffer,
count_buffer_offset: DeviceSize,
max_draw_count: u32,
stride: u32,
) -> c_void {
(self.cmd_draw_mesh_tasks_indirect_count_nv)(
command_buffer,
buffer,
offset,
count_buffer,
count_buffer_offset,
max_draw_count,
stride,
)
}
}
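// Illustrative sketch (not generated from the registry): record a direct mesh-task draw
// followed by an indirect one sourced from `indirect_buffer`. Pipeline binding,
// synchronization, and the layout of the indirect buffer are assumed to be handled by
// the caller.
#[allow(dead_code)]
unsafe fn example_record_mesh_draws(
    fp: &NvMeshShaderFn,
    command_buffer: CommandBuffer,
    task_count: u32,
    indirect_buffer: Buffer,
    indirect_offset: DeviceSize,
) {
    // Direct dispatch of `task_count` task workgroups starting at task 0.
    fp.cmd_draw_mesh_tasks_nv(command_buffer, task_count, 0);
    // One indirect draw record; the stride is irrelevant when draw_count is 1.
    fp.cmd_draw_mesh_tasks_indirect_nv(command_buffer, indirect_buffer, indirect_offset, 1, 0);
}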
#[doc = "Generated from \'VK_NV_mesh_shader\'"]
impl StructureType {
pub const PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV: Self = StructureType(1000202000);
}
#[doc = "Generated from \'VK_NV_mesh_shader\'"]
impl StructureType {
pub const PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV: Self = StructureType(1000202001);
}
#[doc = "Generated from \'VK_NV_mesh_shader\'"]
impl ShaderStageFlags {
pub const TASK_NV: Self = ShaderStageFlags(0b1000000);
}
#[doc = "Generated from \'VK_NV_mesh_shader\'"]
impl ShaderStageFlags {
pub const MESH_NV: Self = ShaderStageFlags(0b10000000);
}
#[doc = "Generated from \'VK_NV_mesh_shader\'"]
impl PipelineStageFlags {
pub const TASK_SHADER_NV: Self = PipelineStageFlags(0b10000000000000000000);
}
#[doc = "Generated from \'VK_NV_mesh_shader\'"]
impl PipelineStageFlags {
pub const MESH_SHADER_NV: Self = PipelineStageFlags(0b100000000000000000000);
}
impl NvFragmentShaderBarycentricFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_NV_fragment_shader_barycentric\0")
.expect("Wrong extension string")
}
}
pub struct NvFragmentShaderBarycentricFn {}
unsafe impl Send for NvFragmentShaderBarycentricFn {}
unsafe impl Sync for NvFragmentShaderBarycentricFn {}
impl ::std::clone::Clone for NvFragmentShaderBarycentricFn {
fn clone(&self) -> Self {
NvFragmentShaderBarycentricFn {}
}
}
impl NvFragmentShaderBarycentricFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
NvFragmentShaderBarycentricFn {}
}
}
#[doc = "Generated from \'VK_NV_fragment_shader_barycentric\'"]
impl StructureType {
pub const PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV: Self =
StructureType(1000203000);
}
impl NvShaderImageFootprintFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_NV_shader_image_footprint\0")
.expect("Wrong extension string")
}
}
pub struct NvShaderImageFootprintFn {}
unsafe impl Send for NvShaderImageFootprintFn {}
unsafe impl Sync for NvShaderImageFootprintFn {}
impl ::std::clone::Clone for NvShaderImageFootprintFn {
fn clone(&self) -> Self {
NvShaderImageFootprintFn {}
}
}
impl NvShaderImageFootprintFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
NvShaderImageFootprintFn {}
}
}
#[doc = "Generated from \'VK_NV_shader_image_footprint\'"]
impl StructureType {
pub const PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV: Self = StructureType(1000204000);
}
impl NvScissorExclusiveFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_NV_scissor_exclusive\0")
.expect("Wrong extension string")
}
}
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetExclusiveScissorNV = extern "system" fn(
command_buffer: CommandBuffer,
first_exclusive_scissor: u32,
exclusive_scissor_count: u32,
p_exclusive_scissors: *const Rect2D,
) -> c_void;
pub struct NvScissorExclusiveFn {
pub cmd_set_exclusive_scissor_nv: extern "system" fn(
command_buffer: CommandBuffer,
first_exclusive_scissor: u32,
exclusive_scissor_count: u32,
p_exclusive_scissors: *const Rect2D,
) -> c_void,
}
unsafe impl Send for NvScissorExclusiveFn {}
unsafe impl Sync for NvScissorExclusiveFn {}
impl ::std::clone::Clone for NvScissorExclusiveFn {
fn clone(&self) -> Self {
NvScissorExclusiveFn {
cmd_set_exclusive_scissor_nv: self.cmd_set_exclusive_scissor_nv,
}
}
}
impl NvScissorExclusiveFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
NvScissorExclusiveFn {
cmd_set_exclusive_scissor_nv: unsafe {
extern "system" fn cmd_set_exclusive_scissor_nv(
_command_buffer: CommandBuffer,
_first_exclusive_scissor: u32,
_exclusive_scissor_count: u32,
_p_exclusive_scissors: *const Rect2D,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(cmd_set_exclusive_scissor_nv)
))
}
let raw_name = stringify!(vkCmdSetExclusiveScissorNV);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_set_exclusive_scissor_nv
} else {
::std::mem::transmute(val)
}
},
}
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdSetExclusiveScissorNV.html>"]
pub unsafe fn cmd_set_exclusive_scissor_nv(
&self,
command_buffer: CommandBuffer,
first_exclusive_scissor: u32,
exclusive_scissor_count: u32,
p_exclusive_scissors: *const Rect2D,
) -> c_void {
(self.cmd_set_exclusive_scissor_nv)(
command_buffer,
first_exclusive_scissor,
exclusive_scissor_count,
p_exclusive_scissors,
)
}
}
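// Illustrative sketch (not generated from the registry): set the exclusive scissor
// rectangles of a command buffer from a caller-provided slice, starting at index 0.
// Keeping the slice length within the device's exclusive scissor limit is left to the
// caller.
#[allow(dead_code)]
unsafe fn example_set_exclusive_scissors(
    fp: &NvScissorExclusiveFn,
    command_buffer: CommandBuffer,
    scissors: &[Rect2D],
) {
    fp.cmd_set_exclusive_scissor_nv(
        command_buffer,
        0,
        scissors.len() as u32,
        scissors.as_ptr(),
    );
}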
#[doc = "Generated from \'VK_NV_scissor_exclusive\'"]
impl StructureType {
pub const PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV: Self =
StructureType(1000205000);
}
#[doc = "Generated from \'VK_NV_scissor_exclusive\'"]
impl DynamicState {
pub const EXCLUSIVE_SCISSOR_NV: Self = DynamicState(1000205001);
}
#[doc = "Generated from \'VK_NV_scissor_exclusive\'"]
impl StructureType {
pub const PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV: Self = StructureType(1000205002);
}
impl NvDeviceDiagnosticCheckpointsFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_NV_device_diagnostic_checkpoints\0")
.expect("Wrong extension string")
}
}
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetCheckpointNV =
extern "system" fn(command_buffer: CommandBuffer, p_checkpoint_marker: *const c_void) -> c_void;
#[allow(non_camel_case_types)]
pub type PFN_vkGetQueueCheckpointDataNV = extern "system" fn(
queue: Queue,
p_checkpoint_data_count: *mut u32,
p_checkpoint_data: *mut CheckpointDataNV,
) -> c_void;
pub struct NvDeviceDiagnosticCheckpointsFn {
pub cmd_set_checkpoint_nv: extern "system" fn(
command_buffer: CommandBuffer,
p_checkpoint_marker: *const c_void,
) -> c_void,
pub get_queue_checkpoint_data_nv: extern "system" fn(
queue: Queue,
p_checkpoint_data_count: *mut u32,
p_checkpoint_data: *mut CheckpointDataNV,
) -> c_void,
}
unsafe impl Send for NvDeviceDiagnosticCheckpointsFn {}
unsafe impl Sync for NvDeviceDiagnosticCheckpointsFn {}
impl ::std::clone::Clone for NvDeviceDiagnosticCheckpointsFn {
fn clone(&self) -> Self {
NvDeviceDiagnosticCheckpointsFn {
cmd_set_checkpoint_nv: self.cmd_set_checkpoint_nv,
get_queue_checkpoint_data_nv: self.get_queue_checkpoint_data_nv,
}
}
}
impl NvDeviceDiagnosticCheckpointsFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
NvDeviceDiagnosticCheckpointsFn {
cmd_set_checkpoint_nv: unsafe {
extern "system" fn cmd_set_checkpoint_nv(
_command_buffer: CommandBuffer,
_p_checkpoint_marker: *const c_void,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(cmd_set_checkpoint_nv)
))
}
let raw_name = stringify!(vkCmdSetCheckpointNV);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
cmd_set_checkpoint_nv
} else {
::std::mem::transmute(val)
}
},
get_queue_checkpoint_data_nv: unsafe {
extern "system" fn get_queue_checkpoint_data_nv(
_queue: Queue,
_p_checkpoint_data_count: *mut u32,
_p_checkpoint_data: *mut CheckpointDataNV,
) -> c_void {
panic!(concat!(
"Unable to load ",
stringify!(get_queue_checkpoint_data_nv)
))
}
let raw_name = stringify!(vkGetQueueCheckpointDataNV);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_queue_checkpoint_data_nv
} else {
::std::mem::transmute(val)
}
},
}
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCmdSetCheckpointNV.html>"]
pub unsafe fn cmd_set_checkpoint_nv(
&self,
command_buffer: CommandBuffer,
p_checkpoint_marker: *const c_void,
) -> c_void {
(self.cmd_set_checkpoint_nv)(command_buffer, p_checkpoint_marker)
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetQueueCheckpointDataNV.html>"]
pub unsafe fn get_queue_checkpoint_data_nv(
&self,
queue: Queue,
p_checkpoint_data_count: *mut u32,
p_checkpoint_data: *mut CheckpointDataNV,
) -> c_void {
(self.get_queue_checkpoint_data_nv)(queue, p_checkpoint_data_count, p_checkpoint_data)
}
}
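// Illustrative sketch (not generated from the registry): tag a command buffer with an
// opaque checkpoint marker, then query how many checkpoint records a queue currently
// exposes. Passing a `CheckpointDataNV` buffer of that length in a second call retrieves
// the actual entries; only the count query is shown here.
#[allow(dead_code)]
unsafe fn example_checkpoint_roundtrip(
    fp: &NvDeviceDiagnosticCheckpointsFn,
    command_buffer: CommandBuffer,
    queue: Queue,
    marker: *const c_void,
) -> u32 {
    fp.cmd_set_checkpoint_nv(command_buffer, marker);
    let mut count = 0u32;
    fp.get_queue_checkpoint_data_nv(queue, &mut count, ::std::ptr::null_mut());
    count
}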
#[doc = "Generated from \'VK_NV_device_diagnostic_checkpoints\'"]
impl StructureType {
pub const CHECKPOINT_DATA_NV: Self = StructureType(1000206000);
}
#[doc = "Generated from \'VK_NV_device_diagnostic_checkpoints\'"]
impl StructureType {
pub const QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV: Self = StructureType(1000206001);
}
impl KhrExtension208Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_KHR_extension_208\0")
.expect("Wrong extension string")
}
}
pub struct KhrExtension208Fn {}
unsafe impl Send for KhrExtension208Fn {}
unsafe impl Sync for KhrExtension208Fn {}
impl ::std::clone::Clone for KhrExtension208Fn {
fn clone(&self) -> Self {
KhrExtension208Fn {}
}
}
impl KhrExtension208Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
KhrExtension208Fn {}
}
}
impl KhrExtension209Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_KHR_extension_209\0")
.expect("Wrong extension string")
}
}
pub struct KhrExtension209Fn {}
unsafe impl Send for KhrExtension209Fn {}
unsafe impl Sync for KhrExtension209Fn {}
impl ::std::clone::Clone for KhrExtension209Fn {
fn clone(&self) -> Self {
KhrExtension209Fn {}
}
}
impl KhrExtension209Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
KhrExtension209Fn {}
}
}
impl IntelExtension210Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_INTEL_extension_210\0")
.expect("Wrong extension string")
}
}
pub struct IntelExtension210Fn {}
unsafe impl Send for IntelExtension210Fn {}
unsafe impl Sync for IntelExtension210Fn {}
impl ::std::clone::Clone for IntelExtension210Fn {
fn clone(&self) -> Self {
IntelExtension210Fn {}
}
}
impl IntelExtension210Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
IntelExtension210Fn {}
}
}
impl IntelExtension211Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_INTEL_extension_211\0")
.expect("Wrong extension string")
}
}
pub struct IntelExtension211Fn {}
unsafe impl Send for IntelExtension211Fn {}
unsafe impl Sync for IntelExtension211Fn {}
impl ::std::clone::Clone for IntelExtension211Fn {
fn clone(&self) -> Self {
IntelExtension211Fn {}
}
}
impl IntelExtension211Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
IntelExtension211Fn {}
}
}
impl KhrVulkanMemoryModelFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_KHR_vulkan_memory_model\0")
.expect("Wrong extension string")
}
}
pub struct KhrVulkanMemoryModelFn {}
unsafe impl Send for KhrVulkanMemoryModelFn {}
unsafe impl Sync for KhrVulkanMemoryModelFn {}
impl ::std::clone::Clone for KhrVulkanMemoryModelFn {
fn clone(&self) -> Self {
KhrVulkanMemoryModelFn {}
}
}
impl KhrVulkanMemoryModelFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
KhrVulkanMemoryModelFn {}
}
}
#[doc = "Generated from \'VK_KHR_vulkan_memory_model\'"]
impl StructureType {
pub const PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR: Self = StructureType(1000211000);
}
impl ExtPciBusInfoFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_EXT_pci_bus_info\0")
.expect("Wrong extension string")
}
}
pub struct ExtPciBusInfoFn {}
unsafe impl Send for ExtPciBusInfoFn {}
unsafe impl Sync for ExtPciBusInfoFn {}
impl ::std::clone::Clone for ExtPciBusInfoFn {
fn clone(&self) -> Self {
ExtPciBusInfoFn {}
}
}
impl ExtPciBusInfoFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
ExtPciBusInfoFn {}
}
}
#[doc = "Generated from \'VK_EXT_pci_bus_info\'"]
impl StructureType {
pub const PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT: Self = StructureType(1000212000);
}
impl AmdExtension214Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_AMD_extension_214\0")
.expect("Wrong extension string")
}
}
pub struct AmdExtension214Fn {}
unsafe impl Send for AmdExtension214Fn {}
unsafe impl Sync for AmdExtension214Fn {}
impl ::std::clone::Clone for AmdExtension214Fn {
fn clone(&self) -> Self {
AmdExtension214Fn {}
}
}
impl AmdExtension214Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
AmdExtension214Fn {}
}
}
impl FuchsiaImagepipeSurfaceFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_FUCHSIA_imagepipe_surface\0")
.expect("Wrong extension string")
}
}
#[allow(non_camel_case_types)]
pub type PFN_vkCreateImagePipeSurfaceFUCHSIA = extern "system" fn(
instance: Instance,
p_create_info: *const ImagePipeSurfaceCreateInfoFUCHSIA,
p_allocator: *const AllocationCallbacks,
p_surface: *mut SurfaceKHR,
) -> Result;
pub struct FuchsiaImagepipeSurfaceFn {
pub create_image_pipe_surface_fuchsia: extern "system" fn(
instance: Instance,
p_create_info: *const ImagePipeSurfaceCreateInfoFUCHSIA,
p_allocator: *const AllocationCallbacks,
p_surface: *mut SurfaceKHR,
) -> Result,
}
unsafe impl Send for FuchsiaImagepipeSurfaceFn {}
unsafe impl Sync for FuchsiaImagepipeSurfaceFn {}
impl ::std::clone::Clone for FuchsiaImagepipeSurfaceFn {
fn clone(&self) -> Self {
FuchsiaImagepipeSurfaceFn {
create_image_pipe_surface_fuchsia: self.create_image_pipe_surface_fuchsia,
}
}
}
impl FuchsiaImagepipeSurfaceFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
FuchsiaImagepipeSurfaceFn {
create_image_pipe_surface_fuchsia: unsafe {
extern "system" fn create_image_pipe_surface_fuchsia(
_instance: Instance,
_p_create_info: *const ImagePipeSurfaceCreateInfoFUCHSIA,
_p_allocator: *const AllocationCallbacks,
_p_surface: *mut SurfaceKHR,
) -> Result {
panic!(concat!(
"Unable to load ",
stringify!(create_image_pipe_surface_fuchsia)
))
}
let raw_name = stringify!(vkCreateImagePipeSurfaceFUCHSIA);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
create_image_pipe_surface_fuchsia
} else {
::std::mem::transmute(val)
}
},
}
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkCreateImagePipeSurfaceFUCHSIA.html>"]
pub unsafe fn create_image_pipe_surface_fuchsia(
&self,
instance: Instance,
p_create_info: *const ImagePipeSurfaceCreateInfoFUCHSIA,
p_allocator: *const AllocationCallbacks,
p_surface: *mut SurfaceKHR,
) -> Result {
(self.create_image_pipe_surface_fuchsia)(instance, p_create_info, p_allocator, p_surface)
}
}
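// Illustrative sketch (not generated from the registry): create a Fuchsia image-pipe
// surface from a caller-filled create-info structure, with no custom host allocator.
// Building `ImagePipeSurfaceCreateInfoFUCHSIA` (s_type, image pipe handle, flags) is
// assumed to happen upstream of this helper.
#[allow(dead_code)]
unsafe fn example_create_image_pipe_surface(
    fp: &FuchsiaImagepipeSurfaceFn,
    instance: Instance,
    create_info: &ImagePipeSurfaceCreateInfoFUCHSIA,
) -> (Result, SurfaceKHR) {
    let mut surface = SurfaceKHR::null();
    let err = fp.create_image_pipe_surface_fuchsia(
        instance,
        create_info,
        ::std::ptr::null(),
        &mut surface,
    );
    (err, surface)
}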
#[doc = "Generated from \'VK_FUCHSIA_imagepipe_surface\'"]
impl StructureType {
pub const IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA: Self = StructureType(1000214000);
}
impl GoogleExtension216Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_GOOGLE_extension_216\0")
.expect("Wrong extension string")
}
}
pub struct GoogleExtension216Fn {}
unsafe impl Send for GoogleExtension216Fn {}
unsafe impl Sync for GoogleExtension216Fn {}
impl ::std::clone::Clone for GoogleExtension216Fn {
fn clone(&self) -> Self {
GoogleExtension216Fn {}
}
}
impl GoogleExtension216Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
GoogleExtension216Fn {}
}
}
impl GoogleExtension217Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_GOOGLE_extension_217\0")
.expect("Wrong extension string")
}
}
pub struct GoogleExtension217Fn {}
unsafe impl Send for GoogleExtension217Fn {}
unsafe impl Sync for GoogleExtension217Fn {}
impl ::std::clone::Clone for GoogleExtension217Fn {
fn clone(&self) -> Self {
GoogleExtension217Fn {}
}
}
impl GoogleExtension217Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
GoogleExtension217Fn {}
}
}
impl ExtMacosIosWindowFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_EXT_macos_ios_window\0")
.expect("Wrong extension string")
}
}
pub struct ExtMacosIosWindowFn {}
unsafe impl Send for ExtMacosIosWindowFn {}
unsafe impl Sync for ExtMacosIosWindowFn {}
impl ::std::clone::Clone for ExtMacosIosWindowFn {
fn clone(&self) -> Self {
ExtMacosIosWindowFn {}
}
}
impl ExtMacosIosWindowFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
ExtMacosIosWindowFn {}
}
}
impl ExtFragmentDensityMapFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_EXT_fragment_density_map\0")
.expect("Wrong extension string")
}
}
pub struct ExtFragmentDensityMapFn {}
unsafe impl Send for ExtFragmentDensityMapFn {}
unsafe impl Sync for ExtFragmentDensityMapFn {}
impl ::std::clone::Clone for ExtFragmentDensityMapFn {
fn clone(&self) -> Self {
ExtFragmentDensityMapFn {}
}
}
impl ExtFragmentDensityMapFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
ExtFragmentDensityMapFn {}
}
}
#[doc = "Generated from \'VK_EXT_fragment_density_map\'"]
impl StructureType {
pub const PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT: Self = StructureType(1000218000);
}
#[doc = "Generated from \'VK_EXT_fragment_density_map\'"]
impl StructureType {
pub const PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT: Self = StructureType(1000218001);
}
#[doc = "Generated from \'VK_EXT_fragment_density_map\'"]
impl StructureType {
pub const RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT: Self = StructureType(1000218002);
}
#[doc = "Generated from \'VK_EXT_fragment_density_map\'"]
impl ImageCreateFlags {
pub const SUBSAMPLED_EXT: Self = ImageCreateFlags(0b100000000000000);
}
#[doc = "Generated from \'VK_EXT_fragment_density_map\'"]
impl ImageLayout {
pub const FRAGMENT_DENSITY_MAP_OPTIMAL_EXT: Self = ImageLayout(1000218000);
}
#[doc = "Generated from \'VK_EXT_fragment_density_map\'"]
impl AccessFlags {
pub const FRAGMENT_DENSITY_MAP_READ_EXT: Self = AccessFlags(0b1000000000000000000000000);
}
#[doc = "Generated from \'VK_EXT_fragment_density_map\'"]
impl FormatFeatureFlags {
pub const FRAGMENT_DENSITY_MAP_EXT: Self = FormatFeatureFlags(0b1000000000000000000000000);
}
#[doc = "Generated from \'VK_EXT_fragment_density_map\'"]
impl ImageUsageFlags {
pub const FRAGMENT_DENSITY_MAP_EXT: Self = ImageUsageFlags(0b1000000000);
}
#[doc = "Generated from \'VK_EXT_fragment_density_map\'"]
impl ImageViewCreateFlags {
pub const FRAGMENT_DENSITY_MAP_DYNAMIC_EXT: Self = ImageViewCreateFlags(0b1);
}
#[doc = "Generated from \'VK_EXT_fragment_density_map\'"]
impl PipelineStageFlags {
pub const FRAGMENT_DENSITY_PROCESS_EXT: Self = PipelineStageFlags(0b100000000000000000000000);
}
#[doc = "Generated from \'VK_EXT_fragment_density_map\'"]
impl SamplerCreateFlags {
pub const SUBSAMPLED_EXT: Self = SamplerCreateFlags(0b1);
}
#[doc = "Generated from \'VK_EXT_fragment_density_map\'"]
impl SamplerCreateFlags {
pub const SUBSAMPLED_COARSE_RECONSTRUCTION_EXT: Self = SamplerCreateFlags(0b10);
}
impl ExtExtension220Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_EXT_extension_220\0")
.expect("Wrong extension string")
}
}
pub struct ExtExtension220Fn {}
unsafe impl Send for ExtExtension220Fn {}
unsafe impl Sync for ExtExtension220Fn {}
impl ::std::clone::Clone for ExtExtension220Fn {
fn clone(&self) -> Self {
ExtExtension220Fn {}
}
}
impl ExtExtension220Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
ExtExtension220Fn {}
}
}
impl KhrExtension221Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_KHR_extension_221\0")
.expect("Wrong extension string")
}
}
pub struct KhrExtension221Fn {}
unsafe impl Send for KhrExtension221Fn {}
unsafe impl Sync for KhrExtension221Fn {}
impl ::std::clone::Clone for KhrExtension221Fn {
fn clone(&self) -> Self {
KhrExtension221Fn {}
}
}
impl KhrExtension221Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
KhrExtension221Fn {}
}
}
#[doc = "Generated from \'VK_KHR_extension_221\'"]
impl RenderPassCreateFlags {
pub const RESERVED_0_KHR: Self = RenderPassCreateFlags(0b1);
}
impl ExtScalarBlockLayoutFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_EXT_scalar_block_layout\0")
.expect("Wrong extension string")
}
}
pub struct ExtScalarBlockLayoutFn {}
unsafe impl Send for ExtScalarBlockLayoutFn {}
unsafe impl Sync for ExtScalarBlockLayoutFn {}
impl ::std::clone::Clone for ExtScalarBlockLayoutFn {
fn clone(&self) -> Self {
ExtScalarBlockLayoutFn {}
}
}
impl ExtScalarBlockLayoutFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
ExtScalarBlockLayoutFn {}
}
}
#[doc = "Generated from \'VK_EXT_scalar_block_layout\'"]
impl StructureType {
pub const PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT: Self = StructureType(1000221000);
}
impl ExtExtension223Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_EXT_extension_223\0")
.expect("Wrong extension string")
}
}
pub struct ExtExtension223Fn {}
unsafe impl Send for ExtExtension223Fn {}
unsafe impl Sync for ExtExtension223Fn {}
impl ::std::clone::Clone for ExtExtension223Fn {
fn clone(&self) -> Self {
ExtExtension223Fn {}
}
}
impl ExtExtension223Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
ExtExtension223Fn {}
}
}
impl GoogleHlslFunctionality1Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_GOOGLE_hlsl_functionality1\0")
.expect("Wrong extension string")
}
}
pub struct GoogleHlslFunctionality1Fn {}
unsafe impl Send for GoogleHlslFunctionality1Fn {}
unsafe impl Sync for GoogleHlslFunctionality1Fn {}
impl ::std::clone::Clone for GoogleHlslFunctionality1Fn {
fn clone(&self) -> Self {
GoogleHlslFunctionality1Fn {}
}
}
impl GoogleHlslFunctionality1Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
GoogleHlslFunctionality1Fn {}
}
}
impl GoogleDecorateStringFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_GOOGLE_decorate_string\0")
.expect("Wrong extension string")
}
}
pub struct GoogleDecorateStringFn {}
unsafe impl Send for GoogleDecorateStringFn {}
unsafe impl Sync for GoogleDecorateStringFn {}
impl ::std::clone::Clone for GoogleDecorateStringFn {
fn clone(&self) -> Self {
GoogleDecorateStringFn {}
}
}
impl GoogleDecorateStringFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
GoogleDecorateStringFn {}
}
}
impl AmdExtension226Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_AMD_extension_226\0")
.expect("Wrong extension string")
}
}
pub struct AmdExtension226Fn {}
unsafe impl Send for AmdExtension226Fn {}
unsafe impl Sync for AmdExtension226Fn {}
impl ::std::clone::Clone for AmdExtension226Fn {
fn clone(&self) -> Self {
AmdExtension226Fn {}
}
}
impl AmdExtension226Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
AmdExtension226Fn {}
}
}
impl AmdExtension227Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_AMD_extension_227\0")
.expect("Wrong extension string")
}
}
pub struct AmdExtension227Fn {}
unsafe impl Send for AmdExtension227Fn {}
unsafe impl Sync for AmdExtension227Fn {}
impl ::std::clone::Clone for AmdExtension227Fn {
fn clone(&self) -> Self {
AmdExtension227Fn {}
}
}
impl AmdExtension227Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
AmdExtension227Fn {}
}
}
impl AmdExtension228Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_AMD_extension_228\0")
.expect("Wrong extension string")
}
}
pub struct AmdExtension228Fn {}
unsafe impl Send for AmdExtension228Fn {}
unsafe impl Sync for AmdExtension228Fn {}
impl ::std::clone::Clone for AmdExtension228Fn {
fn clone(&self) -> Self {
AmdExtension228Fn {}
}
}
impl AmdExtension228Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
AmdExtension228Fn {}
}
}
impl AmdExtension229Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_AMD_extension_229\0")
.expect("Wrong extension string")
}
}
pub struct AmdExtension229Fn {}
unsafe impl Send for AmdExtension229Fn {}
unsafe impl Sync for AmdExtension229Fn {}
impl ::std::clone::Clone for AmdExtension229Fn {
fn clone(&self) -> Self {
AmdExtension229Fn {}
}
}
impl AmdExtension229Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
AmdExtension229Fn {}
}
}
impl AmdExtension230Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_AMD_extension_230\0")
.expect("Wrong extension string")
}
}
pub struct AmdExtension230Fn {}
unsafe impl Send for AmdExtension230Fn {}
unsafe impl Sync for AmdExtension230Fn {}
impl ::std::clone::Clone for AmdExtension230Fn {
fn clone(&self) -> Self {
AmdExtension230Fn {}
}
}
impl AmdExtension230Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
AmdExtension230Fn {}
}
}
impl AmdExtension231Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_AMD_extension_231\0")
.expect("Wrong extension string")
}
}
pub struct AmdExtension231Fn {}
unsafe impl Send for AmdExtension231Fn {}
unsafe impl Sync for AmdExtension231Fn {}
impl ::std::clone::Clone for AmdExtension231Fn {
fn clone(&self) -> Self {
AmdExtension231Fn {}
}
}
impl AmdExtension231Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
AmdExtension231Fn {}
}
}
impl AmdExtension232Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_AMD_extension_232\0")
.expect("Wrong extension string")
}
}
pub struct AmdExtension232Fn {}
unsafe impl Send for AmdExtension232Fn {}
unsafe impl Sync for AmdExtension232Fn {}
impl ::std::clone::Clone for AmdExtension232Fn {
fn clone(&self) -> Self {
AmdExtension232Fn {}
}
}
impl AmdExtension232Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
AmdExtension232Fn {}
}
}
impl AmdExtension233Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_AMD_extension_233\0")
.expect("Wrong extension string")
}
}
pub struct AmdExtension233Fn {}
unsafe impl Send for AmdExtension233Fn {}
unsafe impl Sync for AmdExtension233Fn {}
impl ::std::clone::Clone for AmdExtension233Fn {
fn clone(&self) -> Self {
AmdExtension233Fn {}
}
}
impl AmdExtension233Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
AmdExtension233Fn {}
}
}
impl AmdExtension234Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_AMD_extension_234\0")
.expect("Wrong extension string")
}
}
pub struct AmdExtension234Fn {}
unsafe impl Send for AmdExtension234Fn {}
unsafe impl Sync for AmdExtension234Fn {}
impl ::std::clone::Clone for AmdExtension234Fn {
fn clone(&self) -> Self {
AmdExtension234Fn {}
}
}
impl AmdExtension234Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
AmdExtension234Fn {}
}
}
impl AmdExtension235Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_AMD_extension_235\0")
.expect("Wrong extension string")
}
}
pub struct AmdExtension235Fn {}
unsafe impl Send for AmdExtension235Fn {}
unsafe impl Sync for AmdExtension235Fn {}
impl ::std::clone::Clone for AmdExtension235Fn {
fn clone(&self) -> Self {
AmdExtension235Fn {}
}
}
impl AmdExtension235Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
AmdExtension235Fn {}
}
}
impl AmdExtension236Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_AMD_extension_236\0")
.expect("Wrong extension string")
}
}
pub struct AmdExtension236Fn {}
unsafe impl Send for AmdExtension236Fn {}
unsafe impl Sync for AmdExtension236Fn {}
impl ::std::clone::Clone for AmdExtension236Fn {
fn clone(&self) -> Self {
AmdExtension236Fn {}
}
}
impl AmdExtension236Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
AmdExtension236Fn {}
}
}
impl KhrExtension237Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_KHR_extension_237\0")
.expect("Wrong extension string")
}
}
pub struct KhrExtension237Fn {}
unsafe impl Send for KhrExtension237Fn {}
unsafe impl Sync for KhrExtension237Fn {}
impl ::std::clone::Clone for KhrExtension237Fn {
fn clone(&self) -> Self {
KhrExtension237Fn {}
}
}
impl KhrExtension237Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
KhrExtension237Fn {}
}
}
impl ExtMemoryBudgetFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_EXT_memory_budget\0")
.expect("Wrong extension string")
}
}
pub struct ExtMemoryBudgetFn {}
unsafe impl Send for ExtMemoryBudgetFn {}
unsafe impl Sync for ExtMemoryBudgetFn {}
impl ::std::clone::Clone for ExtMemoryBudgetFn {
fn clone(&self) -> Self {
ExtMemoryBudgetFn {}
}
}
impl ExtMemoryBudgetFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
ExtMemoryBudgetFn {}
}
}
#[doc = "Generated from \'VK_EXT_memory_budget\'"]
impl StructureType {
pub const PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT: Self = StructureType(1000237000);
}
impl ExtMemoryPriorityFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_EXT_memory_priority\0")
.expect("Wrong extension string")
}
}
pub struct ExtMemoryPriorityFn {}
unsafe impl Send for ExtMemoryPriorityFn {}
unsafe impl Sync for ExtMemoryPriorityFn {}
impl ::std::clone::Clone for ExtMemoryPriorityFn {
fn clone(&self) -> Self {
ExtMemoryPriorityFn {}
}
}
impl ExtMemoryPriorityFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
ExtMemoryPriorityFn {}
}
}
#[doc = "Generated from \'VK_EXT_memory_priority\'"]
impl StructureType {
pub const PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT: Self = StructureType(1000238000);
}
#[doc = "Generated from \'VK_EXT_memory_priority\'"]
impl StructureType {
pub const MEMORY_PRIORITY_ALLOCATE_INFO_EXT: Self = StructureType(1000238001);
}
impl KhrExtension240Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_KHR_extension_240\0")
.expect("Wrong extension string")
}
}
pub struct KhrExtension240Fn {}
unsafe impl Send for KhrExtension240Fn {}
unsafe impl Sync for KhrExtension240Fn {}
impl ::std::clone::Clone for KhrExtension240Fn {
fn clone(&self) -> Self {
KhrExtension240Fn {}
}
}
impl KhrExtension240Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
KhrExtension240Fn {}
}
}
impl NvExtension241Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_NV_extension_241\0")
.expect("Wrong extension string")
}
}
pub struct NvExtension241Fn {}
unsafe impl Send for NvExtension241Fn {}
unsafe impl Sync for NvExtension241Fn {}
impl ::std::clone::Clone for NvExtension241Fn {
fn clone(&self) -> Self {
NvExtension241Fn {}
}
}
impl NvExtension241Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
NvExtension241Fn {}
}
}
impl NvExtension242Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_NV_extension_242\0")
.expect("Wrong extension string")
}
}
pub struct NvExtension242Fn {}
unsafe impl Send for NvExtension242Fn {}
unsafe impl Sync for NvExtension242Fn {}
impl ::std::clone::Clone for NvExtension242Fn {
fn clone(&self) -> Self {
NvExtension242Fn {}
}
}
impl NvExtension242Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
NvExtension242Fn {}
}
}
impl IntelExtension243Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_INTEL_extension_243\0")
.expect("Wrong extension string")
}
}
pub struct IntelExtension243Fn {}
unsafe impl Send for IntelExtension243Fn {}
unsafe impl Sync for IntelExtension243Fn {}
impl ::std::clone::Clone for IntelExtension243Fn {
fn clone(&self) -> Self {
IntelExtension243Fn {}
}
}
impl IntelExtension243Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
IntelExtension243Fn {}
}
}
impl MesaExtension244Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_MESA_extension_244\0")
.expect("Wrong extension string")
}
}
pub struct MesaExtension244Fn {}
unsafe impl Send for MesaExtension244Fn {}
unsafe impl Sync for MesaExtension244Fn {}
impl ::std::clone::Clone for MesaExtension244Fn {
fn clone(&self) -> Self {
MesaExtension244Fn {}
}
}
impl MesaExtension244Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
MesaExtension244Fn {}
}
}
impl ExtBufferDeviceAddressFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_EXT_buffer_device_address\0")
.expect("Wrong extension string")
}
}
#[allow(non_camel_case_types)]
pub type PFN_vkGetBufferDeviceAddressEXT =
extern "system" fn(device: Device, p_info: *const BufferDeviceAddressInfoEXT) -> DeviceAddress;
pub struct ExtBufferDeviceAddressFn {
pub get_buffer_device_address_ext: extern "system" fn(
device: Device,
p_info: *const BufferDeviceAddressInfoEXT,
) -> DeviceAddress,
}
unsafe impl Send for ExtBufferDeviceAddressFn {}
unsafe impl Sync for ExtBufferDeviceAddressFn {}
impl ::std::clone::Clone for ExtBufferDeviceAddressFn {
fn clone(&self) -> Self {
ExtBufferDeviceAddressFn {
get_buffer_device_address_ext: self.get_buffer_device_address_ext,
}
}
}
impl ExtBufferDeviceAddressFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
ExtBufferDeviceAddressFn {
get_buffer_device_address_ext: unsafe {
extern "system" fn get_buffer_device_address_ext(
_device: Device,
_p_info: *const BufferDeviceAddressInfoEXT,
) -> DeviceAddress {
panic!(concat!(
"Unable to load ",
stringify!(get_buffer_device_address_ext)
))
}
let raw_name = stringify!(vkGetBufferDeviceAddressEXT);
let cname = ::std::ffi::CString::new(raw_name).unwrap();
let val = _f(&cname);
if val.is_null() {
get_buffer_device_address_ext
} else {
::std::mem::transmute(val)
}
},
}
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkGetBufferDeviceAddressEXT.html>"]
pub unsafe fn get_buffer_device_address_ext(
&self,
device: Device,
p_info: *const BufferDeviceAddressInfoEXT,
) -> DeviceAddress {
(self.get_buffer_device_address_ext)(device, p_info)
}
}
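// Usage sketch (illustrative, not generated): `load` accepts any closure mapping a
// null-terminated function name to a raw pointer, typically a wrapper around
// `vkGetDeviceProcAddr`; entry points that fail to resolve fall back to a panicking
// stub. The `lookup`, `device`, and `info` names below are assumptions for
// illustration only.
//
//     // `lookup` is any FnMut(&::std::ffi::CStr) -> *const c_void (hypothetical here).
//     let fp = ExtBufferDeviceAddressFn::load(|name| lookup(name));
//     let address = unsafe { fp.get_buffer_device_address_ext(device, &info) };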
#[doc = "Generated from \'VK_EXT_buffer_device_address\'"]
impl StructureType {
pub const PHYSICAL_DEVICE_BUFFER_ADDRESS_FEATURES_EXT: Self = StructureType(1000244000);
}
#[doc = "Generated from \'VK_EXT_buffer_device_address\'"]
impl StructureType {
pub const BUFFER_DEVICE_ADDRESS_INFO_EXT: Self = StructureType(1000244001);
}
#[doc = "Generated from \'VK_EXT_buffer_device_address\'"]
impl StructureType {
pub const BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT: Self = StructureType(1000244002);
}
#[doc = "Generated from \'VK_EXT_buffer_device_address\'"]
impl BufferUsageFlags {
pub const SHADER_DEVICE_ADDRESS_EXT: Self = BufferUsageFlags(0b100000000000000000);
}
#[doc = "Generated from \'VK_EXT_buffer_device_address\'"]
impl BufferCreateFlags {
pub const DEVICE_ADDRESS_CAPTURE_REPLAY_EXT: Self = BufferCreateFlags(0b10000);
}
#[doc = "Generated from \'VK_EXT_buffer_device_address\'"]
impl Result {
pub const ERROR_INVALID_DEVICE_ADDRESS_EXT: Self = Result(-1000244000);
}
impl ExtExtension246Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_EXT_extension_246\0")
.expect("Wrong extension string")
}
}
pub struct ExtExtension246Fn {}
unsafe impl Send for ExtExtension246Fn {}
unsafe impl Sync for ExtExtension246Fn {}
impl ::std::clone::Clone for ExtExtension246Fn {
fn clone(&self) -> Self {
ExtExtension246Fn {}
}
}
impl ExtExtension246Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
ExtExtension246Fn {}
}
}
impl ExtSeparateStencilUsageFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_EXT_separate_stencil_usage\0")
.expect("Wrong extension string")
}
}
pub struct ExtSeparateStencilUsageFn {}
unsafe impl Send for ExtSeparateStencilUsageFn {}
unsafe impl Sync for ExtSeparateStencilUsageFn {}
impl ::std::clone::Clone for ExtSeparateStencilUsageFn {
fn clone(&self) -> Self {
ExtSeparateStencilUsageFn {}
}
}
impl ExtSeparateStencilUsageFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
ExtSeparateStencilUsageFn {}
}
}
#[doc = "Generated from \'VK_EXT_separate_stencil_usage\'"]
impl StructureType {
pub const IMAGE_STENCIL_USAGE_CREATE_INFO_EXT: Self = StructureType(1000246000);
}
impl ExtValidationFeaturesFn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_EXT_validation_features\0")
.expect("Wrong extension string")
}
}
pub struct ExtValidationFeaturesFn {}
unsafe impl Send for ExtValidationFeaturesFn {}
unsafe impl Sync for ExtValidationFeaturesFn {}
impl ::std::clone::Clone for ExtValidationFeaturesFn {
fn clone(&self) -> Self {
ExtValidationFeaturesFn {}
}
}
impl ExtValidationFeaturesFn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
ExtValidationFeaturesFn {}
}
}
#[doc = "Generated from \'VK_EXT_validation_features\'"]
impl StructureType {
pub const VALIDATION_FEATURES_EXT: Self = StructureType(1000247000);
}
impl KhrExtension249Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_KHR_extension_249\0")
.expect("Wrong extension string")
}
}
pub struct KhrExtension249Fn {}
unsafe impl Send for KhrExtension249Fn {}
unsafe impl Sync for KhrExtension249Fn {}
impl ::std::clone::Clone for KhrExtension249Fn {
fn clone(&self) -> Self {
KhrExtension249Fn {}
}
}
impl KhrExtension249Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
KhrExtension249Fn {}
}
}
impl NvExtension250Fn {
pub fn name() -> &'static ::std::ffi::CStr {
::std::ffi::CStr::from_bytes_with_nul(b"VK_NV_extension_250\0")
.expect("Wrong extension string")
}
}
pub struct NvExtension250Fn {}
unsafe impl Send for NvExtension250Fn {}
unsafe impl Sync for NvExtension250Fn {}
impl ::std::clone::Clone for NvExtension250Fn {
fn clone(&self) -> Self {
NvExtension250Fn {}
}
}
impl NvExtension250Fn {
pub fn load<F>(mut _f: F) -> Self
where
F: FnMut(&::std::ffi::CStr) -> *const c_void,
{
NvExtension250Fn {}
}
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl StructureType {
pub const PHYSICAL_DEVICE_SUBGROUP_PROPERTIES: Self = StructureType(1000094000);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl StructureType {
pub const BIND_BUFFER_MEMORY_INFO: Self = StructureType(1000157000);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl StructureType {
pub const BIND_IMAGE_MEMORY_INFO: Self = StructureType(1000157001);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl ImageCreateFlags {
pub const ALIAS: Self = ImageCreateFlags(0b10000000000);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl StructureType {
pub const PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES: Self = StructureType(1000083000);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl StructureType {
pub const MEMORY_DEDICATED_REQUIREMENTS: Self = StructureType(1000127000);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl StructureType {
pub const MEMORY_DEDICATED_ALLOCATE_INFO: Self = StructureType(1000127001);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl StructureType {
pub const MEMORY_ALLOCATE_FLAGS_INFO: Self = StructureType(1000060000);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl StructureType {
pub const DEVICE_GROUP_RENDER_PASS_BEGIN_INFO: Self = StructureType(1000060003);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl StructureType {
pub const DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO: Self = StructureType(1000060004);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl StructureType {
pub const DEVICE_GROUP_SUBMIT_INFO: Self = StructureType(1000060005);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl StructureType {
pub const DEVICE_GROUP_BIND_SPARSE_INFO: Self = StructureType(1000060006);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl PipelineCreateFlags {
pub const VIEW_INDEX_FROM_DEVICE_INDEX: Self = PipelineCreateFlags(0b1000);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl PipelineCreateFlags {
pub const DISPATCH_BASE: Self = PipelineCreateFlags(0b10000);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl DependencyFlags {
pub const DEVICE_GROUP: Self = DependencyFlags(0b100);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl StructureType {
pub const BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO: Self = StructureType(1000060013);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl StructureType {
pub const BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO: Self = StructureType(1000060014);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl ImageCreateFlags {
pub const SPLIT_INSTANCE_BIND_REGIONS: Self = ImageCreateFlags(0b1000000);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl StructureType {
pub const PHYSICAL_DEVICE_GROUP_PROPERTIES: Self = StructureType(1000070000);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl StructureType {
pub const DEVICE_GROUP_DEVICE_CREATE_INFO: Self = StructureType(1000070001);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl MemoryHeapFlags {
pub const MULTI_INSTANCE: Self = MemoryHeapFlags(0b10);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl StructureType {
pub const BUFFER_MEMORY_REQUIREMENTS_INFO_2: Self = StructureType(1000146000);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl StructureType {
pub const IMAGE_MEMORY_REQUIREMENTS_INFO_2: Self = StructureType(1000146001);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl StructureType {
pub const IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2: Self = StructureType(1000146002);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl StructureType {
pub const MEMORY_REQUIREMENTS_2: Self = StructureType(1000146003);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl StructureType {
pub const SPARSE_IMAGE_MEMORY_REQUIREMENTS_2: Self = StructureType(1000146004);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl StructureType {
pub const PHYSICAL_DEVICE_FEATURES_2: Self = StructureType(1000059000);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl StructureType {
pub const PHYSICAL_DEVICE_PROPERTIES_2: Self = StructureType(1000059001);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl StructureType {
pub const FORMAT_PROPERTIES_2: Self = StructureType(1000059002);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl StructureType {
pub const IMAGE_FORMAT_PROPERTIES_2: Self = StructureType(1000059003);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl StructureType {
pub const PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2: Self = StructureType(1000059004);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl StructureType {
pub const QUEUE_FAMILY_PROPERTIES_2: Self = StructureType(1000059005);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl StructureType {
pub const PHYSICAL_DEVICE_MEMORY_PROPERTIES_2: Self = StructureType(1000059006);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl StructureType {
pub const SPARSE_IMAGE_FORMAT_PROPERTIES_2: Self = StructureType(1000059007);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl StructureType {
pub const PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2: Self = StructureType(1000059008);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl Result {
pub const ERROR_OUT_OF_POOL_MEMORY: Self = Result(-1000069000);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl FormatFeatureFlags {
pub const TRANSFER_SRC: Self = FormatFeatureFlags(0b100000000000000);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl FormatFeatureFlags {
pub const TRANSFER_DST: Self = FormatFeatureFlags(0b1000000000000000);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl ImageCreateFlags {
pub const TYPE_2D_ARRAY_COMPATIBLE: Self = ImageCreateFlags(0b100000);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl ImageCreateFlags {
pub const BLOCK_TEXEL_VIEW_COMPATIBLE: Self = ImageCreateFlags(0b10000000);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl ImageCreateFlags {
pub const EXTENDED_USAGE: Self = ImageCreateFlags(0b100000000);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl StructureType {
pub const PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES: Self = StructureType(1000117000);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl StructureType {
pub const RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO: Self = StructureType(1000117001);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl StructureType {
pub const IMAGE_VIEW_USAGE_CREATE_INFO: Self = StructureType(1000117002);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl StructureType {
pub const PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO: Self =
StructureType(1000117003);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl ImageLayout {
pub const DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL: Self = ImageLayout(1000117000);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl ImageLayout {
pub const DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL: Self = ImageLayout(1000117001);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl StructureType {
pub const RENDER_PASS_MULTIVIEW_CREATE_INFO: Self = StructureType(1000053000);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl StructureType {
pub const PHYSICAL_DEVICE_MULTIVIEW_FEATURES: Self = StructureType(1000053001);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl StructureType {
pub const PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES: Self = StructureType(1000053002);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl DependencyFlags {
pub const VIEW_LOCAL: Self = DependencyFlags(0b10);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl StructureType {
pub const PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES: Self = StructureType(1000120000);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl StructureType {
pub const PROTECTED_SUBMIT_INFO: Self = StructureType(1000145000);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl StructureType {
pub const PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES: Self = StructureType(1000145001);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl StructureType {
pub const PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES: Self = StructureType(1000145002);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl StructureType {
pub const DEVICE_QUEUE_INFO_2: Self = StructureType(1000145003);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl QueueFlags {
pub const PROTECTED: Self = QueueFlags(0b10000);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl DeviceQueueCreateFlags {
pub const PROTECTED: Self = DeviceQueueCreateFlags(0b1);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl MemoryPropertyFlags {
pub const PROTECTED: Self = MemoryPropertyFlags(0b100000);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl BufferCreateFlags {
pub const PROTECTED: Self = BufferCreateFlags(0b1000);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl ImageCreateFlags {
pub const PROTECTED: Self = ImageCreateFlags(0b100000000000);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl CommandPoolCreateFlags {
pub const PROTECTED: Self = CommandPoolCreateFlags(0b100);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl StructureType {
pub const SAMPLER_YCBCR_CONVERSION_CREATE_INFO: Self = StructureType(1000156000);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl StructureType {
pub const SAMPLER_YCBCR_CONVERSION_INFO: Self = StructureType(1000156001);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl StructureType {
pub const BIND_IMAGE_PLANE_MEMORY_INFO: Self = StructureType(1000156002);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl StructureType {
pub const IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO: Self = StructureType(1000156003);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl StructureType {
pub const PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES: Self = StructureType(1000156004);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl StructureType {
pub const SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES: Self = StructureType(1000156005);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl ObjectType {
pub const SAMPLER_YCBCR_CONVERSION: Self = ObjectType(1000156000);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl Format {
pub const G8B8G8R8_422_UNORM: Self = Format(1000156000);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl Format {
pub const B8G8R8G8_422_UNORM: Self = Format(1000156001);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl Format {
pub const G8_B8_R8_3PLANE_420_UNORM: Self = Format(1000156002);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl Format {
pub const G8_B8R8_2PLANE_420_UNORM: Self = Format(1000156003);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl Format {
pub const G8_B8_R8_3PLANE_422_UNORM: Self = Format(1000156004);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl Format {
pub const G8_B8R8_2PLANE_422_UNORM: Self = Format(1000156005);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl Format {
pub const G8_B8_R8_3PLANE_444_UNORM: Self = Format(1000156006);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl Format {
pub const R10X6_UNORM_PACK16: Self = Format(1000156007);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl Format {
pub const R10X6G10X6_UNORM_2PACK16: Self = Format(1000156008);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl Format {
pub const R10X6G10X6B10X6A10X6_UNORM_4PACK16: Self = Format(1000156009);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl Format {
pub const G10X6B10X6G10X6R10X6_422_UNORM_4PACK16: Self = Format(1000156010);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl Format {
pub const B10X6G10X6R10X6G10X6_422_UNORM_4PACK16: Self = Format(1000156011);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl Format {
pub const G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16: Self = Format(1000156012);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl Format {
pub const G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16: Self = Format(1000156013);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl Format {
pub const G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16: Self = Format(1000156014);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl Format {
pub const G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16: Self = Format(1000156015);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl Format {
pub const G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16: Self = Format(1000156016);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl Format {
pub const R12X4_UNORM_PACK16: Self = Format(1000156017);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl Format {
pub const R12X4G12X4_UNORM_2PACK16: Self = Format(1000156018);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl Format {
pub const R12X4G12X4B12X4A12X4_UNORM_4PACK16: Self = Format(1000156019);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl Format {
pub const G12X4B12X4G12X4R12X4_422_UNORM_4PACK16: Self = Format(1000156020);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl Format {
pub const B12X4G12X4R12X4G12X4_422_UNORM_4PACK16: Self = Format(1000156021);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl Format {
pub const G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16: Self = Format(1000156022);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl Format {
pub const G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16: Self = Format(1000156023);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl Format {
pub const G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16: Self = Format(1000156024);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl Format {
pub const G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16: Self = Format(1000156025);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl Format {
pub const G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16: Self = Format(1000156026);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl Format {
pub const G16B16G16R16_422_UNORM: Self = Format(1000156027);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl Format {
pub const B16G16R16G16_422_UNORM: Self = Format(1000156028);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl Format {
pub const G16_B16_R16_3PLANE_420_UNORM: Self = Format(1000156029);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl Format {
pub const G16_B16R16_2PLANE_420_UNORM: Self = Format(1000156030);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl Format {
pub const G16_B16_R16_3PLANE_422_UNORM: Self = Format(1000156031);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl Format {
pub const G16_B16R16_2PLANE_422_UNORM: Self = Format(1000156032);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl Format {
pub const G16_B16_R16_3PLANE_444_UNORM: Self = Format(1000156033);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl ImageAspectFlags {
pub const PLANE_0: Self = ImageAspectFlags(0b10000);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl ImageAspectFlags {
pub const PLANE_1: Self = ImageAspectFlags(0b100000);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl ImageAspectFlags {
pub const PLANE_2: Self = ImageAspectFlags(0b1000000);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl ImageCreateFlags {
pub const DISJOINT: Self = ImageCreateFlags(0b1000000000);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl FormatFeatureFlags {
pub const MIDPOINT_CHROMA_SAMPLES: Self = FormatFeatureFlags(0b100000000000000000);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl FormatFeatureFlags {
pub const SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER: Self =
FormatFeatureFlags(0b1000000000000000000);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl FormatFeatureFlags {
pub const SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER: Self =
FormatFeatureFlags(0b10000000000000000000);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl FormatFeatureFlags {
pub const SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT: Self =
FormatFeatureFlags(0b100000000000000000000);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl FormatFeatureFlags {
pub const SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE: Self =
FormatFeatureFlags(0b1000000000000000000000);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl FormatFeatureFlags {
pub const DISJOINT: Self = FormatFeatureFlags(0b10000000000000000000000);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl FormatFeatureFlags {
pub const COSITED_CHROMA_SAMPLES: Self = FormatFeatureFlags(0b100000000000000000000000);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl StructureType {
pub const DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO: Self = StructureType(1000085000);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl ObjectType {
pub const DESCRIPTOR_UPDATE_TEMPLATE: Self = ObjectType(1000085000);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl StructureType {
pub const PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO: Self = StructureType(1000071000);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl StructureType {
pub const EXTERNAL_IMAGE_FORMAT_PROPERTIES: Self = StructureType(1000071001);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl StructureType {
pub const PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO: Self = StructureType(1000071002);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl StructureType {
pub const EXTERNAL_BUFFER_PROPERTIES: Self = StructureType(1000071003);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl StructureType {
pub const PHYSICAL_DEVICE_ID_PROPERTIES: Self = StructureType(1000071004);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl StructureType {
pub const EXTERNAL_MEMORY_BUFFER_CREATE_INFO: Self = StructureType(1000072000);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl StructureType {
pub const EXTERNAL_MEMORY_IMAGE_CREATE_INFO: Self = StructureType(1000072001);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl StructureType {
pub const EXPORT_MEMORY_ALLOCATE_INFO: Self = StructureType(1000072002);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl Result {
pub const ERROR_INVALID_EXTERNAL_HANDLE: Self = Result(-1000072003);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl StructureType {
pub const PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO: Self = StructureType(1000112000);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl StructureType {
pub const EXTERNAL_FENCE_PROPERTIES: Self = StructureType(1000112001);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl StructureType {
pub const EXPORT_FENCE_CREATE_INFO: Self = StructureType(1000113000);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl StructureType {
pub const EXPORT_SEMAPHORE_CREATE_INFO: Self = StructureType(1000077000);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl StructureType {
pub const PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO: Self = StructureType(1000076000);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl StructureType {
pub const EXTERNAL_SEMAPHORE_PROPERTIES: Self = StructureType(1000076001);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl StructureType {
pub const PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES: Self = StructureType(1000168000);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl StructureType {
pub const DESCRIPTOR_SET_LAYOUT_SUPPORT: Self = StructureType(1000168001);
}
#[doc = "Generated from \'VK_VERSION_1_1\'"]
impl StructureType {
pub const PHYSICAL_DEVICE_SHADER_DRAW_PARAMETER_FEATURES: Self = StructureType(1000063000);
}
#[doc = r" Writes the names of all bits from `known` that are set in `value`, separated by `|`,"]
#[doc = r" followed by any remaining unrecognized bits in binary."]
#[doc = r" Shared by the `fmt::Debug` impls on the bitflag wrappers below."]
pub(crate) fn debug_flags(
f: &mut fmt::Formatter,
known: &[(Flags, &'static str)],
value: Flags,
) -> fmt::Result {
let mut first = true;
let mut accum = value;
for (bit, name) in known {
if *bit != 0 && accum & *bit == *bit {
if !first {
f.write_str(" | ")?;
}
f.write_str(name)?;
first = false;
accum &= !bit;
}
}
if accum != 0 {
if !first {
f.write_str(" | ")?;
}
write!(f, "{:b}", accum)?;
}
Ok(())
}
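// Illustrative example (not generated): given the `AccessFlags` table defined below,
// a combined flags value formats as its known bit names joined by " | ", while any
// bits absent from the table are appended in binary by the `write!(f, "{:b}", accum)`
// arm above:
//
//     let flags = AccessFlags::SHADER_READ | AccessFlags::TRANSFER_WRITE;
//     assert_eq!(format!("{:?}", flags), "SHADER_READ | TRANSFER_WRITE");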
impl fmt::Debug for AccelerationStructureMemoryRequirementsTypeNV {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::OBJECT => Some("OBJECT"),
Self::BUILD_SCRATCH => Some("BUILD_SCRATCH"),
Self::UPDATE_SCRATCH => Some("UPDATE_SCRATCH"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
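// Illustrative note (not generated): the enum-style wrappers print the matched name
// and otherwise fall back to the raw value's `Debug` output, e.g.
//
//     assert_eq!(
//         format!("{:?}", AccelerationStructureMemoryRequirementsTypeNV::OBJECT),
//         "OBJECT"
//     );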
impl fmt::Debug for AccelerationStructureTypeNV {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::TOP_LEVEL => Some("TOP_LEVEL"),
Self::BOTTOM_LEVEL => Some("BOTTOM_LEVEL"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for AccessFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[
(
AccessFlags::INDIRECT_COMMAND_READ.0,
"INDIRECT_COMMAND_READ",
),
(AccessFlags::INDEX_READ.0, "INDEX_READ"),
(
AccessFlags::VERTEX_ATTRIBUTE_READ.0,
"VERTEX_ATTRIBUTE_READ",
),
(AccessFlags::UNIFORM_READ.0, "UNIFORM_READ"),
(
AccessFlags::INPUT_ATTACHMENT_READ.0,
"INPUT_ATTACHMENT_READ",
),
(AccessFlags::SHADER_READ.0, "SHADER_READ"),
(AccessFlags::SHADER_WRITE.0, "SHADER_WRITE"),
(
AccessFlags::COLOR_ATTACHMENT_READ.0,
"COLOR_ATTACHMENT_READ",
),
(
AccessFlags::COLOR_ATTACHMENT_WRITE.0,
"COLOR_ATTACHMENT_WRITE",
),
(
AccessFlags::DEPTH_STENCIL_ATTACHMENT_READ.0,
"DEPTH_STENCIL_ATTACHMENT_READ",
),
(
AccessFlags::DEPTH_STENCIL_ATTACHMENT_WRITE.0,
"DEPTH_STENCIL_ATTACHMENT_WRITE",
),
(AccessFlags::TRANSFER_READ.0, "TRANSFER_READ"),
(AccessFlags::TRANSFER_WRITE.0, "TRANSFER_WRITE"),
(AccessFlags::HOST_READ.0, "HOST_READ"),
(AccessFlags::HOST_WRITE.0, "HOST_WRITE"),
(AccessFlags::MEMORY_READ.0, "MEMORY_READ"),
(AccessFlags::MEMORY_WRITE.0, "MEMORY_WRITE"),
(AccessFlags::RESERVED_30_KHR.0, "RESERVED_30_KHR"),
(AccessFlags::RESERVED_31_KHR.0, "RESERVED_31_KHR"),
(AccessFlags::RESERVED_28_KHR.0, "RESERVED_28_KHR"),
(AccessFlags::RESERVED_29_KHR.0, "RESERVED_29_KHR"),
(
AccessFlags::TRANSFORM_FEEDBACK_WRITE_EXT.0,
"TRANSFORM_FEEDBACK_WRITE_EXT",
),
(
AccessFlags::TRANSFORM_FEEDBACK_COUNTER_READ_EXT.0,
"TRANSFORM_FEEDBACK_COUNTER_READ_EXT",
),
(
AccessFlags::TRANSFORM_FEEDBACK_COUNTER_WRITE_EXT.0,
"TRANSFORM_FEEDBACK_COUNTER_WRITE_EXT",
),
(
AccessFlags::CONDITIONAL_RENDERING_READ_EXT.0,
"CONDITIONAL_RENDERING_READ_EXT",
),
(
AccessFlags::COMMAND_PROCESS_READ_NVX.0,
"COMMAND_PROCESS_READ_NVX",
),
(
AccessFlags::COMMAND_PROCESS_WRITE_NVX.0,
"COMMAND_PROCESS_WRITE_NVX",
),
(
AccessFlags::COLOR_ATTACHMENT_READ_NONCOHERENT_EXT.0,
"COLOR_ATTACHMENT_READ_NONCOHERENT_EXT",
),
(
AccessFlags::SHADING_RATE_IMAGE_READ_NV.0,
"SHADING_RATE_IMAGE_READ_NV",
),
(
AccessFlags::ACCELERATION_STRUCTURE_READ_NV.0,
"ACCELERATION_STRUCTURE_READ_NV",
),
(
AccessFlags::ACCELERATION_STRUCTURE_WRITE_NV.0,
"ACCELERATION_STRUCTURE_WRITE_NV",
),
(
AccessFlags::FRAGMENT_DENSITY_MAP_READ_EXT.0,
"FRAGMENT_DENSITY_MAP_READ_EXT",
),
];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for AndroidSurfaceCreateFlagsKHR {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for AttachmentDescriptionFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[(AttachmentDescriptionFlags::MAY_ALIAS.0, "MAY_ALIAS")];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for AttachmentLoadOp {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::LOAD => Some("LOAD"),
Self::CLEAR => Some("CLEAR"),
Self::DONT_CARE => Some("DONT_CARE"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for AttachmentStoreOp {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::STORE => Some("STORE"),
Self::DONT_CARE => Some("DONT_CARE"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for BlendFactor {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::ZERO => Some("ZERO"),
Self::ONE => Some("ONE"),
Self::SRC_COLOR => Some("SRC_COLOR"),
Self::ONE_MINUS_SRC_COLOR => Some("ONE_MINUS_SRC_COLOR"),
Self::DST_COLOR => Some("DST_COLOR"),
Self::ONE_MINUS_DST_COLOR => Some("ONE_MINUS_DST_COLOR"),
Self::SRC_ALPHA => Some("SRC_ALPHA"),
Self::ONE_MINUS_SRC_ALPHA => Some("ONE_MINUS_SRC_ALPHA"),
Self::DST_ALPHA => Some("DST_ALPHA"),
Self::ONE_MINUS_DST_ALPHA => Some("ONE_MINUS_DST_ALPHA"),
Self::CONSTANT_COLOR => Some("CONSTANT_COLOR"),
Self::ONE_MINUS_CONSTANT_COLOR => Some("ONE_MINUS_CONSTANT_COLOR"),
Self::CONSTANT_ALPHA => Some("CONSTANT_ALPHA"),
Self::ONE_MINUS_CONSTANT_ALPHA => Some("ONE_MINUS_CONSTANT_ALPHA"),
Self::SRC_ALPHA_SATURATE => Some("SRC_ALPHA_SATURATE"),
Self::SRC1_COLOR => Some("SRC1_COLOR"),
Self::ONE_MINUS_SRC1_COLOR => Some("ONE_MINUS_SRC1_COLOR"),
Self::SRC1_ALPHA => Some("SRC1_ALPHA"),
Self::ONE_MINUS_SRC1_ALPHA => Some("ONE_MINUS_SRC1_ALPHA"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for BlendOp {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::ADD => Some("ADD"),
Self::SUBTRACT => Some("SUBTRACT"),
Self::REVERSE_SUBTRACT => Some("REVERSE_SUBTRACT"),
Self::MIN => Some("MIN"),
Self::MAX => Some("MAX"),
Self::ZERO_EXT => Some("ZERO_EXT"),
Self::SRC_EXT => Some("SRC_EXT"),
Self::DST_EXT => Some("DST_EXT"),
Self::SRC_OVER_EXT => Some("SRC_OVER_EXT"),
Self::DST_OVER_EXT => Some("DST_OVER_EXT"),
Self::SRC_IN_EXT => Some("SRC_IN_EXT"),
Self::DST_IN_EXT => Some("DST_IN_EXT"),
Self::SRC_OUT_EXT => Some("SRC_OUT_EXT"),
Self::DST_OUT_EXT => Some("DST_OUT_EXT"),
Self::SRC_ATOP_EXT => Some("SRC_ATOP_EXT"),
Self::DST_ATOP_EXT => Some("DST_ATOP_EXT"),
Self::XOR_EXT => Some("XOR_EXT"),
Self::MULTIPLY_EXT => Some("MULTIPLY_EXT"),
Self::SCREEN_EXT => Some("SCREEN_EXT"),
Self::OVERLAY_EXT => Some("OVERLAY_EXT"),
Self::DARKEN_EXT => Some("DARKEN_EXT"),
Self::LIGHTEN_EXT => Some("LIGHTEN_EXT"),
Self::COLORDODGE_EXT => Some("COLORDODGE_EXT"),
Self::COLORBURN_EXT => Some("COLORBURN_EXT"),
Self::HARDLIGHT_EXT => Some("HARDLIGHT_EXT"),
Self::SOFTLIGHT_EXT => Some("SOFTLIGHT_EXT"),
Self::DIFFERENCE_EXT => Some("DIFFERENCE_EXT"),
Self::EXCLUSION_EXT => Some("EXCLUSION_EXT"),
Self::INVERT_EXT => Some("INVERT_EXT"),
Self::INVERT_RGB_EXT => Some("INVERT_RGB_EXT"),
Self::LINEARDODGE_EXT => Some("LINEARDODGE_EXT"),
Self::LINEARBURN_EXT => Some("LINEARBURN_EXT"),
Self::VIVIDLIGHT_EXT => Some("VIVIDLIGHT_EXT"),
Self::LINEARLIGHT_EXT => Some("LINEARLIGHT_EXT"),
Self::PINLIGHT_EXT => Some("PINLIGHT_EXT"),
Self::HARDMIX_EXT => Some("HARDMIX_EXT"),
Self::HSL_HUE_EXT => Some("HSL_HUE_EXT"),
Self::HSL_SATURATION_EXT => Some("HSL_SATURATION_EXT"),
Self::HSL_COLOR_EXT => Some("HSL_COLOR_EXT"),
Self::HSL_LUMINOSITY_EXT => Some("HSL_LUMINOSITY_EXT"),
Self::PLUS_EXT => Some("PLUS_EXT"),
Self::PLUS_CLAMPED_EXT => Some("PLUS_CLAMPED_EXT"),
Self::PLUS_CLAMPED_ALPHA_EXT => Some("PLUS_CLAMPED_ALPHA_EXT"),
Self::PLUS_DARKER_EXT => Some("PLUS_DARKER_EXT"),
Self::MINUS_EXT => Some("MINUS_EXT"),
Self::MINUS_CLAMPED_EXT => Some("MINUS_CLAMPED_EXT"),
Self::CONTRAST_EXT => Some("CONTRAST_EXT"),
Self::INVERT_OVG_EXT => Some("INVERT_OVG_EXT"),
Self::RED_EXT => Some("RED_EXT"),
Self::GREEN_EXT => Some("GREEN_EXT"),
Self::BLUE_EXT => Some("BLUE_EXT"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for BlendOverlapEXT {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::UNCORRELATED => Some("UNCORRELATED"),
Self::DISJOINT => Some("DISJOINT"),
Self::CONJOINT => Some("CONJOINT"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for BorderColor {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::FLOAT_TRANSPARENT_BLACK => Some("FLOAT_TRANSPARENT_BLACK"),
Self::INT_TRANSPARENT_BLACK => Some("INT_TRANSPARENT_BLACK"),
Self::FLOAT_OPAQUE_BLACK => Some("FLOAT_OPAQUE_BLACK"),
Self::INT_OPAQUE_BLACK => Some("INT_OPAQUE_BLACK"),
Self::FLOAT_OPAQUE_WHITE => Some("FLOAT_OPAQUE_WHITE"),
Self::INT_OPAQUE_WHITE => Some("INT_OPAQUE_WHITE"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for BufferCreateFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[
(BufferCreateFlags::SPARSE_BINDING.0, "SPARSE_BINDING"),
(BufferCreateFlags::SPARSE_RESIDENCY.0, "SPARSE_RESIDENCY"),
(BufferCreateFlags::SPARSE_ALIASED.0, "SPARSE_ALIASED"),
(
BufferCreateFlags::DEVICE_ADDRESS_CAPTURE_REPLAY_EXT.0,
"DEVICE_ADDRESS_CAPTURE_REPLAY_EXT",
),
(BufferCreateFlags::PROTECTED.0, "PROTECTED"),
];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for BufferUsageFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[
(BufferUsageFlags::TRANSFER_SRC.0, "TRANSFER_SRC"),
(BufferUsageFlags::TRANSFER_DST.0, "TRANSFER_DST"),
(
BufferUsageFlags::UNIFORM_TEXEL_BUFFER.0,
"UNIFORM_TEXEL_BUFFER",
),
(
BufferUsageFlags::STORAGE_TEXEL_BUFFER.0,
"STORAGE_TEXEL_BUFFER",
),
(BufferUsageFlags::UNIFORM_BUFFER.0, "UNIFORM_BUFFER"),
(BufferUsageFlags::STORAGE_BUFFER.0, "STORAGE_BUFFER"),
(BufferUsageFlags::INDEX_BUFFER.0, "INDEX_BUFFER"),
(BufferUsageFlags::VERTEX_BUFFER.0, "VERTEX_BUFFER"),
(BufferUsageFlags::INDIRECT_BUFFER.0, "INDIRECT_BUFFER"),
(BufferUsageFlags::RESERVED_15_KHR.0, "RESERVED_15_KHR"),
(BufferUsageFlags::RESERVED_16_KHR.0, "RESERVED_16_KHR"),
(BufferUsageFlags::RESERVED_13_KHR.0, "RESERVED_13_KHR"),
(BufferUsageFlags::RESERVED_14_KHR.0, "RESERVED_14_KHR"),
(
BufferUsageFlags::TRANSFORM_FEEDBACK_BUFFER_EXT.0,
"TRANSFORM_FEEDBACK_BUFFER_EXT",
),
(
BufferUsageFlags::TRANSFORM_FEEDBACK_COUNTER_BUFFER_EXT.0,
"TRANSFORM_FEEDBACK_COUNTER_BUFFER_EXT",
),
(
BufferUsageFlags::CONDITIONAL_RENDERING_EXT.0,
"CONDITIONAL_RENDERING_EXT",
),
(BufferUsageFlags::RAY_TRACING_NV.0, "RAY_TRACING_NV"),
(
BufferUsageFlags::SHADER_DEVICE_ADDRESS_EXT.0,
"SHADER_DEVICE_ADDRESS_EXT",
),
];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for BufferViewCreateFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for BuildAccelerationStructureFlagsNV {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[
(
BuildAccelerationStructureFlagsNV::ALLOW_UPDATE.0,
"ALLOW_UPDATE",
),
(
BuildAccelerationStructureFlagsNV::ALLOW_COMPACTION.0,
"ALLOW_COMPACTION",
),
(
BuildAccelerationStructureFlagsNV::PREFER_FAST_TRACE.0,
"PREFER_FAST_TRACE",
),
(
BuildAccelerationStructureFlagsNV::PREFER_FAST_BUILD.0,
"PREFER_FAST_BUILD",
),
(
BuildAccelerationStructureFlagsNV::LOW_MEMORY.0,
"LOW_MEMORY",
),
];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for ChromaLocation {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::COSITED_EVEN => Some("COSITED_EVEN"),
Self::MIDPOINT => Some("MIDPOINT"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for CoarseSampleOrderTypeNV {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::DEFAULT => Some("DEFAULT"),
Self::CUSTOM => Some("CUSTOM"),
Self::PIXEL_MAJOR => Some("PIXEL_MAJOR"),
Self::SAMPLE_MAJOR => Some("SAMPLE_MAJOR"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for ColorComponentFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[
(ColorComponentFlags::R.0, "R"),
(ColorComponentFlags::G.0, "G"),
(ColorComponentFlags::B.0, "B"),
(ColorComponentFlags::A.0, "A"),
];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for ColorSpaceKHR {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::SRGB_NONLINEAR => Some("SRGB_NONLINEAR"),
Self::DISPLAY_P3_NONLINEAR_EXT => Some("DISPLAY_P3_NONLINEAR_EXT"),
Self::EXTENDED_SRGB_LINEAR_EXT => Some("EXTENDED_SRGB_LINEAR_EXT"),
Self::DCI_P3_LINEAR_EXT => Some("DCI_P3_LINEAR_EXT"),
Self::DCI_P3_NONLINEAR_EXT => Some("DCI_P3_NONLINEAR_EXT"),
Self::BT709_LINEAR_EXT => Some("BT709_LINEAR_EXT"),
Self::BT709_NONLINEAR_EXT => Some("BT709_NONLINEAR_EXT"),
Self::BT2020_LINEAR_EXT => Some("BT2020_LINEAR_EXT"),
Self::HDR10_ST2084_EXT => Some("HDR10_ST2084_EXT"),
Self::DOLBYVISION_EXT => Some("DOLBYVISION_EXT"),
Self::HDR10_HLG_EXT => Some("HDR10_HLG_EXT"),
Self::ADOBERGB_LINEAR_EXT => Some("ADOBERGB_LINEAR_EXT"),
Self::ADOBERGB_NONLINEAR_EXT => Some("ADOBERGB_NONLINEAR_EXT"),
Self::PASS_THROUGH_EXT => Some("PASS_THROUGH_EXT"),
Self::EXTENDED_SRGB_NONLINEAR_EXT => Some("EXTENDED_SRGB_NONLINEAR_EXT"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for CommandBufferLevel {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::PRIMARY => Some("PRIMARY"),
Self::SECONDARY => Some("SECONDARY"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for CommandBufferResetFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[(
CommandBufferResetFlags::RELEASE_RESOURCES.0,
"RELEASE_RESOURCES",
)];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for CommandBufferUsageFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[
(
CommandBufferUsageFlags::ONE_TIME_SUBMIT.0,
"ONE_TIME_SUBMIT",
),
(
CommandBufferUsageFlags::RENDER_PASS_CONTINUE.0,
"RENDER_PASS_CONTINUE",
),
(
CommandBufferUsageFlags::SIMULTANEOUS_USE.0,
"SIMULTANEOUS_USE",
),
];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for CommandPoolCreateFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[
(CommandPoolCreateFlags::TRANSIENT.0, "TRANSIENT"),
(
CommandPoolCreateFlags::RESET_COMMAND_BUFFER.0,
"RESET_COMMAND_BUFFER",
),
(CommandPoolCreateFlags::PROTECTED.0, "PROTECTED"),
];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for CommandPoolResetFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[(
CommandPoolResetFlags::RELEASE_RESOURCES.0,
"RELEASE_RESOURCES",
)];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for CommandPoolTrimFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for CompareOp {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::NEVER => Some("NEVER"),
Self::LESS => Some("LESS"),
Self::EQUAL => Some("EQUAL"),
Self::LESS_OR_EQUAL => Some("LESS_OR_EQUAL"),
Self::GREATER => Some("GREATER"),
Self::NOT_EQUAL => Some("NOT_EQUAL"),
Self::GREATER_OR_EQUAL => Some("GREATER_OR_EQUAL"),
Self::ALWAYS => Some("ALWAYS"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for ComponentSwizzle {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::IDENTITY => Some("IDENTITY"),
Self::ZERO => Some("ZERO"),
Self::ONE => Some("ONE"),
Self::R => Some("R"),
Self::G => Some("G"),
Self::B => Some("B"),
Self::A => Some("A"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for CompositeAlphaFlagsKHR {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[
(CompositeAlphaFlagsKHR::OPAQUE.0, "OPAQUE"),
(CompositeAlphaFlagsKHR::PRE_MULTIPLIED.0, "PRE_MULTIPLIED"),
(CompositeAlphaFlagsKHR::POST_MULTIPLIED.0, "POST_MULTIPLIED"),
(CompositeAlphaFlagsKHR::INHERIT.0, "INHERIT"),
];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for ConditionalRenderingFlagsEXT {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[(ConditionalRenderingFlagsEXT::INVERTED.0, "INVERTED")];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for ConservativeRasterizationModeEXT {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::DISABLED => Some("DISABLED"),
Self::OVERESTIMATE => Some("OVERESTIMATE"),
Self::UNDERESTIMATE => Some("UNDERESTIMATE"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for CopyAccelerationStructureModeNV {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::CLONE => Some("CLONE"),
Self::COMPACT => Some("COMPACT"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for CoverageModulationModeNV {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::NONE => Some("NONE"),
Self::RGB => Some("RGB"),
Self::ALPHA => Some("ALPHA"),
Self::RGBA => Some("RGBA"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for CullModeFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[
(CullModeFlags::NONE.0, "NONE"),
(CullModeFlags::FRONT.0, "FRONT"),
(CullModeFlags::BACK.0, "BACK"),
(CullModeFlags::FRONT_AND_BACK.0, "FRONT_AND_BACK"),
];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for DebugReportFlagsEXT {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[
(DebugReportFlagsEXT::INFORMATION.0, "INFORMATION"),
(DebugReportFlagsEXT::WARNING.0, "WARNING"),
(
DebugReportFlagsEXT::PERFORMANCE_WARNING.0,
"PERFORMANCE_WARNING",
),
(DebugReportFlagsEXT::ERROR.0, "ERROR"),
(DebugReportFlagsEXT::DEBUG.0, "DEBUG"),
];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for DebugReportObjectTypeEXT {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::UNKNOWN => Some("UNKNOWN"),
Self::INSTANCE => Some("INSTANCE"),
Self::PHYSICAL_DEVICE => Some("PHYSICAL_DEVICE"),
Self::DEVICE => Some("DEVICE"),
Self::QUEUE => Some("QUEUE"),
Self::SEMAPHORE => Some("SEMAPHORE"),
Self::COMMAND_BUFFER => Some("COMMAND_BUFFER"),
Self::FENCE => Some("FENCE"),
Self::DEVICE_MEMORY => Some("DEVICE_MEMORY"),
Self::BUFFER => Some("BUFFER"),
Self::IMAGE => Some("IMAGE"),
Self::EVENT => Some("EVENT"),
Self::QUERY_POOL => Some("QUERY_POOL"),
Self::BUFFER_VIEW => Some("BUFFER_VIEW"),
Self::IMAGE_VIEW => Some("IMAGE_VIEW"),
Self::SHADER_MODULE => Some("SHADER_MODULE"),
Self::PIPELINE_CACHE => Some("PIPELINE_CACHE"),
Self::PIPELINE_LAYOUT => Some("PIPELINE_LAYOUT"),
Self::RENDER_PASS => Some("RENDER_PASS"),
Self::PIPELINE => Some("PIPELINE"),
Self::DESCRIPTOR_SET_LAYOUT => Some("DESCRIPTOR_SET_LAYOUT"),
Self::SAMPLER => Some("SAMPLER"),
Self::DESCRIPTOR_POOL => Some("DESCRIPTOR_POOL"),
Self::DESCRIPTOR_SET => Some("DESCRIPTOR_SET"),
Self::FRAMEBUFFER => Some("FRAMEBUFFER"),
Self::COMMAND_POOL => Some("COMMAND_POOL"),
Self::SURFACE_KHR => Some("SURFACE_KHR"),
Self::SWAPCHAIN_KHR => Some("SWAPCHAIN_KHR"),
Self::DEBUG_REPORT_CALLBACK => Some("DEBUG_REPORT_CALLBACK"),
Self::DISPLAY_KHR => Some("DISPLAY_KHR"),
Self::DISPLAY_MODE_KHR => Some("DISPLAY_MODE_KHR"),
Self::OBJECT_TABLE_NVX => Some("OBJECT_TABLE_NVX"),
Self::INDIRECT_COMMANDS_LAYOUT_NVX => Some("INDIRECT_COMMANDS_LAYOUT_NVX"),
Self::VALIDATION_CACHE => Some("VALIDATION_CACHE"),
Self::SAMPLER_YCBCR_CONVERSION => Some("SAMPLER_YCBCR_CONVERSION"),
Self::DESCRIPTOR_UPDATE_TEMPLATE => Some("DESCRIPTOR_UPDATE_TEMPLATE"),
Self::ACCELERATION_STRUCTURE_NV => Some("ACCELERATION_STRUCTURE_NV"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for DebugUtilsMessageSeverityFlagsEXT {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[
(DebugUtilsMessageSeverityFlagsEXT::VERBOSE.0, "VERBOSE"),
(DebugUtilsMessageSeverityFlagsEXT::INFO.0, "INFO"),
(DebugUtilsMessageSeverityFlagsEXT::WARNING.0, "WARNING"),
(DebugUtilsMessageSeverityFlagsEXT::ERROR.0, "ERROR"),
];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for DebugUtilsMessageTypeFlagsEXT {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[
(DebugUtilsMessageTypeFlagsEXT::GENERAL.0, "GENERAL"),
(DebugUtilsMessageTypeFlagsEXT::VALIDATION.0, "VALIDATION"),
(DebugUtilsMessageTypeFlagsEXT::PERFORMANCE.0, "PERFORMANCE"),
];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for DebugUtilsMessengerCallbackDataFlagsEXT {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for DebugUtilsMessengerCreateFlagsEXT {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for DependencyFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[
(DependencyFlags::BY_REGION.0, "BY_REGION"),
(DependencyFlags::DEVICE_GROUP.0, "DEVICE_GROUP"),
(DependencyFlags::VIEW_LOCAL.0, "VIEW_LOCAL"),
];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for DescriptorBindingFlagsEXT {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[
(
DescriptorBindingFlagsEXT::UPDATE_AFTER_BIND.0,
"UPDATE_AFTER_BIND",
),
(
DescriptorBindingFlagsEXT::UPDATE_UNUSED_WHILE_PENDING.0,
"UPDATE_UNUSED_WHILE_PENDING",
),
(
DescriptorBindingFlagsEXT::PARTIALLY_BOUND.0,
"PARTIALLY_BOUND",
),
(
DescriptorBindingFlagsEXT::VARIABLE_DESCRIPTOR_COUNT.0,
"VARIABLE_DESCRIPTOR_COUNT",
),
];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for DescriptorPoolCreateFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[
(
DescriptorPoolCreateFlags::FREE_DESCRIPTOR_SET.0,
"FREE_DESCRIPTOR_SET",
),
(
DescriptorPoolCreateFlags::UPDATE_AFTER_BIND_EXT.0,
"UPDATE_AFTER_BIND_EXT",
),
];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for DescriptorPoolResetFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for DescriptorSetLayoutCreateFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[
(
DescriptorSetLayoutCreateFlags::PUSH_DESCRIPTOR_KHR.0,
"PUSH_DESCRIPTOR_KHR",
),
(
DescriptorSetLayoutCreateFlags::UPDATE_AFTER_BIND_POOL_EXT.0,
"UPDATE_AFTER_BIND_POOL_EXT",
),
];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for DescriptorType {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::SAMPLER => Some("SAMPLER"),
Self::COMBINED_IMAGE_SAMPLER => Some("COMBINED_IMAGE_SAMPLER"),
Self::SAMPLED_IMAGE => Some("SAMPLED_IMAGE"),
Self::STORAGE_IMAGE => Some("STORAGE_IMAGE"),
Self::UNIFORM_TEXEL_BUFFER => Some("UNIFORM_TEXEL_BUFFER"),
Self::STORAGE_TEXEL_BUFFER => Some("STORAGE_TEXEL_BUFFER"),
Self::UNIFORM_BUFFER => Some("UNIFORM_BUFFER"),
Self::STORAGE_BUFFER => Some("STORAGE_BUFFER"),
Self::UNIFORM_BUFFER_DYNAMIC => Some("UNIFORM_BUFFER_DYNAMIC"),
Self::STORAGE_BUFFER_DYNAMIC => Some("STORAGE_BUFFER_DYNAMIC"),
Self::INPUT_ATTACHMENT => Some("INPUT_ATTACHMENT"),
Self::INLINE_UNIFORM_BLOCK_EXT => Some("INLINE_UNIFORM_BLOCK_EXT"),
Self::ACCELERATION_STRUCTURE_NV => Some("ACCELERATION_STRUCTURE_NV"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for DescriptorUpdateTemplateCreateFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for DescriptorUpdateTemplateType {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::DESCRIPTOR_SET => Some("DESCRIPTOR_SET"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for DeviceCreateFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for DeviceEventTypeEXT {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::DISPLAY_HOTPLUG => Some("DISPLAY_HOTPLUG"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for DeviceGroupPresentModeFlagsKHR {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[
(DeviceGroupPresentModeFlagsKHR::LOCAL.0, "LOCAL"),
(DeviceGroupPresentModeFlagsKHR::REMOTE.0, "REMOTE"),
(DeviceGroupPresentModeFlagsKHR::SUM.0, "SUM"),
(
DeviceGroupPresentModeFlagsKHR::LOCAL_MULTI_DEVICE.0,
"LOCAL_MULTI_DEVICE",
),
];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for DeviceQueueCreateFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[(DeviceQueueCreateFlags::PROTECTED.0, "PROTECTED")];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for DiscardRectangleModeEXT {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::INCLUSIVE => Some("INCLUSIVE"),
Self::EXCLUSIVE => Some("EXCLUSIVE"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for DisplayEventTypeEXT {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::FIRST_PIXEL_OUT => Some("FIRST_PIXEL_OUT"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for DisplayModeCreateFlagsKHR {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for DisplayPlaneAlphaFlagsKHR {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[
(DisplayPlaneAlphaFlagsKHR::OPAQUE.0, "OPAQUE"),
(DisplayPlaneAlphaFlagsKHR::GLOBAL.0, "GLOBAL"),
(DisplayPlaneAlphaFlagsKHR::PER_PIXEL.0, "PER_PIXEL"),
(
DisplayPlaneAlphaFlagsKHR::PER_PIXEL_PREMULTIPLIED.0,
"PER_PIXEL_PREMULTIPLIED",
),
];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for DisplayPowerStateEXT {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::OFF => Some("OFF"),
Self::SUSPEND => Some("SUSPEND"),
Self::ON => Some("ON"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for DisplaySurfaceCreateFlagsKHR {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for DriverIdKHR {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::AMD_PROPRIETARY => Some("AMD_PROPRIETARY"),
Self::AMD_OPEN_SOURCE => Some("AMD_OPEN_SOURCE"),
Self::MESA_RADV => Some("MESA_RADV"),
Self::NVIDIA_PROPRIETARY => Some("NVIDIA_PROPRIETARY"),
Self::INTEL_PROPRIETARY_WINDOWS => Some("INTEL_PROPRIETARY_WINDOWS"),
Self::INTEL_OPEN_SOURCE_MESA => Some("INTEL_OPEN_SOURCE_MESA"),
Self::IMAGINATION_PROPRIETARY => Some("IMAGINATION_PROPRIETARY"),
Self::QUALCOMM_PROPRIETARY => Some("QUALCOMM_PROPRIETARY"),
Self::ARM_PROPRIETARY => Some("ARM_PROPRIETARY"),
Self::GOOGLE_PASTEL => Some("GOOGLE_PASTEL"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for DynamicState {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::VIEWPORT => Some("VIEWPORT"),
Self::SCISSOR => Some("SCISSOR"),
Self::LINE_WIDTH => Some("LINE_WIDTH"),
Self::DEPTH_BIAS => Some("DEPTH_BIAS"),
Self::BLEND_CONSTANTS => Some("BLEND_CONSTANTS"),
Self::DEPTH_BOUNDS => Some("DEPTH_BOUNDS"),
Self::STENCIL_COMPARE_MASK => Some("STENCIL_COMPARE_MASK"),
Self::STENCIL_WRITE_MASK => Some("STENCIL_WRITE_MASK"),
Self::STENCIL_REFERENCE => Some("STENCIL_REFERENCE"),
Self::VIEWPORT_W_SCALING_NV => Some("VIEWPORT_W_SCALING_NV"),
Self::DISCARD_RECTANGLE_EXT => Some("DISCARD_RECTANGLE_EXT"),
Self::SAMPLE_LOCATIONS_EXT => Some("SAMPLE_LOCATIONS_EXT"),
Self::VIEWPORT_SHADING_RATE_PALETTE_NV => Some("VIEWPORT_SHADING_RATE_PALETTE_NV"),
Self::VIEWPORT_COARSE_SAMPLE_ORDER_NV => Some("VIEWPORT_COARSE_SAMPLE_ORDER_NV"),
Self::EXCLUSIVE_SCISSOR_NV => Some("EXCLUSIVE_SCISSOR_NV"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for EventCreateFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for ExternalFenceFeatureFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[
(
ExternalFenceFeatureFlags::EXTERNAL_FENCE_FEATURE_EXPORTABLE.0,
"EXTERNAL_FENCE_FEATURE_EXPORTABLE",
),
(
ExternalFenceFeatureFlags::EXTERNAL_FENCE_FEATURE_IMPORTABLE.0,
"EXTERNAL_FENCE_FEATURE_IMPORTABLE",
),
];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for ExternalFenceHandleTypeFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[
(
ExternalFenceHandleTypeFlags::EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD.0,
"EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD",
),
(
ExternalFenceHandleTypeFlags::EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32.0,
"EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32",
),
(
ExternalFenceHandleTypeFlags::EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT.0,
"EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT",
),
(
ExternalFenceHandleTypeFlags::EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD.0,
"EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD",
),
];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for ExternalMemoryFeatureFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[
(
ExternalMemoryFeatureFlags::EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY.0,
"EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY",
),
(
ExternalMemoryFeatureFlags::EXTERNAL_MEMORY_FEATURE_EXPORTABLE.0,
"EXTERNAL_MEMORY_FEATURE_EXPORTABLE",
),
(
ExternalMemoryFeatureFlags::EXTERNAL_MEMORY_FEATURE_IMPORTABLE.0,
"EXTERNAL_MEMORY_FEATURE_IMPORTABLE",
),
];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for ExternalMemoryFeatureFlagsNV {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[
(
ExternalMemoryFeatureFlagsNV::EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_NV.0,
"EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_NV",
),
(
ExternalMemoryFeatureFlagsNV::EXTERNAL_MEMORY_FEATURE_EXPORTABLE_NV.0,
"EXTERNAL_MEMORY_FEATURE_EXPORTABLE_NV",
),
(
ExternalMemoryFeatureFlagsNV::EXTERNAL_MEMORY_FEATURE_IMPORTABLE_NV.0,
"EXTERNAL_MEMORY_FEATURE_IMPORTABLE_NV",
),
];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for ExternalMemoryHandleTypeFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[
(
ExternalMemoryHandleTypeFlags::EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD.0,
"EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD",
),
(
ExternalMemoryHandleTypeFlags::EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32.0,
"EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32",
),
(
ExternalMemoryHandleTypeFlags::EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT.0,
"EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT",
),
(
ExternalMemoryHandleTypeFlags::EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE.0,
"EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE",
),
(
ExternalMemoryHandleTypeFlags::EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT.0,
"EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT",
),
(
ExternalMemoryHandleTypeFlags::EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP.0,
"EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP",
),
(
ExternalMemoryHandleTypeFlags::EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE.0,
"EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE",
),
(
ExternalMemoryHandleTypeFlags::EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF.0,
"EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF",
),
(
ExternalMemoryHandleTypeFlags::EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_ANDROID.0,
"EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_ANDROID",
),
(
ExternalMemoryHandleTypeFlags::EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION.0,
"EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION",
),
(
ExternalMemoryHandleTypeFlags::EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY.0,
"EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY",
),
];
debug_flags(f, KNOWN, self.0)
}
}
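// Illustrative note (assumed behavior): the bitflag `Debug` impls delegate to the
// crate-internal `debug_flags` helper, which is expected to write the names of all bits
// from `KNOWN` that are set in `self.0`, so a value combining the OPAQUE_FD and DMA_BUF
// bits would list both flag names in its output.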
impl fmt::Debug for ExternalMemoryHandleTypeFlagsNV {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[
(
ExternalMemoryHandleTypeFlagsNV::EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_NV.0,
"EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_NV",
),
(
ExternalMemoryHandleTypeFlagsNV::EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_NV.0,
"EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_NV",
),
(
ExternalMemoryHandleTypeFlagsNV::EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_NV.0,
"EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_NV",
),
(
ExternalMemoryHandleTypeFlagsNV::EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_KMT_NV.0,
"EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_KMT_NV",
),
];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for ExternalSemaphoreFeatureFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[
(
ExternalSemaphoreFeatureFlags::EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE.0,
"EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE",
),
(
ExternalSemaphoreFeatureFlags::EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE.0,
"EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE",
),
];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for ExternalSemaphoreHandleTypeFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[
(
ExternalSemaphoreHandleTypeFlags::EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD.0,
"EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD",
),
(
ExternalSemaphoreHandleTypeFlags::EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32.0,
"EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32",
),
(
ExternalSemaphoreHandleTypeFlags::EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT.0,
"EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT",
),
(
ExternalSemaphoreHandleTypeFlags::EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE.0,
"EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE",
),
(
ExternalSemaphoreHandleTypeFlags::EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD.0,
"EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD",
),
];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for FenceCreateFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[(FenceCreateFlags::SIGNALED.0, "SIGNALED")];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for FenceImportFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[(FenceImportFlags::TEMPORARY.0, "TEMPORARY")];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for Filter {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::NEAREST => Some("NEAREST"),
Self::LINEAR => Some("LINEAR"),
Self::CUBIC_IMG => Some("CUBIC_IMG"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for Format {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::UNDEFINED => Some("UNDEFINED"),
Self::R4G4_UNORM_PACK8 => Some("R4G4_UNORM_PACK8"),
Self::R4G4B4A4_UNORM_PACK16 => Some("R4G4B4A4_UNORM_PACK16"),
Self::B4G4R4A4_UNORM_PACK16 => Some("B4G4R4A4_UNORM_PACK16"),
Self::R5G6B5_UNORM_PACK16 => Some("R5G6B5_UNORM_PACK16"),
Self::B5G6R5_UNORM_PACK16 => Some("B5G6R5_UNORM_PACK16"),
Self::R5G5B5A1_UNORM_PACK16 => Some("R5G5B5A1_UNORM_PACK16"),
Self::B5G5R5A1_UNORM_PACK16 => Some("B5G5R5A1_UNORM_PACK16"),
Self::A1R5G5B5_UNORM_PACK16 => Some("A1R5G5B5_UNORM_PACK16"),
Self::R8_UNORM => Some("R8_UNORM"),
Self::R8_SNORM => Some("R8_SNORM"),
Self::R8_USCALED => Some("R8_USCALED"),
Self::R8_SSCALED => Some("R8_SSCALED"),
Self::R8_UINT => Some("R8_UINT"),
Self::R8_SINT => Some("R8_SINT"),
Self::R8_SRGB => Some("R8_SRGB"),
Self::R8G8_UNORM => Some("R8G8_UNORM"),
Self::R8G8_SNORM => Some("R8G8_SNORM"),
Self::R8G8_USCALED => Some("R8G8_USCALED"),
Self::R8G8_SSCALED => Some("R8G8_SSCALED"),
Self::R8G8_UINT => Some("R8G8_UINT"),
Self::R8G8_SINT => Some("R8G8_SINT"),
Self::R8G8_SRGB => Some("R8G8_SRGB"),
Self::R8G8B8_UNORM => Some("R8G8B8_UNORM"),
Self::R8G8B8_SNORM => Some("R8G8B8_SNORM"),
Self::R8G8B8_USCALED => Some("R8G8B8_USCALED"),
Self::R8G8B8_SSCALED => Some("R8G8B8_SSCALED"),
Self::R8G8B8_UINT => Some("R8G8B8_UINT"),
Self::R8G8B8_SINT => Some("R8G8B8_SINT"),
Self::R8G8B8_SRGB => Some("R8G8B8_SRGB"),
Self::B8G8R8_UNORM => Some("B8G8R8_UNORM"),
Self::B8G8R8_SNORM => Some("B8G8R8_SNORM"),
Self::B8G8R8_USCALED => Some("B8G8R8_USCALED"),
Self::B8G8R8_SSCALED => Some("B8G8R8_SSCALED"),
Self::B8G8R8_UINT => Some("B8G8R8_UINT"),
Self::B8G8R8_SINT => Some("B8G8R8_SINT"),
Self::B8G8R8_SRGB => Some("B8G8R8_SRGB"),
Self::R8G8B8A8_UNORM => Some("R8G8B8A8_UNORM"),
Self::R8G8B8A8_SNORM => Some("R8G8B8A8_SNORM"),
Self::R8G8B8A8_USCALED => Some("R8G8B8A8_USCALED"),
Self::R8G8B8A8_SSCALED => Some("R8G8B8A8_SSCALED"),
Self::R8G8B8A8_UINT => Some("R8G8B8A8_UINT"),
Self::R8G8B8A8_SINT => Some("R8G8B8A8_SINT"),
Self::R8G8B8A8_SRGB => Some("R8G8B8A8_SRGB"),
Self::B8G8R8A8_UNORM => Some("B8G8R8A8_UNORM"),
Self::B8G8R8A8_SNORM => Some("B8G8R8A8_SNORM"),
Self::B8G8R8A8_USCALED => Some("B8G8R8A8_USCALED"),
Self::B8G8R8A8_SSCALED => Some("B8G8R8A8_SSCALED"),
Self::B8G8R8A8_UINT => Some("B8G8R8A8_UINT"),
Self::B8G8R8A8_SINT => Some("B8G8R8A8_SINT"),
Self::B8G8R8A8_SRGB => Some("B8G8R8A8_SRGB"),
Self::A8B8G8R8_UNORM_PACK32 => Some("A8B8G8R8_UNORM_PACK32"),
Self::A8B8G8R8_SNORM_PACK32 => Some("A8B8G8R8_SNORM_PACK32"),
Self::A8B8G8R8_USCALED_PACK32 => Some("A8B8G8R8_USCALED_PACK32"),
Self::A8B8G8R8_SSCALED_PACK32 => Some("A8B8G8R8_SSCALED_PACK32"),
Self::A8B8G8R8_UINT_PACK32 => Some("A8B8G8R8_UINT_PACK32"),
Self::A8B8G8R8_SINT_PACK32 => Some("A8B8G8R8_SINT_PACK32"),
Self::A8B8G8R8_SRGB_PACK32 => Some("A8B8G8R8_SRGB_PACK32"),
Self::A2R10G10B10_UNORM_PACK32 => Some("A2R10G10B10_UNORM_PACK32"),
Self::A2R10G10B10_SNORM_PACK32 => Some("A2R10G10B10_SNORM_PACK32"),
Self::A2R10G10B10_USCALED_PACK32 => Some("A2R10G10B10_USCALED_PACK32"),
Self::A2R10G10B10_SSCALED_PACK32 => Some("A2R10G10B10_SSCALED_PACK32"),
Self::A2R10G10B10_UINT_PACK32 => Some("A2R10G10B10_UINT_PACK32"),
Self::A2R10G10B10_SINT_PACK32 => Some("A2R10G10B10_SINT_PACK32"),
Self::A2B10G10R10_UNORM_PACK32 => Some("A2B10G10R10_UNORM_PACK32"),
Self::A2B10G10R10_SNORM_PACK32 => Some("A2B10G10R10_SNORM_PACK32"),
Self::A2B10G10R10_USCALED_PACK32 => Some("A2B10G10R10_USCALED_PACK32"),
Self::A2B10G10R10_SSCALED_PACK32 => Some("A2B10G10R10_SSCALED_PACK32"),
Self::A2B10G10R10_UINT_PACK32 => Some("A2B10G10R10_UINT_PACK32"),
Self::A2B10G10R10_SINT_PACK32 => Some("A2B10G10R10_SINT_PACK32"),
Self::R16_UNORM => Some("R16_UNORM"),
Self::R16_SNORM => Some("R16_SNORM"),
Self::R16_USCALED => Some("R16_USCALED"),
Self::R16_SSCALED => Some("R16_SSCALED"),
Self::R16_UINT => Some("R16_UINT"),
Self::R16_SINT => Some("R16_SINT"),
Self::R16_SFLOAT => Some("R16_SFLOAT"),
Self::R16G16_UNORM => Some("R16G16_UNORM"),
Self::R16G16_SNORM => Some("R16G16_SNORM"),
Self::R16G16_USCALED => Some("R16G16_USCALED"),
Self::R16G16_SSCALED => Some("R16G16_SSCALED"),
Self::R16G16_UINT => Some("R16G16_UINT"),
Self::R16G16_SINT => Some("R16G16_SINT"),
Self::R16G16_SFLOAT => Some("R16G16_SFLOAT"),
Self::R16G16B16_UNORM => Some("R16G16B16_UNORM"),
Self::R16G16B16_SNORM => Some("R16G16B16_SNORM"),
Self::R16G16B16_USCALED => Some("R16G16B16_USCALED"),
Self::R16G16B16_SSCALED => Some("R16G16B16_SSCALED"),
Self::R16G16B16_UINT => Some("R16G16B16_UINT"),
Self::R16G16B16_SINT => Some("R16G16B16_SINT"),
Self::R16G16B16_SFLOAT => Some("R16G16B16_SFLOAT"),
Self::R16G16B16A16_UNORM => Some("R16G16B16A16_UNORM"),
Self::R16G16B16A16_SNORM => Some("R16G16B16A16_SNORM"),
Self::R16G16B16A16_USCALED => Some("R16G16B16A16_USCALED"),
Self::R16G16B16A16_SSCALED => Some("R16G16B16A16_SSCALED"),
Self::R16G16B16A16_UINT => Some("R16G16B16A16_UINT"),
Self::R16G16B16A16_SINT => Some("R16G16B16A16_SINT"),
Self::R16G16B16A16_SFLOAT => Some("R16G16B16A16_SFLOAT"),
Self::R32_UINT => Some("R32_UINT"),
Self::R32_SINT => Some("R32_SINT"),
Self::R32_SFLOAT => Some("R32_SFLOAT"),
Self::R32G32_UINT => Some("R32G32_UINT"),
Self::R32G32_SINT => Some("R32G32_SINT"),
Self::R32G32_SFLOAT => Some("R32G32_SFLOAT"),
Self::R32G32B32_UINT => Some("R32G32B32_UINT"),
Self::R32G32B32_SINT => Some("R32G32B32_SINT"),
Self::R32G32B32_SFLOAT => Some("R32G32B32_SFLOAT"),
Self::R32G32B32A32_UINT => Some("R32G32B32A32_UINT"),
Self::R32G32B32A32_SINT => Some("R32G32B32A32_SINT"),
Self::R32G32B32A32_SFLOAT => Some("R32G32B32A32_SFLOAT"),
Self::R64_UINT => Some("R64_UINT"),
Self::R64_SINT => Some("R64_SINT"),
Self::R64_SFLOAT => Some("R64_SFLOAT"),
Self::R64G64_UINT => Some("R64G64_UINT"),
Self::R64G64_SINT => Some("R64G64_SINT"),
Self::R64G64_SFLOAT => Some("R64G64_SFLOAT"),
Self::R64G64B64_UINT => Some("R64G64B64_UINT"),
Self::R64G64B64_SINT => Some("R64G64B64_SINT"),
Self::R64G64B64_SFLOAT => Some("R64G64B64_SFLOAT"),
Self::R64G64B64A64_UINT => Some("R64G64B64A64_UINT"),
Self::R64G64B64A64_SINT => Some("R64G64B64A64_SINT"),
Self::R64G64B64A64_SFLOAT => Some("R64G64B64A64_SFLOAT"),
Self::B10G11R11_UFLOAT_PACK32 => Some("B10G11R11_UFLOAT_PACK32"),
Self::E5B9G9R9_UFLOAT_PACK32 => Some("E5B9G9R9_UFLOAT_PACK32"),
Self::D16_UNORM => Some("D16_UNORM"),
Self::X8_D24_UNORM_PACK32 => Some("X8_D24_UNORM_PACK32"),
Self::D32_SFLOAT => Some("D32_SFLOAT"),
Self::S8_UINT => Some("S8_UINT"),
Self::D16_UNORM_S8_UINT => Some("D16_UNORM_S8_UINT"),
Self::D24_UNORM_S8_UINT => Some("D24_UNORM_S8_UINT"),
Self::D32_SFLOAT_S8_UINT => Some("D32_SFLOAT_S8_UINT"),
Self::BC1_RGB_UNORM_BLOCK => Some("BC1_RGB_UNORM_BLOCK"),
Self::BC1_RGB_SRGB_BLOCK => Some("BC1_RGB_SRGB_BLOCK"),
Self::BC1_RGBA_UNORM_BLOCK => Some("BC1_RGBA_UNORM_BLOCK"),
Self::BC1_RGBA_SRGB_BLOCK => Some("BC1_RGBA_SRGB_BLOCK"),
Self::BC2_UNORM_BLOCK => Some("BC2_UNORM_BLOCK"),
Self::BC2_SRGB_BLOCK => Some("BC2_SRGB_BLOCK"),
Self::BC3_UNORM_BLOCK => Some("BC3_UNORM_BLOCK"),
Self::BC3_SRGB_BLOCK => Some("BC3_SRGB_BLOCK"),
Self::BC4_UNORM_BLOCK => Some("BC4_UNORM_BLOCK"),
Self::BC4_SNORM_BLOCK => Some("BC4_SNORM_BLOCK"),
Self::BC5_UNORM_BLOCK => Some("BC5_UNORM_BLOCK"),
Self::BC5_SNORM_BLOCK => Some("BC5_SNORM_BLOCK"),
Self::BC6H_UFLOAT_BLOCK => Some("BC6H_UFLOAT_BLOCK"),
Self::BC6H_SFLOAT_BLOCK => Some("BC6H_SFLOAT_BLOCK"),
Self::BC7_UNORM_BLOCK => Some("BC7_UNORM_BLOCK"),
Self::BC7_SRGB_BLOCK => Some("BC7_SRGB_BLOCK"),
Self::ETC2_R8G8B8_UNORM_BLOCK => Some("ETC2_R8G8B8_UNORM_BLOCK"),
Self::ETC2_R8G8B8_SRGB_BLOCK => Some("ETC2_R8G8B8_SRGB_BLOCK"),
Self::ETC2_R8G8B8A1_UNORM_BLOCK => Some("ETC2_R8G8B8A1_UNORM_BLOCK"),
Self::ETC2_R8G8B8A1_SRGB_BLOCK => Some("ETC2_R8G8B8A1_SRGB_BLOCK"),
Self::ETC2_R8G8B8A8_UNORM_BLOCK => Some("ETC2_R8G8B8A8_UNORM_BLOCK"),
Self::ETC2_R8G8B8A8_SRGB_BLOCK => Some("ETC2_R8G8B8A8_SRGB_BLOCK"),
Self::EAC_R11_UNORM_BLOCK => Some("EAC_R11_UNORM_BLOCK"),
Self::EAC_R11_SNORM_BLOCK => Some("EAC_R11_SNORM_BLOCK"),
Self::EAC_R11G11_UNORM_BLOCK => Some("EAC_R11G11_UNORM_BLOCK"),
Self::EAC_R11G11_SNORM_BLOCK => Some("EAC_R11G11_SNORM_BLOCK"),
Self::ASTC_4X4_UNORM_BLOCK => Some("ASTC_4X4_UNORM_BLOCK"),
Self::ASTC_4X4_SRGB_BLOCK => Some("ASTC_4X4_SRGB_BLOCK"),
Self::ASTC_5X4_UNORM_BLOCK => Some("ASTC_5X4_UNORM_BLOCK"),
Self::ASTC_5X4_SRGB_BLOCK => Some("ASTC_5X4_SRGB_BLOCK"),
Self::ASTC_5X5_UNORM_BLOCK => Some("ASTC_5X5_UNORM_BLOCK"),
Self::ASTC_5X5_SRGB_BLOCK => Some("ASTC_5X5_SRGB_BLOCK"),
Self::ASTC_6X5_UNORM_BLOCK => Some("ASTC_6X5_UNORM_BLOCK"),
Self::ASTC_6X5_SRGB_BLOCK => Some("ASTC_6X5_SRGB_BLOCK"),
Self::ASTC_6X6_UNORM_BLOCK => Some("ASTC_6X6_UNORM_BLOCK"),
Self::ASTC_6X6_SRGB_BLOCK => Some("ASTC_6X6_SRGB_BLOCK"),
Self::ASTC_8X5_UNORM_BLOCK => Some("ASTC_8X5_UNORM_BLOCK"),
Self::ASTC_8X5_SRGB_BLOCK => Some("ASTC_8X5_SRGB_BLOCK"),
Self::ASTC_8X6_UNORM_BLOCK => Some("ASTC_8X6_UNORM_BLOCK"),
Self::ASTC_8X6_SRGB_BLOCK => Some("ASTC_8X6_SRGB_BLOCK"),
Self::ASTC_8X8_UNORM_BLOCK => Some("ASTC_8X8_UNORM_BLOCK"),
Self::ASTC_8X8_SRGB_BLOCK => Some("ASTC_8X8_SRGB_BLOCK"),
Self::ASTC_10X5_UNORM_BLOCK => Some("ASTC_10X5_UNORM_BLOCK"),
Self::ASTC_10X5_SRGB_BLOCK => Some("ASTC_10X5_SRGB_BLOCK"),
Self::ASTC_10X6_UNORM_BLOCK => Some("ASTC_10X6_UNORM_BLOCK"),
Self::ASTC_10X6_SRGB_BLOCK => Some("ASTC_10X6_SRGB_BLOCK"),
Self::ASTC_10X8_UNORM_BLOCK => Some("ASTC_10X8_UNORM_BLOCK"),
Self::ASTC_10X8_SRGB_BLOCK => Some("ASTC_10X8_SRGB_BLOCK"),
Self::ASTC_10X10_UNORM_BLOCK => Some("ASTC_10X10_UNORM_BLOCK"),
Self::ASTC_10X10_SRGB_BLOCK => Some("ASTC_10X10_SRGB_BLOCK"),
Self::ASTC_12X10_UNORM_BLOCK => Some("ASTC_12X10_UNORM_BLOCK"),
Self::ASTC_12X10_SRGB_BLOCK => Some("ASTC_12X10_SRGB_BLOCK"),
Self::ASTC_12X12_UNORM_BLOCK => Some("ASTC_12X12_UNORM_BLOCK"),
Self::ASTC_12X12_SRGB_BLOCK => Some("ASTC_12X12_SRGB_BLOCK"),
Self::PVRTC1_2BPP_UNORM_BLOCK_IMG => Some("PVRTC1_2BPP_UNORM_BLOCK_IMG"),
Self::PVRTC1_4BPP_UNORM_BLOCK_IMG => Some("PVRTC1_4BPP_UNORM_BLOCK_IMG"),
Self::PVRTC2_2BPP_UNORM_BLOCK_IMG => Some("PVRTC2_2BPP_UNORM_BLOCK_IMG"),
Self::PVRTC2_4BPP_UNORM_BLOCK_IMG => Some("PVRTC2_4BPP_UNORM_BLOCK_IMG"),
Self::PVRTC1_2BPP_SRGB_BLOCK_IMG => Some("PVRTC1_2BPP_SRGB_BLOCK_IMG"),
Self::PVRTC1_4BPP_SRGB_BLOCK_IMG => Some("PVRTC1_4BPP_SRGB_BLOCK_IMG"),
Self::PVRTC2_2BPP_SRGB_BLOCK_IMG => Some("PVRTC2_2BPP_SRGB_BLOCK_IMG"),
Self::PVRTC2_4BPP_SRGB_BLOCK_IMG => Some("PVRTC2_4BPP_SRGB_BLOCK_IMG"),
Self::G8B8G8R8_422_UNORM => Some("G8B8G8R8_422_UNORM"),
Self::B8G8R8G8_422_UNORM => Some("B8G8R8G8_422_UNORM"),
Self::G8_B8_R8_3PLANE_420_UNORM => Some("G8_B8_R8_3PLANE_420_UNORM"),
Self::G8_B8R8_2PLANE_420_UNORM => Some("G8_B8R8_2PLANE_420_UNORM"),
Self::G8_B8_R8_3PLANE_422_UNORM => Some("G8_B8_R8_3PLANE_422_UNORM"),
Self::G8_B8R8_2PLANE_422_UNORM => Some("G8_B8R8_2PLANE_422_UNORM"),
Self::G8_B8_R8_3PLANE_444_UNORM => Some("G8_B8_R8_3PLANE_444_UNORM"),
Self::R10X6_UNORM_PACK16 => Some("R10X6_UNORM_PACK16"),
Self::R10X6G10X6_UNORM_2PACK16 => Some("R10X6G10X6_UNORM_2PACK16"),
Self::R10X6G10X6B10X6A10X6_UNORM_4PACK16 => Some("R10X6G10X6B10X6A10X6_UNORM_4PACK16"),
Self::G10X6B10X6G10X6R10X6_422_UNORM_4PACK16 => {
Some("G10X6B10X6G10X6R10X6_422_UNORM_4PACK16")
}
Self::B10X6G10X6R10X6G10X6_422_UNORM_4PACK16 => {
Some("B10X6G10X6R10X6G10X6_422_UNORM_4PACK16")
}
Self::G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16 => {
Some("G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16")
}
Self::G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16 => {
Some("G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16")
}
Self::G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16 => {
Some("G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16")
}
Self::G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16 => {
Some("G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16")
}
Self::G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16 => {
Some("G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16")
}
Self::R12X4_UNORM_PACK16 => Some("R12X4_UNORM_PACK16"),
Self::R12X4G12X4_UNORM_2PACK16 => Some("R12X4G12X4_UNORM_2PACK16"),
Self::R12X4G12X4B12X4A12X4_UNORM_4PACK16 => Some("R12X4G12X4B12X4A12X4_UNORM_4PACK16"),
Self::G12X4B12X4G12X4R12X4_422_UNORM_4PACK16 => {
Some("G12X4B12X4G12X4R12X4_422_UNORM_4PACK16")
}
Self::B12X4G12X4R12X4G12X4_422_UNORM_4PACK16 => {
Some("B12X4G12X4R12X4G12X4_422_UNORM_4PACK16")
}
Self::G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16 => {
Some("G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16")
}
Self::G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16 => {
Some("G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16")
}
Self::G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16 => {
Some("G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16")
}
Self::G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16 => {
Some("G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16")
}
Self::G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16 => {
Some("G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16")
}
Self::G16B16G16R16_422_UNORM => Some("G16B16G16R16_422_UNORM"),
Self::B16G16R16G16_422_UNORM => Some("B16G16R16G16_422_UNORM"),
Self::G16_B16_R16_3PLANE_420_UNORM => Some("G16_B16_R16_3PLANE_420_UNORM"),
Self::G16_B16R16_2PLANE_420_UNORM => Some("G16_B16R16_2PLANE_420_UNORM"),
Self::G16_B16_R16_3PLANE_422_UNORM => Some("G16_B16_R16_3PLANE_422_UNORM"),
Self::G16_B16R16_2PLANE_422_UNORM => Some("G16_B16R16_2PLANE_422_UNORM"),
Self::G16_B16_R16_3PLANE_444_UNORM => Some("G16_B16_R16_3PLANE_444_UNORM"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for FormatFeatureFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[
(FormatFeatureFlags::SAMPLED_IMAGE.0, "SAMPLED_IMAGE"),
(FormatFeatureFlags::STORAGE_IMAGE.0, "STORAGE_IMAGE"),
(FormatFeatureFlags::STORAGE_IMAGE_ATOMIC.0, "STORAGE_IMAGE_ATOMIC"),
(FormatFeatureFlags::UNIFORM_TEXEL_BUFFER.0, "UNIFORM_TEXEL_BUFFER"),
(FormatFeatureFlags::STORAGE_TEXEL_BUFFER.0, "STORAGE_TEXEL_BUFFER"),
(
FormatFeatureFlags::STORAGE_TEXEL_BUFFER_ATOMIC.0,
"STORAGE_TEXEL_BUFFER_ATOMIC",
),
(FormatFeatureFlags::VERTEX_BUFFER.0, "VERTEX_BUFFER"),
(FormatFeatureFlags::COLOR_ATTACHMENT.0, "COLOR_ATTACHMENT"),
(
FormatFeatureFlags::COLOR_ATTACHMENT_BLEND.0,
"COLOR_ATTACHMENT_BLEND",
),
(
FormatFeatureFlags::DEPTH_STENCIL_ATTACHMENT.0,
"DEPTH_STENCIL_ATTACHMENT",
),
(FormatFeatureFlags::BLIT_SRC.0, "BLIT_SRC"),
(FormatFeatureFlags::BLIT_DST.0, "BLIT_DST"),
(
FormatFeatureFlags::SAMPLED_IMAGE_FILTER_LINEAR.0,
"SAMPLED_IMAGE_FILTER_LINEAR",
),
(
FormatFeatureFlags::SAMPLED_IMAGE_FILTER_CUBIC_IMG.0,
"SAMPLED_IMAGE_FILTER_CUBIC_IMG",
),
(FormatFeatureFlags::RESERVED_27_KHR.0, "RESERVED_27_KHR"),
(FormatFeatureFlags::RESERVED_28_KHR.0, "RESERVED_28_KHR"),
(FormatFeatureFlags::RESERVED_25_KHR.0, "RESERVED_25_KHR"),
(FormatFeatureFlags::RESERVED_26_KHR.0, "RESERVED_26_KHR"),
(
FormatFeatureFlags::SAMPLED_IMAGE_FILTER_MINMAX_EXT.0,
"SAMPLED_IMAGE_FILTER_MINMAX_EXT",
),
(
FormatFeatureFlags::FRAGMENT_DENSITY_MAP_EXT.0,
"FRAGMENT_DENSITY_MAP_EXT",
),
(FormatFeatureFlags::TRANSFER_SRC.0, "TRANSFER_SRC"),
(FormatFeatureFlags::TRANSFER_DST.0, "TRANSFER_DST"),
(
FormatFeatureFlags::MIDPOINT_CHROMA_SAMPLES.0,
"MIDPOINT_CHROMA_SAMPLES",
),
(
FormatFeatureFlags::SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER.0,
"SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER",
),
(
FormatFeatureFlags::SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER.0,
"SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER",
),
(
FormatFeatureFlags::SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT.0,
"SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT",
),
(
FormatFeatureFlags::SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE.0,
"SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE",
),
(FormatFeatureFlags::DISJOINT.0, "DISJOINT"),
(
FormatFeatureFlags::COSITED_CHROMA_SAMPLES.0,
"COSITED_CHROMA_SAMPLES",
),
];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for FramebufferCreateFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for FrontFace {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::COUNTER_CLOCKWISE => Some("COUNTER_CLOCKWISE"),
Self::CLOCKWISE => Some("CLOCKWISE"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for GeometryFlagsNV {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[
(GeometryFlagsNV::OPAQUE.0, "OPAQUE"),
(
GeometryFlagsNV::NO_DUPLICATE_ANY_HIT_INVOCATION.0,
"NO_DUPLICATE_ANY_HIT_INVOCATION",
),
];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for GeometryInstanceFlagsNV {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[
(
GeometryInstanceFlagsNV::TRIANGLE_CULL_DISABLE.0,
"TRIANGLE_CULL_DISABLE",
),
(
GeometryInstanceFlagsNV::TRIANGLE_FRONT_COUNTERCLOCKWISE.0,
"TRIANGLE_FRONT_COUNTERCLOCKWISE",
),
(GeometryInstanceFlagsNV::FORCE_OPAQUE.0, "FORCE_OPAQUE"),
(
GeometryInstanceFlagsNV::FORCE_NO_OPAQUE.0,
"FORCE_NO_OPAQUE",
),
];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for GeometryTypeNV {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::TRIANGLES => Some("TRIANGLES"),
Self::AABBS => Some("AABBS"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for IOSSurfaceCreateFlagsMVK {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for ImageAspectFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[
(ImageAspectFlags::COLOR.0, "COLOR"),
(ImageAspectFlags::DEPTH.0, "DEPTH"),
(ImageAspectFlags::STENCIL.0, "STENCIL"),
(ImageAspectFlags::METADATA.0, "METADATA"),
(ImageAspectFlags::MEMORY_PLANE_0_EXT.0, "MEMORY_PLANE_0_EXT"),
(ImageAspectFlags::MEMORY_PLANE_1_EXT.0, "MEMORY_PLANE_1_EXT"),
(ImageAspectFlags::MEMORY_PLANE_2_EXT.0, "MEMORY_PLANE_2_EXT"),
(ImageAspectFlags::MEMORY_PLANE_3_EXT.0, "MEMORY_PLANE_3_EXT"),
(ImageAspectFlags::PLANE_0.0, "PLANE_0"),
(ImageAspectFlags::PLANE_1.0, "PLANE_1"),
(ImageAspectFlags::PLANE_2.0, "PLANE_2"),
];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for ImageCreateFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[
(ImageCreateFlags::SPARSE_BINDING.0, "SPARSE_BINDING"),
(ImageCreateFlags::SPARSE_RESIDENCY.0, "SPARSE_RESIDENCY"),
(ImageCreateFlags::SPARSE_ALIASED.0, "SPARSE_ALIASED"),
(ImageCreateFlags::MUTABLE_FORMAT.0, "MUTABLE_FORMAT"),
(ImageCreateFlags::CUBE_COMPATIBLE.0, "CUBE_COMPATIBLE"),
(ImageCreateFlags::CORNER_SAMPLED_NV.0, "CORNER_SAMPLED_NV"),
(
ImageCreateFlags::SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_EXT.0,
"SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_EXT",
),
(ImageCreateFlags::SUBSAMPLED_EXT.0, "SUBSAMPLED_EXT"),
(ImageCreateFlags::ALIAS.0, "ALIAS"),
(
ImageCreateFlags::SPLIT_INSTANCE_BIND_REGIONS.0,
"SPLIT_INSTANCE_BIND_REGIONS",
),
(
ImageCreateFlags::TYPE_2D_ARRAY_COMPATIBLE.0,
"TYPE_2D_ARRAY_COMPATIBLE",
),
(
ImageCreateFlags::BLOCK_TEXEL_VIEW_COMPATIBLE.0,
"BLOCK_TEXEL_VIEW_COMPATIBLE",
),
(ImageCreateFlags::EXTENDED_USAGE.0, "EXTENDED_USAGE"),
(ImageCreateFlags::PROTECTED.0, "PROTECTED"),
(ImageCreateFlags::DISJOINT.0, "DISJOINT"),
];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for ImageLayout {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::UNDEFINED => Some("UNDEFINED"),
Self::GENERAL => Some("GENERAL"),
Self::COLOR_ATTACHMENT_OPTIMAL => Some("COLOR_ATTACHMENT_OPTIMAL"),
Self::DEPTH_STENCIL_ATTACHMENT_OPTIMAL => Some("DEPTH_STENCIL_ATTACHMENT_OPTIMAL"),
Self::DEPTH_STENCIL_READ_ONLY_OPTIMAL => Some("DEPTH_STENCIL_READ_ONLY_OPTIMAL"),
Self::SHADER_READ_ONLY_OPTIMAL => Some("SHADER_READ_ONLY_OPTIMAL"),
Self::TRANSFER_SRC_OPTIMAL => Some("TRANSFER_SRC_OPTIMAL"),
Self::TRANSFER_DST_OPTIMAL => Some("TRANSFER_DST_OPTIMAL"),
Self::PREINITIALIZED => Some("PREINITIALIZED"),
Self::PRESENT_SRC_KHR => Some("PRESENT_SRC_KHR"),
Self::SHARED_PRESENT_KHR => Some("SHARED_PRESENT_KHR"),
Self::SHADING_RATE_OPTIMAL_NV => Some("SHADING_RATE_OPTIMAL_NV"),
Self::FRAGMENT_DENSITY_MAP_OPTIMAL_EXT => Some("FRAGMENT_DENSITY_MAP_OPTIMAL_EXT"),
Self::DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL => {
Some("DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL")
}
Self::DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL => {
Some("DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL")
}
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for ImagePipeSurfaceCreateFlagsFUCHSIA {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for ImageTiling {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::OPTIMAL => Some("OPTIMAL"),
Self::LINEAR => Some("LINEAR"),
Self::DRM_FORMAT_MODIFIER_EXT => Some("DRM_FORMAT_MODIFIER_EXT"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for ImageType {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::TYPE_1D => Some("TYPE_1D"),
Self::TYPE_2D => Some("TYPE_2D"),
Self::TYPE_3D => Some("TYPE_3D"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for ImageUsageFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[
(ImageUsageFlags::TRANSFER_SRC.0, "TRANSFER_SRC"),
(ImageUsageFlags::TRANSFER_DST.0, "TRANSFER_DST"),
(ImageUsageFlags::SAMPLED.0, "SAMPLED"),
(ImageUsageFlags::STORAGE.0, "STORAGE"),
(ImageUsageFlags::COLOR_ATTACHMENT.0, "COLOR_ATTACHMENT"),
(
ImageUsageFlags::DEPTH_STENCIL_ATTACHMENT.0,
"DEPTH_STENCIL_ATTACHMENT",
),
(
ImageUsageFlags::TRANSIENT_ATTACHMENT.0,
"TRANSIENT_ATTACHMENT",
),
(ImageUsageFlags::INPUT_ATTACHMENT.0, "INPUT_ATTACHMENT"),
(ImageUsageFlags::RESERVED_13_KHR.0, "RESERVED_13_KHR"),
(ImageUsageFlags::RESERVED_14_KHR.0, "RESERVED_14_KHR"),
(ImageUsageFlags::RESERVED_15_KHR.0, "RESERVED_15_KHR"),
(ImageUsageFlags::RESERVED_10_KHR.0, "RESERVED_10_KHR"),
(ImageUsageFlags::RESERVED_11_KHR.0, "RESERVED_11_KHR"),
(ImageUsageFlags::RESERVED_12_KHR.0, "RESERVED_12_KHR"),
(
ImageUsageFlags::SHADING_RATE_IMAGE_NV.0,
"SHADING_RATE_IMAGE_NV",
),
(
ImageUsageFlags::FRAGMENT_DENSITY_MAP_EXT.0,
"FRAGMENT_DENSITY_MAP_EXT",
),
];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for ImageViewCreateFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[(
ImageViewCreateFlags::FRAGMENT_DENSITY_MAP_DYNAMIC_EXT.0,
"FRAGMENT_DENSITY_MAP_DYNAMIC_EXT",
)];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for ImageViewType {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::TYPE_1D => Some("TYPE_1D"),
Self::TYPE_2D => Some("TYPE_2D"),
Self::TYPE_3D => Some("TYPE_3D"),
Self::CUBE => Some("CUBE"),
Self::TYPE_1D_ARRAY => Some("TYPE_1D_ARRAY"),
Self::TYPE_2D_ARRAY => Some("TYPE_2D_ARRAY"),
Self::CUBE_ARRAY => Some("CUBE_ARRAY"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for IndexType {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::UINT16 => Some("UINT16"),
Self::UINT32 => Some("UINT32"),
Self::NONE_NV => Some("NONE_NV"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for IndirectCommandsLayoutUsageFlagsNVX {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[
(
IndirectCommandsLayoutUsageFlagsNVX::UNORDERED_SEQUENCES.0,
"UNORDERED_SEQUENCES",
),
(
IndirectCommandsLayoutUsageFlagsNVX::SPARSE_SEQUENCES.0,
"SPARSE_SEQUENCES",
),
(
IndirectCommandsLayoutUsageFlagsNVX::EMPTY_EXECUTIONS.0,
"EMPTY_EXECUTIONS",
),
(
IndirectCommandsLayoutUsageFlagsNVX::INDEXED_SEQUENCES.0,
"INDEXED_SEQUENCES",
),
];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for IndirectCommandsTokenTypeNVX {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::PIPELINE => Some("PIPELINE"),
Self::DESCRIPTOR_SET => Some("DESCRIPTOR_SET"),
Self::INDEX_BUFFER => Some("INDEX_BUFFER"),
Self::VERTEX_BUFFER => Some("VERTEX_BUFFER"),
Self::PUSH_CONSTANT => Some("PUSH_CONSTANT"),
Self::DRAW_INDEXED => Some("DRAW_INDEXED"),
Self::DRAW => Some("DRAW"),
Self::DISPATCH => Some("DISPATCH"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for InstanceCreateFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for InternalAllocationType {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::EXECUTABLE => Some("EXECUTABLE"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for LogicOp {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::CLEAR => Some("CLEAR"),
Self::AND => Some("AND"),
Self::AND_REVERSE => Some("AND_REVERSE"),
Self::COPY => Some("COPY"),
Self::AND_INVERTED => Some("AND_INVERTED"),
Self::NO_OP => Some("NO_OP"),
Self::XOR => Some("XOR"),
Self::OR => Some("OR"),
Self::NOR => Some("NOR"),
Self::EQUIVALENT => Some("EQUIVALENT"),
Self::INVERT => Some("INVERT"),
Self::OR_REVERSE => Some("OR_REVERSE"),
Self::COPY_INVERTED => Some("COPY_INVERTED"),
Self::OR_INVERTED => Some("OR_INVERTED"),
Self::NAND => Some("NAND"),
Self::SET => Some("SET"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for MacOSSurfaceCreateFlagsMVK {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for MemoryAllocateFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[(MemoryAllocateFlags::DEVICE_MASK.0, "DEVICE_MASK")];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for MemoryHeapFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[
(MemoryHeapFlags::DEVICE_LOCAL.0, "DEVICE_LOCAL"),
(MemoryHeapFlags::MULTI_INSTANCE.0, "MULTI_INSTANCE"),
];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for MemoryMapFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for MemoryOverallocationBehaviorAMD {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::DEFAULT => Some("DEFAULT"),
Self::ALLOWED => Some("ALLOWED"),
Self::DISALLOWED => Some("DISALLOWED"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for MemoryPropertyFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[
(MemoryPropertyFlags::DEVICE_LOCAL.0, "DEVICE_LOCAL"),
(MemoryPropertyFlags::HOST_VISIBLE.0, "HOST_VISIBLE"),
(MemoryPropertyFlags::HOST_COHERENT.0, "HOST_COHERENT"),
(MemoryPropertyFlags::HOST_CACHED.0, "HOST_CACHED"),
(MemoryPropertyFlags::LAZILY_ALLOCATED.0, "LAZILY_ALLOCATED"),
(MemoryPropertyFlags::PROTECTED.0, "PROTECTED"),
];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for ObjectEntryTypeNVX {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::DESCRIPTOR_SET => Some("DESCRIPTOR_SET"),
Self::PIPELINE => Some("PIPELINE"),
Self::INDEX_BUFFER => Some("INDEX_BUFFER"),
Self::VERTEX_BUFFER => Some("VERTEX_BUFFER"),
Self::PUSH_CONSTANT => Some("PUSH_CONSTANT"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for ObjectEntryUsageFlagsNVX {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[
(ObjectEntryUsageFlagsNVX::GRAPHICS.0, "GRAPHICS"),
(ObjectEntryUsageFlagsNVX::COMPUTE.0, "COMPUTE"),
];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for ObjectType {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::UNKNOWN => Some("UNKNOWN"),
Self::INSTANCE => Some("INSTANCE"),
Self::PHYSICAL_DEVICE => Some("PHYSICAL_DEVICE"),
Self::DEVICE => Some("DEVICE"),
Self::QUEUE => Some("QUEUE"),
Self::SEMAPHORE => Some("SEMAPHORE"),
Self::COMMAND_BUFFER => Some("COMMAND_BUFFER"),
Self::FENCE => Some("FENCE"),
Self::DEVICE_MEMORY => Some("DEVICE_MEMORY"),
Self::BUFFER => Some("BUFFER"),
Self::IMAGE => Some("IMAGE"),
Self::EVENT => Some("EVENT"),
Self::QUERY_POOL => Some("QUERY_POOL"),
Self::BUFFER_VIEW => Some("BUFFER_VIEW"),
Self::IMAGE_VIEW => Some("IMAGE_VIEW"),
Self::SHADER_MODULE => Some("SHADER_MODULE"),
Self::PIPELINE_CACHE => Some("PIPELINE_CACHE"),
Self::PIPELINE_LAYOUT => Some("PIPELINE_LAYOUT"),
Self::RENDER_PASS => Some("RENDER_PASS"),
Self::PIPELINE => Some("PIPELINE"),
Self::DESCRIPTOR_SET_LAYOUT => Some("DESCRIPTOR_SET_LAYOUT"),
Self::SAMPLER => Some("SAMPLER"),
Self::DESCRIPTOR_POOL => Some("DESCRIPTOR_POOL"),
Self::DESCRIPTOR_SET => Some("DESCRIPTOR_SET"),
Self::FRAMEBUFFER => Some("FRAMEBUFFER"),
Self::COMMAND_POOL => Some("COMMAND_POOL"),
Self::SURFACE_KHR => Some("SURFACE_KHR"),
Self::SWAPCHAIN_KHR => Some("SWAPCHAIN_KHR"),
Self::DISPLAY_KHR => Some("DISPLAY_KHR"),
Self::DISPLAY_MODE_KHR => Some("DISPLAY_MODE_KHR"),
Self::DEBUG_REPORT_CALLBACK_EXT => Some("DEBUG_REPORT_CALLBACK_EXT"),
Self::OBJECT_TABLE_NVX => Some("OBJECT_TABLE_NVX"),
Self::INDIRECT_COMMANDS_LAYOUT_NVX => Some("INDIRECT_COMMANDS_LAYOUT_NVX"),
Self::DEBUG_UTILS_MESSENGER_EXT => Some("DEBUG_UTILS_MESSENGER_EXT"),
Self::VALIDATION_CACHE_EXT => Some("VALIDATION_CACHE_EXT"),
Self::ACCELERATION_STRUCTURE_NV => Some("ACCELERATION_STRUCTURE_NV"),
Self::SAMPLER_YCBCR_CONVERSION => Some("SAMPLER_YCBCR_CONVERSION"),
Self::DESCRIPTOR_UPDATE_TEMPLATE => Some("DESCRIPTOR_UPDATE_TEMPLATE"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for PeerMemoryFeatureFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[
(PeerMemoryFeatureFlags::COPY_SRC.0, "COPY_SRC"),
(PeerMemoryFeatureFlags::COPY_DST.0, "COPY_DST"),
(PeerMemoryFeatureFlags::GENERIC_SRC.0, "GENERIC_SRC"),
(PeerMemoryFeatureFlags::GENERIC_DST.0, "GENERIC_DST"),
];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for PhysicalDeviceType {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::OTHER => Some("OTHER"),
Self::INTEGRATED_GPU => Some("INTEGRATED_GPU"),
Self::DISCRETE_GPU => Some("DISCRETE_GPU"),
Self::VIRTUAL_GPU => Some("VIRTUAL_GPU"),
Self::CPU => Some("CPU"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for PipelineBindPoint {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::GRAPHICS => Some("GRAPHICS"),
Self::COMPUTE => Some("COMPUTE"),
Self::RAY_TRACING_NV => Some("RAY_TRACING_NV"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for PipelineCacheCreateFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for PipelineCacheHeaderVersion {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::ONE => Some("ONE"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for PipelineColorBlendStateCreateFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for PipelineCoverageModulationStateCreateFlagsNV {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for PipelineCoverageToColorStateCreateFlagsNV {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for PipelineCreateFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[
(
PipelineCreateFlags::DISABLE_OPTIMIZATION.0,
"DISABLE_OPTIMIZATION",
),
(
PipelineCreateFlags::ALLOW_DERIVATIVES.0,
"ALLOW_DERIVATIVES",
),
(PipelineCreateFlags::DERIVATIVE.0, "DERIVATIVE"),
(PipelineCreateFlags::DEFER_COMPILE_NV.0, "DEFER_COMPILE_NV"),
(
PipelineCreateFlags::VIEW_INDEX_FROM_DEVICE_INDEX.0,
"VIEW_INDEX_FROM_DEVICE_INDEX",
),
(PipelineCreateFlags::DISPATCH_BASE.0, "DISPATCH_BASE"),
];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for PipelineDepthStencilStateCreateFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for PipelineDiscardRectangleStateCreateFlagsEXT {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for PipelineDynamicStateCreateFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for PipelineInputAssemblyStateCreateFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for PipelineLayoutCreateFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for PipelineMultisampleStateCreateFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for PipelineRasterizationConservativeStateCreateFlagsEXT {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for PipelineRasterizationStateCreateFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for PipelineRasterizationStateStreamCreateFlagsEXT {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for PipelineShaderStageCreateFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for PipelineStageFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[
(PipelineStageFlags::TOP_OF_PIPE.0, "TOP_OF_PIPE"),
(PipelineStageFlags::DRAW_INDIRECT.0, "DRAW_INDIRECT"),
(PipelineStageFlags::VERTEX_INPUT.0, "VERTEX_INPUT"),
(PipelineStageFlags::VERTEX_SHADER.0, "VERTEX_SHADER"),
(
PipelineStageFlags::TESSELLATION_CONTROL_SHADER.0,
"TESSELLATION_CONTROL_SHADER",
),
(
PipelineStageFlags::TESSELLATION_EVALUATION_SHADER.0,
"TESSELLATION_EVALUATION_SHADER",
),
(PipelineStageFlags::GEOMETRY_SHADER.0, "GEOMETRY_SHADER"),
(PipelineStageFlags::FRAGMENT_SHADER.0, "FRAGMENT_SHADER"),
(
PipelineStageFlags::EARLY_FRAGMENT_TESTS.0,
"EARLY_FRAGMENT_TESTS",
),
(
PipelineStageFlags::LATE_FRAGMENT_TESTS.0,
"LATE_FRAGMENT_TESTS",
),
(
PipelineStageFlags::COLOR_ATTACHMENT_OUTPUT.0,
"COLOR_ATTACHMENT_OUTPUT",
),
(PipelineStageFlags::COMPUTE_SHADER.0, "COMPUTE_SHADER"),
(PipelineStageFlags::TRANSFER.0, "TRANSFER"),
(PipelineStageFlags::BOTTOM_OF_PIPE.0, "BOTTOM_OF_PIPE"),
(PipelineStageFlags::HOST.0, "HOST"),
(PipelineStageFlags::ALL_GRAPHICS.0, "ALL_GRAPHICS"),
(PipelineStageFlags::ALL_COMMANDS.0, "ALL_COMMANDS"),
(PipelineStageFlags::RESERVED_27_KHR.0, "RESERVED_27_KHR"),
(PipelineStageFlags::RESERVED_26_KHR.0, "RESERVED_26_KHR"),
(
PipelineStageFlags::TRANSFORM_FEEDBACK_EXT.0,
"TRANSFORM_FEEDBACK_EXT",
),
(
PipelineStageFlags::CONDITIONAL_RENDERING_EXT.0,
"CONDITIONAL_RENDERING_EXT",
),
(
PipelineStageFlags::COMMAND_PROCESS_NVX.0,
"COMMAND_PROCESS_NVX",
),
(
PipelineStageFlags::SHADING_RATE_IMAGE_NV.0,
"SHADING_RATE_IMAGE_NV",
),
(
PipelineStageFlags::RAY_TRACING_SHADER_NV.0,
"RAY_TRACING_SHADER_NV",
),
(
PipelineStageFlags::ACCELERATION_STRUCTURE_BUILD_NV.0,
"ACCELERATION_STRUCTURE_BUILD_NV",
),
(PipelineStageFlags::TASK_SHADER_NV.0, "TASK_SHADER_NV"),
(PipelineStageFlags::MESH_SHADER_NV.0, "MESH_SHADER_NV"),
(
PipelineStageFlags::FRAGMENT_DENSITY_PROCESS_EXT.0,
"FRAGMENT_DENSITY_PROCESS_EXT",
),
];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for PipelineTessellationStateCreateFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for PipelineVertexInputStateCreateFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for PipelineViewportStateCreateFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for PipelineViewportSwizzleStateCreateFlagsNV {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for PointClippingBehavior {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::ALL_CLIP_PLANES => Some("ALL_CLIP_PLANES"),
Self::USER_CLIP_PLANES_ONLY => Some("USER_CLIP_PLANES_ONLY"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for PolygonMode {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::FILL => Some("FILL"),
Self::LINE => Some("LINE"),
Self::POINT => Some("POINT"),
Self::FILL_RECTANGLE_NV => Some("FILL_RECTANGLE_NV"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for PresentModeKHR {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::IMMEDIATE => Some("IMMEDIATE"),
Self::MAILBOX => Some("MAILBOX"),
Self::FIFO => Some("FIFO"),
Self::FIFO_RELAXED => Some("FIFO_RELAXED"),
Self::SHARED_DEMAND_REFRESH => Some("SHARED_DEMAND_REFRESH"),
Self::SHARED_CONTINUOUS_REFRESH => Some("SHARED_CONTINUOUS_REFRESH"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for PrimitiveTopology {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::POINT_LIST => Some("POINT_LIST"),
Self::LINE_LIST => Some("LINE_LIST"),
Self::LINE_STRIP => Some("LINE_STRIP"),
Self::TRIANGLE_LIST => Some("TRIANGLE_LIST"),
Self::TRIANGLE_STRIP => Some("TRIANGLE_STRIP"),
Self::TRIANGLE_FAN => Some("TRIANGLE_FAN"),
Self::LINE_LIST_WITH_ADJACENCY => Some("LINE_LIST_WITH_ADJACENCY"),
Self::LINE_STRIP_WITH_ADJACENCY => Some("LINE_STRIP_WITH_ADJACENCY"),
Self::TRIANGLE_LIST_WITH_ADJACENCY => Some("TRIANGLE_LIST_WITH_ADJACENCY"),
Self::TRIANGLE_STRIP_WITH_ADJACENCY => Some("TRIANGLE_STRIP_WITH_ADJACENCY"),
Self::PATCH_LIST => Some("PATCH_LIST"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for QueryControlFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[(QueryControlFlags::PRECISE.0, "PRECISE")];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for QueryPipelineStatisticFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[
(
QueryPipelineStatisticFlags::INPUT_ASSEMBLY_VERTICES.0,
"INPUT_ASSEMBLY_VERTICES",
),
(
QueryPipelineStatisticFlags::INPUT_ASSEMBLY_PRIMITIVES.0,
"INPUT_ASSEMBLY_PRIMITIVES",
),
(
QueryPipelineStatisticFlags::VERTEX_SHADER_INVOCATIONS.0,
"VERTEX_SHADER_INVOCATIONS",
),
(
QueryPipelineStatisticFlags::GEOMETRY_SHADER_INVOCATIONS.0,
"GEOMETRY_SHADER_INVOCATIONS",
),
(
QueryPipelineStatisticFlags::GEOMETRY_SHADER_PRIMITIVES.0,
"GEOMETRY_SHADER_PRIMITIVES",
),
(
QueryPipelineStatisticFlags::CLIPPING_INVOCATIONS.0,
"CLIPPING_INVOCATIONS",
),
(
QueryPipelineStatisticFlags::CLIPPING_PRIMITIVES.0,
"CLIPPING_PRIMITIVES",
),
(
QueryPipelineStatisticFlags::FRAGMENT_SHADER_INVOCATIONS.0,
"FRAGMENT_SHADER_INVOCATIONS",
),
(
QueryPipelineStatisticFlags::TESSELLATION_CONTROL_SHADER_PATCHES.0,
"TESSELLATION_CONTROL_SHADER_PATCHES",
),
(
QueryPipelineStatisticFlags::TESSELLATION_EVALUATION_SHADER_INVOCATIONS.0,
"TESSELLATION_EVALUATION_SHADER_INVOCATIONS",
),
(
QueryPipelineStatisticFlags::COMPUTE_SHADER_INVOCATIONS.0,
"COMPUTE_SHADER_INVOCATIONS",
),
];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for QueryPoolCreateFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for QueryResultFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[
(QueryResultFlags::TYPE_64.0, "TYPE_64"),
(QueryResultFlags::WAIT.0, "WAIT"),
(QueryResultFlags::WITH_AVAILABILITY.0, "WITH_AVAILABILITY"),
(QueryResultFlags::PARTIAL.0, "PARTIAL"),
];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for QueryType {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::OCCLUSION => Some("OCCLUSION"),
Self::PIPELINE_STATISTICS => Some("PIPELINE_STATISTICS"),
Self::TIMESTAMP => Some("TIMESTAMP"),
Self::RESERVED_8 => Some("RESERVED_8"),
Self::RESERVED_4 => Some("RESERVED_4"),
Self::TRANSFORM_FEEDBACK_STREAM_EXT => Some("TRANSFORM_FEEDBACK_STREAM_EXT"),
Self::ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV => {
Some("ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV")
}
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for QueueFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[
(QueueFlags::GRAPHICS.0, "GRAPHICS"),
(QueueFlags::COMPUTE.0, "COMPUTE"),
(QueueFlags::TRANSFER.0, "TRANSFER"),
(QueueFlags::SPARSE_BINDING.0, "SPARSE_BINDING"),
(QueueFlags::RESERVED_6_KHR.0, "RESERVED_6_KHR"),
(QueueFlags::RESERVED_5_KHR.0, "RESERVED_5_KHR"),
(QueueFlags::PROTECTED.0, "PROTECTED"),
];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for QueueGlobalPriorityEXT {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::LOW => Some("LOW"),
Self::MEDIUM => Some("MEDIUM"),
Self::HIGH => Some("HIGH"),
Self::REALTIME => Some("REALTIME"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for RasterizationOrderAMD {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::STRICT => Some("STRICT"),
Self::RELAXED => Some("RELAXED"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for RayTracingShaderGroupTypeNV {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::GENERAL => Some("GENERAL"),
Self::TRIANGLES_HIT_GROUP => Some("TRIANGLES_HIT_GROUP"),
Self::PROCEDURAL_HIT_GROUP => Some("PROCEDURAL_HIT_GROUP"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for RenderPassCreateFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] =
&[(RenderPassCreateFlags::RESERVED_0_KHR.0, "RESERVED_0_KHR")];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for ResolveModeFlagsKHR {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[
(ResolveModeFlagsKHR::NONE.0, "NONE"),
(ResolveModeFlagsKHR::SAMPLE_ZERO.0, "SAMPLE_ZERO"),
(ResolveModeFlagsKHR::AVERAGE.0, "AVERAGE"),
(ResolveModeFlagsKHR::MIN.0, "MIN"),
(ResolveModeFlagsKHR::MAX.0, "MAX"),
];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for Result {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::SUCCESS => Some("SUCCESS"),
Self::NOT_READY => Some("NOT_READY"),
Self::TIMEOUT => Some("TIMEOUT"),
Self::EVENT_SET => Some("EVENT_SET"),
Self::EVENT_RESET => Some("EVENT_RESET"),
Self::INCOMPLETE => Some("INCOMPLETE"),
Self::ERROR_OUT_OF_HOST_MEMORY => Some("ERROR_OUT_OF_HOST_MEMORY"),
Self::ERROR_OUT_OF_DEVICE_MEMORY => Some("ERROR_OUT_OF_DEVICE_MEMORY"),
Self::ERROR_INITIALIZATION_FAILED => Some("ERROR_INITIALIZATION_FAILED"),
Self::ERROR_DEVICE_LOST => Some("ERROR_DEVICE_LOST"),
Self::ERROR_MEMORY_MAP_FAILED => Some("ERROR_MEMORY_MAP_FAILED"),
Self::ERROR_LAYER_NOT_PRESENT => Some("ERROR_LAYER_NOT_PRESENT"),
Self::ERROR_EXTENSION_NOT_PRESENT => Some("ERROR_EXTENSION_NOT_PRESENT"),
Self::ERROR_FEATURE_NOT_PRESENT => Some("ERROR_FEATURE_NOT_PRESENT"),
Self::ERROR_INCOMPATIBLE_DRIVER => Some("ERROR_INCOMPATIBLE_DRIVER"),
Self::ERROR_TOO_MANY_OBJECTS => Some("ERROR_TOO_MANY_OBJECTS"),
Self::ERROR_FORMAT_NOT_SUPPORTED => Some("ERROR_FORMAT_NOT_SUPPORTED"),
Self::ERROR_FRAGMENTED_POOL => Some("ERROR_FRAGMENTED_POOL"),
Self::ERROR_SURFACE_LOST_KHR => Some("ERROR_SURFACE_LOST_KHR"),
Self::ERROR_NATIVE_WINDOW_IN_USE_KHR => Some("ERROR_NATIVE_WINDOW_IN_USE_KHR"),
Self::SUBOPTIMAL_KHR => Some("SUBOPTIMAL_KHR"),
Self::ERROR_OUT_OF_DATE_KHR => Some("ERROR_OUT_OF_DATE_KHR"),
Self::ERROR_INCOMPATIBLE_DISPLAY_KHR => Some("ERROR_INCOMPATIBLE_DISPLAY_KHR"),
Self::ERROR_VALIDATION_FAILED_EXT => Some("ERROR_VALIDATION_FAILED_EXT"),
Self::ERROR_INVALID_SHADER_NV => Some("ERROR_INVALID_SHADER_NV"),
Self::ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT => {
Some("ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT")
}
Self::ERROR_FRAGMENTATION_EXT => Some("ERROR_FRAGMENTATION_EXT"),
Self::ERROR_NOT_PERMITTED_EXT => Some("ERROR_NOT_PERMITTED_EXT"),
Self::ERROR_INVALID_DEVICE_ADDRESS_EXT => Some("ERROR_INVALID_DEVICE_ADDRESS_EXT"),
Self::ERROR_OUT_OF_POOL_MEMORY => Some("ERROR_OUT_OF_POOL_MEMORY"),
Self::ERROR_INVALID_EXTERNAL_HANDLE => Some("ERROR_INVALID_EXTERNAL_HANDLE"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for SampleCountFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[
(SampleCountFlags::TYPE_1.0, "TYPE_1"),
(SampleCountFlags::TYPE_2.0, "TYPE_2"),
(SampleCountFlags::TYPE_4.0, "TYPE_4"),
(SampleCountFlags::TYPE_8.0, "TYPE_8"),
(SampleCountFlags::TYPE_16.0, "TYPE_16"),
(SampleCountFlags::TYPE_32.0, "TYPE_32"),
(SampleCountFlags::TYPE_64.0, "TYPE_64"),
];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for SamplerAddressMode {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::REPEAT => Some("REPEAT"),
Self::MIRRORED_REPEAT => Some("MIRRORED_REPEAT"),
Self::CLAMP_TO_EDGE => Some("CLAMP_TO_EDGE"),
Self::CLAMP_TO_BORDER => Some("CLAMP_TO_BORDER"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for SamplerCreateFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[
(SamplerCreateFlags::SUBSAMPLED_EXT.0, "SUBSAMPLED_EXT"),
(
SamplerCreateFlags::SUBSAMPLED_COARSE_RECONSTRUCTION_EXT.0,
"SUBSAMPLED_COARSE_RECONSTRUCTION_EXT",
),
];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for SamplerMipmapMode {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::NEAREST => Some("NEAREST"),
Self::LINEAR => Some("LINEAR"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for SamplerReductionModeEXT {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::WEIGHTED_AVERAGE => Some("WEIGHTED_AVERAGE"),
Self::MIN => Some("MIN"),
Self::MAX => Some("MAX"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for SamplerYcbcrModelConversion {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::RGB_IDENTITY => Some("RGB_IDENTITY"),
Self::YCBCR_IDENTITY => Some("YCBCR_IDENTITY"),
Self::YCBCR_709 => Some("YCBCR_709"),
Self::YCBCR_601 => Some("YCBCR_601"),
Self::YCBCR_2020 => Some("YCBCR_2020"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for SamplerYcbcrRange {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::ITU_FULL => Some("ITU_FULL"),
Self::ITU_NARROW => Some("ITU_NARROW"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for SemaphoreCreateFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for SemaphoreImportFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[(SemaphoreImportFlags::TEMPORARY.0, "TEMPORARY")];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for ShaderInfoTypeAMD {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::STATISTICS => Some("STATISTICS"),
Self::BINARY => Some("BINARY"),
Self::DISASSEMBLY => Some("DISASSEMBLY"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for ShaderModuleCreateFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for ShaderStageFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[
(ShaderStageFlags::VERTEX.0, "VERTEX"),
(
ShaderStageFlags::TESSELLATION_CONTROL.0,
"TESSELLATION_CONTROL",
),
(
ShaderStageFlags::TESSELLATION_EVALUATION.0,
"TESSELLATION_EVALUATION",
),
(ShaderStageFlags::GEOMETRY.0, "GEOMETRY"),
(ShaderStageFlags::FRAGMENT.0, "FRAGMENT"),
(ShaderStageFlags::COMPUTE.0, "COMPUTE"),
(ShaderStageFlags::ALL_GRAPHICS.0, "ALL_GRAPHICS"),
(ShaderStageFlags::ALL.0, "ALL"),
(ShaderStageFlags::RAYGEN_NV.0, "RAYGEN_NV"),
(ShaderStageFlags::ANY_HIT_NV.0, "ANY_HIT_NV"),
(ShaderStageFlags::CLOSEST_HIT_NV.0, "CLOSEST_HIT_NV"),
(ShaderStageFlags::MISS_NV.0, "MISS_NV"),
(ShaderStageFlags::INTERSECTION_NV.0, "INTERSECTION_NV"),
(ShaderStageFlags::CALLABLE_NV.0, "CALLABLE_NV"),
(ShaderStageFlags::TASK_NV.0, "TASK_NV"),
(ShaderStageFlags::MESH_NV.0, "MESH_NV"),
];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for ShadingRatePaletteEntryNV {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::NO_INVOCATIONS => Some("NO_INVOCATIONS"),
Self::TYPE_16_INVOCATIONS_PER_PIXEL => Some("TYPE_16_INVOCATIONS_PER_PIXEL"),
Self::TYPE_8_INVOCATIONS_PER_PIXEL => Some("TYPE_8_INVOCATIONS_PER_PIXEL"),
Self::TYPE_4_INVOCATIONS_PER_PIXEL => Some("TYPE_4_INVOCATIONS_PER_PIXEL"),
Self::TYPE_2_INVOCATIONS_PER_PIXEL => Some("TYPE_2_INVOCATIONS_PER_PIXEL"),
Self::TYPE_1_INVOCATION_PER_PIXEL => Some("TYPE_1_INVOCATION_PER_PIXEL"),
Self::TYPE_1_INVOCATION_PER_2X1_PIXELS => Some("TYPE_1_INVOCATION_PER_2X1_PIXELS"),
Self::TYPE_1_INVOCATION_PER_1X2_PIXELS => Some("TYPE_1_INVOCATION_PER_1X2_PIXELS"),
Self::TYPE_1_INVOCATION_PER_2X2_PIXELS => Some("TYPE_1_INVOCATION_PER_2X2_PIXELS"),
Self::TYPE_1_INVOCATION_PER_4X2_PIXELS => Some("TYPE_1_INVOCATION_PER_4X2_PIXELS"),
Self::TYPE_1_INVOCATION_PER_2X4_PIXELS => Some("TYPE_1_INVOCATION_PER_2X4_PIXELS"),
Self::TYPE_1_INVOCATION_PER_4X4_PIXELS => Some("TYPE_1_INVOCATION_PER_4X4_PIXELS"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for SharingMode {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::EXCLUSIVE => Some("EXCLUSIVE"),
Self::CONCURRENT => Some("CONCURRENT"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for SparseImageFormatFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[
(SparseImageFormatFlags::SINGLE_MIPTAIL.0, "SINGLE_MIPTAIL"),
(
SparseImageFormatFlags::ALIGNED_MIP_SIZE.0,
"ALIGNED_MIP_SIZE",
),
(
SparseImageFormatFlags::NONSTANDARD_BLOCK_SIZE.0,
"NONSTANDARD_BLOCK_SIZE",
),
];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for SparseMemoryBindFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[(SparseMemoryBindFlags::METADATA.0, "METADATA")];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for StencilFaceFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[
(StencilFaceFlags::FRONT.0, "FRONT"),
(StencilFaceFlags::BACK.0, "BACK"),
(
StencilFaceFlags::STENCIL_FRONT_AND_BACK.0,
"STENCIL_FRONT_AND_BACK",
),
];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for StencilOp {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::KEEP => Some("KEEP"),
Self::ZERO => Some("ZERO"),
Self::REPLACE => Some("REPLACE"),
Self::INCREMENT_AND_CLAMP => Some("INCREMENT_AND_CLAMP"),
Self::DECREMENT_AND_CLAMP => Some("DECREMENT_AND_CLAMP"),
Self::INVERT => Some("INVERT"),
Self::INCREMENT_AND_WRAP => Some("INCREMENT_AND_WRAP"),
Self::DECREMENT_AND_WRAP => Some("DECREMENT_AND_WRAP"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for StructureType {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::APPLICATION_INFO => Some("APPLICATION_INFO"),
Self::INSTANCE_CREATE_INFO => Some("INSTANCE_CREATE_INFO"),
Self::DEVICE_QUEUE_CREATE_INFO => Some("DEVICE_QUEUE_CREATE_INFO"),
Self::DEVICE_CREATE_INFO => Some("DEVICE_CREATE_INFO"),
Self::SUBMIT_INFO => Some("SUBMIT_INFO"),
Self::MEMORY_ALLOCATE_INFO => Some("MEMORY_ALLOCATE_INFO"),
Self::MAPPED_MEMORY_RANGE => Some("MAPPED_MEMORY_RANGE"),
Self::BIND_SPARSE_INFO => Some("BIND_SPARSE_INFO"),
Self::FENCE_CREATE_INFO => Some("FENCE_CREATE_INFO"),
Self::SEMAPHORE_CREATE_INFO => Some("SEMAPHORE_CREATE_INFO"),
Self::EVENT_CREATE_INFO => Some("EVENT_CREATE_INFO"),
Self::QUERY_POOL_CREATE_INFO => Some("QUERY_POOL_CREATE_INFO"),
Self::BUFFER_CREATE_INFO => Some("BUFFER_CREATE_INFO"),
Self::BUFFER_VIEW_CREATE_INFO => Some("BUFFER_VIEW_CREATE_INFO"),
Self::IMAGE_CREATE_INFO => Some("IMAGE_CREATE_INFO"),
Self::IMAGE_VIEW_CREATE_INFO => Some("IMAGE_VIEW_CREATE_INFO"),
Self::SHADER_MODULE_CREATE_INFO => Some("SHADER_MODULE_CREATE_INFO"),
Self::PIPELINE_CACHE_CREATE_INFO => Some("PIPELINE_CACHE_CREATE_INFO"),
Self::PIPELINE_SHADER_STAGE_CREATE_INFO => Some("PIPELINE_SHADER_STAGE_CREATE_INFO"),
Self::PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO => {
Some("PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO")
}
Self::PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO => {
Some("PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO")
}
Self::PIPELINE_TESSELLATION_STATE_CREATE_INFO => {
Some("PIPELINE_TESSELLATION_STATE_CREATE_INFO")
}
Self::PIPELINE_VIEWPORT_STATE_CREATE_INFO => {
Some("PIPELINE_VIEWPORT_STATE_CREATE_INFO")
}
Self::PIPELINE_RASTERIZATION_STATE_CREATE_INFO => {
Some("PIPELINE_RASTERIZATION_STATE_CREATE_INFO")
}
Self::PIPELINE_MULTISAMPLE_STATE_CREATE_INFO => {
Some("PIPELINE_MULTISAMPLE_STATE_CREATE_INFO")
}
Self::PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO => {
Some("PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO")
}
Self::PIPELINE_COLOR_BLEND_STATE_CREATE_INFO => {
Some("PIPELINE_COLOR_BLEND_STATE_CREATE_INFO")
}
Self::PIPELINE_DYNAMIC_STATE_CREATE_INFO => Some("PIPELINE_DYNAMIC_STATE_CREATE_INFO"),
Self::GRAPHICS_PIPELINE_CREATE_INFO => Some("GRAPHICS_PIPELINE_CREATE_INFO"),
Self::COMPUTE_PIPELINE_CREATE_INFO => Some("COMPUTE_PIPELINE_CREATE_INFO"),
Self::PIPELINE_LAYOUT_CREATE_INFO => Some("PIPELINE_LAYOUT_CREATE_INFO"),
Self::SAMPLER_CREATE_INFO => Some("SAMPLER_CREATE_INFO"),
Self::DESCRIPTOR_SET_LAYOUT_CREATE_INFO => Some("DESCRIPTOR_SET_LAYOUT_CREATE_INFO"),
Self::DESCRIPTOR_POOL_CREATE_INFO => Some("DESCRIPTOR_POOL_CREATE_INFO"),
Self::DESCRIPTOR_SET_ALLOCATE_INFO => Some("DESCRIPTOR_SET_ALLOCATE_INFO"),
Self::WRITE_DESCRIPTOR_SET => Some("WRITE_DESCRIPTOR_SET"),
Self::COPY_DESCRIPTOR_SET => Some("COPY_DESCRIPTOR_SET"),
Self::FRAMEBUFFER_CREATE_INFO => Some("FRAMEBUFFER_CREATE_INFO"),
Self::RENDER_PASS_CREATE_INFO => Some("RENDER_PASS_CREATE_INFO"),
Self::COMMAND_POOL_CREATE_INFO => Some("COMMAND_POOL_CREATE_INFO"),
Self::COMMAND_BUFFER_ALLOCATE_INFO => Some("COMMAND_BUFFER_ALLOCATE_INFO"),
Self::COMMAND_BUFFER_INHERITANCE_INFO => Some("COMMAND_BUFFER_INHERITANCE_INFO"),
Self::COMMAND_BUFFER_BEGIN_INFO => Some("COMMAND_BUFFER_BEGIN_INFO"),
Self::RENDER_PASS_BEGIN_INFO => Some("RENDER_PASS_BEGIN_INFO"),
Self::BUFFER_MEMORY_BARRIER => Some("BUFFER_MEMORY_BARRIER"),
Self::IMAGE_MEMORY_BARRIER => Some("IMAGE_MEMORY_BARRIER"),
Self::MEMORY_BARRIER => Some("MEMORY_BARRIER"),
Self::LOADER_INSTANCE_CREATE_INFO => Some("LOADER_INSTANCE_CREATE_INFO"),
Self::LOADER_DEVICE_CREATE_INFO => Some("LOADER_DEVICE_CREATE_INFO"),
Self::SWAPCHAIN_CREATE_INFO_KHR => Some("SWAPCHAIN_CREATE_INFO_KHR"),
Self::PRESENT_INFO_KHR => Some("PRESENT_INFO_KHR"),
Self::DEVICE_GROUP_PRESENT_CAPABILITIES_KHR => {
Some("DEVICE_GROUP_PRESENT_CAPABILITIES_KHR")
}
Self::IMAGE_SWAPCHAIN_CREATE_INFO_KHR => Some("IMAGE_SWAPCHAIN_CREATE_INFO_KHR"),
Self::BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR => {
Some("BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR")
}
Self::ACQUIRE_NEXT_IMAGE_INFO_KHR => Some("ACQUIRE_NEXT_IMAGE_INFO_KHR"),
Self::DEVICE_GROUP_PRESENT_INFO_KHR => Some("DEVICE_GROUP_PRESENT_INFO_KHR"),
Self::DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR => {
Some("DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR")
}
Self::DISPLAY_MODE_CREATE_INFO_KHR => Some("DISPLAY_MODE_CREATE_INFO_KHR"),
Self::DISPLAY_SURFACE_CREATE_INFO_KHR => Some("DISPLAY_SURFACE_CREATE_INFO_KHR"),
Self::DISPLAY_PRESENT_INFO_KHR => Some("DISPLAY_PRESENT_INFO_KHR"),
Self::XLIB_SURFACE_CREATE_INFO_KHR => Some("XLIB_SURFACE_CREATE_INFO_KHR"),
Self::XCB_SURFACE_CREATE_INFO_KHR => Some("XCB_SURFACE_CREATE_INFO_KHR"),
Self::WAYLAND_SURFACE_CREATE_INFO_KHR => Some("WAYLAND_SURFACE_CREATE_INFO_KHR"),
Self::ANDROID_SURFACE_CREATE_INFO_KHR => Some("ANDROID_SURFACE_CREATE_INFO_KHR"),
Self::WIN32_SURFACE_CREATE_INFO_KHR => Some("WIN32_SURFACE_CREATE_INFO_KHR"),
Self::NATIVE_BUFFER_ANDROID => Some("NATIVE_BUFFER_ANDROID"),
Self::DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT => {
Some("DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT")
}
Self::PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD => {
Some("PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD")
}
Self::DEBUG_MARKER_OBJECT_NAME_INFO_EXT => Some("DEBUG_MARKER_OBJECT_NAME_INFO_EXT"),
Self::DEBUG_MARKER_OBJECT_TAG_INFO_EXT => Some("DEBUG_MARKER_OBJECT_TAG_INFO_EXT"),
Self::DEBUG_MARKER_MARKER_INFO_EXT => Some("DEBUG_MARKER_MARKER_INFO_EXT"),
Self::DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV => {
Some("DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV")
}
Self::DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV => {
Some("DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV")
}
Self::DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV => {
Some("DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV")
}
Self::PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT => {
Some("PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT")
}
Self::PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT => {
Some("PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT")
}
Self::PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT => {
Some("PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT")
}
Self::TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD => {
Some("TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD")
}
Self::PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV => {
Some("PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV")
}
Self::EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV => {
Some("EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV")
}
Self::EXPORT_MEMORY_ALLOCATE_INFO_NV => Some("EXPORT_MEMORY_ALLOCATE_INFO_NV"),
Self::IMPORT_MEMORY_WIN32_HANDLE_INFO_NV => Some("IMPORT_MEMORY_WIN32_HANDLE_INFO_NV"),
Self::EXPORT_MEMORY_WIN32_HANDLE_INFO_NV => Some("EXPORT_MEMORY_WIN32_HANDLE_INFO_NV"),
Self::WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV => {
Some("WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV")
}
Self::VALIDATION_FLAGS_EXT => Some("VALIDATION_FLAGS_EXT"),
Self::VI_SURFACE_CREATE_INFO_NN => Some("VI_SURFACE_CREATE_INFO_NN"),
Self::IMAGE_VIEW_ASTC_DECODE_MODE_EXT => Some("IMAGE_VIEW_ASTC_DECODE_MODE_EXT"),
Self::PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT => {
Some("PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT")
}
Self::IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR => {
Some("IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR")
}
Self::EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR => {
Some("EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR")
}
Self::MEMORY_WIN32_HANDLE_PROPERTIES_KHR => Some("MEMORY_WIN32_HANDLE_PROPERTIES_KHR"),
Self::MEMORY_GET_WIN32_HANDLE_INFO_KHR => Some("MEMORY_GET_WIN32_HANDLE_INFO_KHR"),
Self::IMPORT_MEMORY_FD_INFO_KHR => Some("IMPORT_MEMORY_FD_INFO_KHR"),
Self::MEMORY_FD_PROPERTIES_KHR => Some("MEMORY_FD_PROPERTIES_KHR"),
Self::MEMORY_GET_FD_INFO_KHR => Some("MEMORY_GET_FD_INFO_KHR"),
Self::WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR => {
Some("WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR")
}
Self::IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR => {
Some("IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR")
}
Self::EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR => {
Some("EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR")
}
Self::D3D12_FENCE_SUBMIT_INFO_KHR => Some("D3D12_FENCE_SUBMIT_INFO_KHR"),
Self::SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR => {
Some("SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR")
}
Self::IMPORT_SEMAPHORE_FD_INFO_KHR => Some("IMPORT_SEMAPHORE_FD_INFO_KHR"),
Self::SEMAPHORE_GET_FD_INFO_KHR => Some("SEMAPHORE_GET_FD_INFO_KHR"),
Self::PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR => {
Some("PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR")
}
Self::COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT => {
Some("COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT")
}
Self::PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT => {
Some("PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT")
}
Self::CONDITIONAL_RENDERING_BEGIN_INFO_EXT => {
Some("CONDITIONAL_RENDERING_BEGIN_INFO_EXT")
}
Self::PHYSICAL_DEVICE_FLOAT16_INT8_FEATURES_KHR => {
Some("PHYSICAL_DEVICE_FLOAT16_INT8_FEATURES_KHR")
}
Self::PRESENT_REGIONS_KHR => Some("PRESENT_REGIONS_KHR"),
Self::OBJECT_TABLE_CREATE_INFO_NVX => Some("OBJECT_TABLE_CREATE_INFO_NVX"),
Self::INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NVX => {
Some("INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NVX")
}
Self::CMD_PROCESS_COMMANDS_INFO_NVX => Some("CMD_PROCESS_COMMANDS_INFO_NVX"),
Self::CMD_RESERVE_SPACE_FOR_COMMANDS_INFO_NVX => {
Some("CMD_RESERVE_SPACE_FOR_COMMANDS_INFO_NVX")
}
Self::DEVICE_GENERATED_COMMANDS_LIMITS_NVX => {
Some("DEVICE_GENERATED_COMMANDS_LIMITS_NVX")
}
Self::DEVICE_GENERATED_COMMANDS_FEATURES_NVX => {
Some("DEVICE_GENERATED_COMMANDS_FEATURES_NVX")
}
Self::PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV => {
Some("PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV")
}
Self::SURFACE_CAPABILITIES_2_EXT => Some("SURFACE_CAPABILITIES_2_EXT"),
Self::DISPLAY_POWER_INFO_EXT => Some("DISPLAY_POWER_INFO_EXT"),
Self::DEVICE_EVENT_INFO_EXT => Some("DEVICE_EVENT_INFO_EXT"),
Self::DISPLAY_EVENT_INFO_EXT => Some("DISPLAY_EVENT_INFO_EXT"),
Self::SWAPCHAIN_COUNTER_CREATE_INFO_EXT => Some("SWAPCHAIN_COUNTER_CREATE_INFO_EXT"),
Self::PRESENT_TIMES_INFO_GOOGLE => Some("PRESENT_TIMES_INFO_GOOGLE"),
Self::PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX => {
Some("PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX")
}
Self::PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV => {
Some("PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV")
}
Self::PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT => {
Some("PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT")
}
Self::PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT => {
Some("PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT")
}
Self::PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT => {
Some("PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT")
}
Self::PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT => {
Some("PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT")
}
Self::HDR_METADATA_EXT => Some("HDR_METADATA_EXT"),
Self::ATTACHMENT_DESCRIPTION_2_KHR => Some("ATTACHMENT_DESCRIPTION_2_KHR"),
Self::ATTACHMENT_REFERENCE_2_KHR => Some("ATTACHMENT_REFERENCE_2_KHR"),
Self::SUBPASS_DESCRIPTION_2_KHR => Some("SUBPASS_DESCRIPTION_2_KHR"),
Self::SUBPASS_DEPENDENCY_2_KHR => Some("SUBPASS_DEPENDENCY_2_KHR"),
Self::RENDER_PASS_CREATE_INFO_2_KHR => Some("RENDER_PASS_CREATE_INFO_2_KHR"),
Self::SUBPASS_BEGIN_INFO_KHR => Some("SUBPASS_BEGIN_INFO_KHR"),
Self::SUBPASS_END_INFO_KHR => Some("SUBPASS_END_INFO_KHR"),
Self::SHARED_PRESENT_SURFACE_CAPABILITIES_KHR => {
Some("SHARED_PRESENT_SURFACE_CAPABILITIES_KHR")
}
Self::IMPORT_FENCE_WIN32_HANDLE_INFO_KHR => Some("IMPORT_FENCE_WIN32_HANDLE_INFO_KHR"),
Self::EXPORT_FENCE_WIN32_HANDLE_INFO_KHR => Some("EXPORT_FENCE_WIN32_HANDLE_INFO_KHR"),
Self::FENCE_GET_WIN32_HANDLE_INFO_KHR => Some("FENCE_GET_WIN32_HANDLE_INFO_KHR"),
Self::IMPORT_FENCE_FD_INFO_KHR => Some("IMPORT_FENCE_FD_INFO_KHR"),
Self::FENCE_GET_FD_INFO_KHR => Some("FENCE_GET_FD_INFO_KHR"),
Self::PHYSICAL_DEVICE_SURFACE_INFO_2_KHR => Some("PHYSICAL_DEVICE_SURFACE_INFO_2_KHR"),
Self::SURFACE_CAPABILITIES_2_KHR => Some("SURFACE_CAPABILITIES_2_KHR"),
Self::SURFACE_FORMAT_2_KHR => Some("SURFACE_FORMAT_2_KHR"),
Self::DISPLAY_PROPERTIES_2_KHR => Some("DISPLAY_PROPERTIES_2_KHR"),
Self::DISPLAY_PLANE_PROPERTIES_2_KHR => Some("DISPLAY_PLANE_PROPERTIES_2_KHR"),
Self::DISPLAY_MODE_PROPERTIES_2_KHR => Some("DISPLAY_MODE_PROPERTIES_2_KHR"),
Self::DISPLAY_PLANE_INFO_2_KHR => Some("DISPLAY_PLANE_INFO_2_KHR"),
Self::DISPLAY_PLANE_CAPABILITIES_2_KHR => Some("DISPLAY_PLANE_CAPABILITIES_2_KHR"),
Self::IOS_SURFACE_CREATE_INFO_M => Some("IOS_SURFACE_CREATE_INFO_M"),
Self::MACOS_SURFACE_CREATE_INFO_M => Some("MACOS_SURFACE_CREATE_INFO_M"),
Self::DEBUG_UTILS_OBJECT_NAME_INFO_EXT => Some("DEBUG_UTILS_OBJECT_NAME_INFO_EXT"),
Self::DEBUG_UTILS_OBJECT_TAG_INFO_EXT => Some("DEBUG_UTILS_OBJECT_TAG_INFO_EXT"),
Self::DEBUG_UTILS_LABEL_EXT => Some("DEBUG_UTILS_LABEL_EXT"),
Self::DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT => {
Some("DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT")
}
Self::DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT => {
Some("DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT")
}
Self::ANDROID_HARDWARE_BUFFER_USAGE_ANDROID => {
Some("ANDROID_HARDWARE_BUFFER_USAGE_ANDROID")
}
Self::ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID => {
Some("ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID")
}
Self::ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID => {
Some("ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID")
}
Self::IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID => {
Some("IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID")
}
Self::MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID => {
Some("MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID")
}
Self::EXTERNAL_FORMAT_ANDROID => Some("EXTERNAL_FORMAT_ANDROID"),
Self::PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT => {
Some("PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT")
}
Self::SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT => {
Some("SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT")
}
Self::PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT => {
Some("PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT")
}
Self::PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT => {
Some("PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT")
}
Self::WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT => {
Some("WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT")
}
Self::DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO_EXT => {
Some("DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO_EXT")
}
Self::SAMPLE_LOCATIONS_INFO_EXT => Some("SAMPLE_LOCATIONS_INFO_EXT"),
Self::RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT => {
Some("RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT")
}
Self::PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT => {
Some("PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT")
}
Self::PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT => {
Some("PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT")
}
Self::MULTISAMPLE_PROPERTIES_EXT => Some("MULTISAMPLE_PROPERTIES_EXT"),
Self::IMAGE_FORMAT_LIST_CREATE_INFO_KHR => Some("IMAGE_FORMAT_LIST_CREATE_INFO_KHR"),
Self::PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT => {
Some("PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT")
}
Self::PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT => {
Some("PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT")
}
Self::PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT => {
Some("PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT")
}
Self::PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV => {
Some("PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV")
}
Self::PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV => {
Some("PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV")
}
Self::DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT => {
Some("DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT")
}
Self::DRM_FORMAT_MODIFIER_PROPERTIES_EXT => Some("DRM_FORMAT_MODIFIER_PROPERTIES_EXT"),
Self::PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT => {
Some("PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT")
}
Self::IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT => {
Some("IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT")
}
Self::IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT => {
Some("IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT")
}
Self::IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT => {
Some("IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT")
}
Self::VALIDATION_CACHE_CREATE_INFO_EXT => Some("VALIDATION_CACHE_CREATE_INFO_EXT"),
Self::SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT => {
Some("SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT")
}
Self::DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT => {
Some("DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT")
}
Self::PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT => {
Some("PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT")
}
Self::PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT => {
Some("PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT")
}
Self::DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT => {
Some("DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT")
}
Self::DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT => {
Some("DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT")
}
Self::PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV => {
Some("PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV")
}
Self::PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV => {
Some("PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV")
}
Self::PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV => {
Some("PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV")
}
Self::PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV => {
Some("PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV")
}
Self::RAY_TRACING_PIPELINE_CREATE_INFO_NV => {
Some("RAY_TRACING_PIPELINE_CREATE_INFO_NV")
}
Self::ACCELERATION_STRUCTURE_CREATE_INFO_NV => {
Some("ACCELERATION_STRUCTURE_CREATE_INFO_NV")
}
Self::GEOMETRY_NV => Some("GEOMETRY_NV"),
Self::GEOMETRY_TRIANGLES_NV => Some("GEOMETRY_TRIANGLES_NV"),
Self::GEOMETRY_AABB_NV => Some("GEOMETRY_AABB_NV"),
Self::BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV => {
Some("BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV")
}
Self::WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV => {
Some("WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV")
}
Self::ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV => {
Some("ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV")
}
Self::PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV => {
Some("PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV")
}
Self::RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV => {
Some("RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV")
}
Self::ACCELERATION_STRUCTURE_INFO_NV => Some("ACCELERATION_STRUCTURE_INFO_NV"),
Self::PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV => {
Some("PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV")
}
Self::PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV => {
Some("PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV")
}
Self::DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT => {
Some("DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT")
}
Self::PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR => {
Some("PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR")
}
Self::IMPORT_MEMORY_HOST_POINTER_INFO_EXT => {
Some("IMPORT_MEMORY_HOST_POINTER_INFO_EXT")
}
Self::MEMORY_HOST_POINTER_PROPERTIES_EXT => Some("MEMORY_HOST_POINTER_PROPERTIES_EXT"),
Self::PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT => {
Some("PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT")
}
Self::PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR => {
Some("PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR")
}
Self::CALIBRATED_TIMESTAMP_INFO_EXT => Some("CALIBRATED_TIMESTAMP_INFO_EXT"),
Self::PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD => {
Some("PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD")
}
Self::DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD => {
Some("DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD")
}
Self::PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT => {
Some("PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT")
}
Self::PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT => {
Some("PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT")
}
Self::PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT => {
Some("PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT")
}
Self::PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR => {
Some("PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR")
}
Self::PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR => {
Some("PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR")
}
Self::PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR => {
Some("PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR")
}
Self::SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR => {
Some("SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR")
}
Self::PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV => {
Some("PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV")
}
Self::PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV => {
Some("PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV")
}
Self::PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV => {
Some("PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV")
}
Self::PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV => {
Some("PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV")
}
Self::PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV => {
Some("PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV")
}
Self::PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV => {
Some("PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV")
}
Self::PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV => {
Some("PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV")
}
Self::CHECKPOINT_DATA_NV => Some("CHECKPOINT_DATA_NV"),
Self::QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV => {
Some("QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV")
}
Self::PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR => {
Some("PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR")
}
Self::PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT => {
Some("PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT")
}
Self::IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA => {
Some("IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA")
}
Self::PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT => {
Some("PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT")
}
Self::PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT => {
Some("PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT")
}
Self::RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT => {
Some("RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT")
}
Self::PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT => {
Some("PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT")
}
Self::PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT => {
Some("PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT")
}
Self::PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT => {
Some("PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT")
}
Self::MEMORY_PRIORITY_ALLOCATE_INFO_EXT => Some("MEMORY_PRIORITY_ALLOCATE_INFO_EXT"),
Self::PHYSICAL_DEVICE_BUFFER_ADDRESS_FEATURES_EXT => {
Some("PHYSICAL_DEVICE_BUFFER_ADDRESS_FEATURES_EXT")
}
Self::BUFFER_DEVICE_ADDRESS_INFO_EXT => Some("BUFFER_DEVICE_ADDRESS_INFO_EXT"),
Self::BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT => {
Some("BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT")
}
Self::IMAGE_STENCIL_USAGE_CREATE_INFO_EXT => {
Some("IMAGE_STENCIL_USAGE_CREATE_INFO_EXT")
}
Self::VALIDATION_FEATURES_EXT => Some("VALIDATION_FEATURES_EXT"),
Self::PHYSICAL_DEVICE_SUBGROUP_PROPERTIES => {
Some("PHYSICAL_DEVICE_SUBGROUP_PROPERTIES")
}
Self::BIND_BUFFER_MEMORY_INFO => Some("BIND_BUFFER_MEMORY_INFO"),
Self::BIND_IMAGE_MEMORY_INFO => Some("BIND_IMAGE_MEMORY_INFO"),
Self::PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES => {
Some("PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES")
}
Self::MEMORY_DEDICATED_REQUIREMENTS => Some("MEMORY_DEDICATED_REQUIREMENTS"),
Self::MEMORY_DEDICATED_ALLOCATE_INFO => Some("MEMORY_DEDICATED_ALLOCATE_INFO"),
Self::MEMORY_ALLOCATE_FLAGS_INFO => Some("MEMORY_ALLOCATE_FLAGS_INFO"),
Self::DEVICE_GROUP_RENDER_PASS_BEGIN_INFO => {
Some("DEVICE_GROUP_RENDER_PASS_BEGIN_INFO")
}
Self::DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO => {
Some("DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO")
}
Self::DEVICE_GROUP_SUBMIT_INFO => Some("DEVICE_GROUP_SUBMIT_INFO"),
Self::DEVICE_GROUP_BIND_SPARSE_INFO => Some("DEVICE_GROUP_BIND_SPARSE_INFO"),
Self::BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO => {
Some("BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO")
}
Self::BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO => {
Some("BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO")
}
Self::PHYSICAL_DEVICE_GROUP_PROPERTIES => Some("PHYSICAL_DEVICE_GROUP_PROPERTIES"),
Self::DEVICE_GROUP_DEVICE_CREATE_INFO => Some("DEVICE_GROUP_DEVICE_CREATE_INFO"),
Self::BUFFER_MEMORY_REQUIREMENTS_INFO_2 => Some("BUFFER_MEMORY_REQUIREMENTS_INFO_2"),
Self::IMAGE_MEMORY_REQUIREMENTS_INFO_2 => Some("IMAGE_MEMORY_REQUIREMENTS_INFO_2"),
Self::IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2 => {
Some("IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2")
}
Self::MEMORY_REQUIREMENTS_2 => Some("MEMORY_REQUIREMENTS_2"),
Self::SPARSE_IMAGE_MEMORY_REQUIREMENTS_2 => Some("SPARSE_IMAGE_MEMORY_REQUIREMENTS_2"),
Self::PHYSICAL_DEVICE_FEATURES_2 => Some("PHYSICAL_DEVICE_FEATURES_2"),
Self::PHYSICAL_DEVICE_PROPERTIES_2 => Some("PHYSICAL_DEVICE_PROPERTIES_2"),
Self::FORMAT_PROPERTIES_2 => Some("FORMAT_PROPERTIES_2"),
Self::IMAGE_FORMAT_PROPERTIES_2 => Some("IMAGE_FORMAT_PROPERTIES_2"),
Self::PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2 => {
Some("PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2")
}
Self::QUEUE_FAMILY_PROPERTIES_2 => Some("QUEUE_FAMILY_PROPERTIES_2"),
Self::PHYSICAL_DEVICE_MEMORY_PROPERTIES_2 => {
Some("PHYSICAL_DEVICE_MEMORY_PROPERTIES_2")
}
Self::SPARSE_IMAGE_FORMAT_PROPERTIES_2 => Some("SPARSE_IMAGE_FORMAT_PROPERTIES_2"),
Self::PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2 => {
Some("PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2")
}
Self::PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES => {
Some("PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES")
}
Self::RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO => {
Some("RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO")
}
Self::IMAGE_VIEW_USAGE_CREATE_INFO => Some("IMAGE_VIEW_USAGE_CREATE_INFO"),
Self::PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO => {
Some("PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO")
}
Self::RENDER_PASS_MULTIVIEW_CREATE_INFO => Some("RENDER_PASS_MULTIVIEW_CREATE_INFO"),
Self::PHYSICAL_DEVICE_MULTIVIEW_FEATURES => Some("PHYSICAL_DEVICE_MULTIVIEW_FEATURES"),
Self::PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES => {
Some("PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES")
}
Self::PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES => {
Some("PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES")
}
Self::PROTECTED_SUBMIT_INFO => Some("PROTECTED_SUBMIT_INFO"),
Self::PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES => {
Some("PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES")
}
Self::PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES => {
Some("PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES")
}
Self::DEVICE_QUEUE_INFO_2 => Some("DEVICE_QUEUE_INFO_2"),
Self::SAMPLER_YCBCR_CONVERSION_CREATE_INFO => {
Some("SAMPLER_YCBCR_CONVERSION_CREATE_INFO")
}
Self::SAMPLER_YCBCR_CONVERSION_INFO => Some("SAMPLER_YCBCR_CONVERSION_INFO"),
Self::BIND_IMAGE_PLANE_MEMORY_INFO => Some("BIND_IMAGE_PLANE_MEMORY_INFO"),
Self::IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO => {
Some("IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO")
}
Self::PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES => {
Some("PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES")
}
Self::SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES => {
Some("SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES")
}
Self::DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO => {
Some("DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO")
}
Self::PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO => {
Some("PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO")
}
Self::EXTERNAL_IMAGE_FORMAT_PROPERTIES => Some("EXTERNAL_IMAGE_FORMAT_PROPERTIES"),
Self::PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO => {
Some("PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO")
}
Self::EXTERNAL_BUFFER_PROPERTIES => Some("EXTERNAL_BUFFER_PROPERTIES"),
Self::PHYSICAL_DEVICE_ID_PROPERTIES => Some("PHYSICAL_DEVICE_ID_PROPERTIES"),
Self::EXTERNAL_MEMORY_BUFFER_CREATE_INFO => Some("EXTERNAL_MEMORY_BUFFER_CREATE_INFO"),
Self::EXTERNAL_MEMORY_IMAGE_CREATE_INFO => Some("EXTERNAL_MEMORY_IMAGE_CREATE_INFO"),
Self::EXPORT_MEMORY_ALLOCATE_INFO => Some("EXPORT_MEMORY_ALLOCATE_INFO"),
Self::PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO => {
Some("PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO")
}
Self::EXTERNAL_FENCE_PROPERTIES => Some("EXTERNAL_FENCE_PROPERTIES"),
Self::EXPORT_FENCE_CREATE_INFO => Some("EXPORT_FENCE_CREATE_INFO"),
Self::EXPORT_SEMAPHORE_CREATE_INFO => Some("EXPORT_SEMAPHORE_CREATE_INFO"),
Self::PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO => {
Some("PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO")
}
Self::EXTERNAL_SEMAPHORE_PROPERTIES => Some("EXTERNAL_SEMAPHORE_PROPERTIES"),
Self::PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES => {
Some("PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES")
}
Self::DESCRIPTOR_SET_LAYOUT_SUPPORT => Some("DESCRIPTOR_SET_LAYOUT_SUPPORT"),
Self::PHYSICAL_DEVICE_SHADER_DRAW_PARAMETER_FEATURES => {
Some("PHYSICAL_DEVICE_SHADER_DRAW_PARAMETER_FEATURES")
}
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
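// Illustrative sketch (not part of the generated bindings): enum-style newtypes
// such as `StructureType` print the matching constant name when the value is
// known, and fall back to formatting the raw inner value (`self.0`) otherwise.
#[cfg(test)]
mod structure_type_debug_sketch {
    use super::*;

    #[test]
    fn known_variant_is_named() {
        // `APPLICATION_INFO` is matched explicitly in the impl above, so the
        // Debug output is exactly the constant name.
        assert_eq!(
            format!("{:?}", StructureType::APPLICATION_INFO),
            "APPLICATION_INFO"
        );
    }
}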
impl fmt::Debug for SubgroupFeatureFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[
(SubgroupFeatureFlags::BASIC.0, "BASIC"),
(SubgroupFeatureFlags::VOTE.0, "VOTE"),
(SubgroupFeatureFlags::ARITHMETIC.0, "ARITHMETIC"),
(SubgroupFeatureFlags::BALLOT.0, "BALLOT"),
(SubgroupFeatureFlags::SHUFFLE.0, "SHUFFLE"),
(SubgroupFeatureFlags::SHUFFLE_RELATIVE.0, "SHUFFLE_RELATIVE"),
(SubgroupFeatureFlags::CLUSTERED.0, "CLUSTERED"),
(SubgroupFeatureFlags::QUAD.0, "QUAD"),
(SubgroupFeatureFlags::PARTITIONED_NV.0, "PARTITIONED_NV"),
];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for SubpassContents {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::INLINE => Some("INLINE"),
Self::SECONDARY_COMMAND_BUFFERS => Some("SECONDARY_COMMAND_BUFFERS"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for SubpassDescriptionFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[
(
SubpassDescriptionFlags::PER_VIEW_ATTRIBUTES_NVX.0,
"PER_VIEW_ATTRIBUTES_NVX",
),
(
SubpassDescriptionFlags::PER_VIEW_POSITION_X_ONLY_NVX.0,
"PER_VIEW_POSITION_X_ONLY_NVX",
),
];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for SurfaceCounterFlagsEXT {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[(SurfaceCounterFlagsEXT::VBLANK.0, "VBLANK")];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for SurfaceTransformFlagsKHR {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[
(SurfaceTransformFlagsKHR::IDENTITY.0, "IDENTITY"),
(SurfaceTransformFlagsKHR::ROTATE_90.0, "ROTATE_90"),
(SurfaceTransformFlagsKHR::ROTATE_180.0, "ROTATE_180"),
(SurfaceTransformFlagsKHR::ROTATE_270.0, "ROTATE_270"),
(
SurfaceTransformFlagsKHR::HORIZONTAL_MIRROR.0,
"HORIZONTAL_MIRROR",
),
(
SurfaceTransformFlagsKHR::HORIZONTAL_MIRROR_ROTATE_90.0,
"HORIZONTAL_MIRROR_ROTATE_90",
),
(
SurfaceTransformFlagsKHR::HORIZONTAL_MIRROR_ROTATE_180.0,
"HORIZONTAL_MIRROR_ROTATE_180",
),
(
SurfaceTransformFlagsKHR::HORIZONTAL_MIRROR_ROTATE_270.0,
"HORIZONTAL_MIRROR_ROTATE_270",
),
(SurfaceTransformFlagsKHR::INHERIT.0, "INHERIT"),
];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for SwapchainCreateFlagsKHR {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[
(
SwapchainCreateFlagsKHR::SPLIT_INSTANCE_BIND_REGIONS.0,
"SPLIT_INSTANCE_BIND_REGIONS",
),
(SwapchainCreateFlagsKHR::PROTECTED.0, "PROTECTED"),
(SwapchainCreateFlagsKHR::MUTABLE_FORMAT.0, "MUTABLE_FORMAT"),
];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for SystemAllocationScope {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::COMMAND => Some("COMMAND"),
Self::OBJECT => Some("OBJECT"),
Self::CACHE => Some("CACHE"),
Self::DEVICE => Some("DEVICE"),
Self::INSTANCE => Some("INSTANCE"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for TessellationDomainOrigin {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::UPPER_LEFT => Some("UPPER_LEFT"),
Self::LOWER_LEFT => Some("LOWER_LEFT"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for TimeDomainEXT {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::DEVICE => Some("DEVICE"),
Self::CLOCK_MONOTONIC => Some("CLOCK_MONOTONIC"),
Self::CLOCK_MONOTONIC_RAW => Some("CLOCK_MONOTONIC_RAW"),
Self::QUERY_PERFORMANCE_COUNTER => Some("QUERY_PERFORMANCE_COUNTER"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for ValidationCacheCreateFlagsEXT {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for ValidationCacheHeaderVersionEXT {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::ONE => Some("ONE"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for ValidationCheckEXT {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::ALL => Some("ALL"),
Self::SHADERS => Some("SHADERS"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for ValidationFeatureDisableEXT {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::ALL => Some("ALL"),
Self::SHADERS => Some("SHADERS"),
Self::THREAD_SAFETY => Some("THREAD_SAFETY"),
Self::API_PARAMETERS => Some("API_PARAMETERS"),
Self::OBJECT_LIFETIMES => Some("OBJECT_LIFETIMES"),
Self::CORE_CHECKS => Some("CORE_CHECKS"),
Self::UNIQUE_HANDLES => Some("UNIQUE_HANDLES"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for ValidationFeatureEnableEXT {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::GPU_ASSISTED => Some("GPU_ASSISTED"),
Self::GPU_ASSISTED_RESERVE_BINDING_SLOT => Some("GPU_ASSISTED_RESERVE_BINDING_SLOT"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for VendorId {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::VIV => Some("VIV"),
Self::VSI => Some("VSI"),
Self::KAZAN => Some("KAZAN"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for VertexInputRate {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::VERTEX => Some("VERTEX"),
Self::INSTANCE => Some("INSTANCE"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for ViSurfaceCreateFlagsNN {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for ViewportCoordinateSwizzleNV {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
Self::POSITIVE_X => Some("POSITIVE_X"),
Self::NEGATIVE_X => Some("NEGATIVE_X"),
Self::POSITIVE_Y => Some("POSITIVE_Y"),
Self::NEGATIVE_Y => Some("NEGATIVE_Y"),
Self::POSITIVE_Z => Some("POSITIVE_Z"),
Self::NEGATIVE_Z => Some("NEGATIVE_Z"),
Self::POSITIVE_W => Some("POSITIVE_W"),
Self::NEGATIVE_W => Some("NEGATIVE_W"),
_ => None,
};
if let Some(x) = name {
f.write_str(x)
} else {
self.0.fmt(f)
}
}
}
impl fmt::Debug for WaylandSurfaceCreateFlagsKHR {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for Win32SurfaceCreateFlagsKHR {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for XcbSurfaceCreateFlagsKHR {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[];
debug_flags(f, KNOWN, self.0)
}
}
impl fmt::Debug for XlibSurfaceCreateFlagsKHR {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
const KNOWN: &[(Flags, &str)] = &[];
debug_flags(f, KNOWN, self.0)
}
}
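// Type aliases mapping the `*KHR` extension-era names onto the corresponding
// core types (these extensions were promoted to Vulkan 1.1), so code written
// against the KHR names keeps compiling unchanged.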
pub type DescriptorUpdateTemplateCreateFlagsKHR = DescriptorUpdateTemplateCreateFlags;
pub type PeerMemoryFeatureFlagsKHR = PeerMemoryFeatureFlags;
pub type MemoryAllocateFlagsKHR = MemoryAllocateFlags;
pub type CommandPoolTrimFlagsKHR = CommandPoolTrimFlags;
pub type ExternalMemoryHandleTypeFlagsKHR = ExternalMemoryHandleTypeFlags;
pub type ExternalMemoryFeatureFlagsKHR = ExternalMemoryFeatureFlags;
pub type ExternalSemaphoreHandleTypeFlagsKHR = ExternalSemaphoreHandleTypeFlags;
pub type ExternalSemaphoreFeatureFlagsKHR = ExternalSemaphoreFeatureFlags;
pub type SemaphoreImportFlagsKHR = SemaphoreImportFlags;
pub type ExternalFenceHandleTypeFlagsKHR = ExternalFenceHandleTypeFlags;
pub type ExternalFenceFeatureFlagsKHR = ExternalFenceFeatureFlags;
pub type FenceImportFlagsKHR = FenceImportFlags;
pub type DescriptorUpdateTemplateKHR = DescriptorUpdateTemplate;
pub type SamplerYcbcrConversionKHR = SamplerYcbcrConversion;
pub type DescriptorUpdateTemplateTypeKHR = DescriptorUpdateTemplateType;
pub type PointClippingBehaviorKHR = PointClippingBehavior;
pub type TessellationDomainOriginKHR = TessellationDomainOrigin;
pub type SamplerYcbcrModelConversionKHR = SamplerYcbcrModelConversion;
pub type SamplerYcbcrRangeKHR = SamplerYcbcrRange;
pub type ChromaLocationKHR = ChromaLocation;
pub type PhysicalDeviceFeatures2KHR = PhysicalDeviceFeatures2;
pub type PhysicalDeviceProperties2KHR = PhysicalDeviceProperties2;
pub type FormatProperties2KHR = FormatProperties2;
pub type ImageFormatProperties2KHR = ImageFormatProperties2;
pub type PhysicalDeviceImageFormatInfo2KHR = PhysicalDeviceImageFormatInfo2;
pub type QueueFamilyProperties2KHR = QueueFamilyProperties2;
pub type PhysicalDeviceMemoryProperties2KHR = PhysicalDeviceMemoryProperties2;
pub type SparseImageFormatProperties2KHR = SparseImageFormatProperties2;
pub type PhysicalDeviceSparseImageFormatInfo2KHR = PhysicalDeviceSparseImageFormatInfo2;
pub type PhysicalDeviceVariablePointerFeaturesKHR = PhysicalDeviceVariablePointerFeatures;
pub type ExternalMemoryPropertiesKHR = ExternalMemoryProperties;
pub type PhysicalDeviceExternalImageFormatInfoKHR = PhysicalDeviceExternalImageFormatInfo;
pub type ExternalImageFormatPropertiesKHR = ExternalImageFormatProperties;
pub type PhysicalDeviceExternalBufferInfoKHR = PhysicalDeviceExternalBufferInfo;
pub type ExternalBufferPropertiesKHR = ExternalBufferProperties;
pub type PhysicalDeviceIDPropertiesKHR = PhysicalDeviceIDProperties;
pub type ExternalMemoryImageCreateInfoKHR = ExternalMemoryImageCreateInfo;
pub type ExternalMemoryBufferCreateInfoKHR = ExternalMemoryBufferCreateInfo;
pub type ExportMemoryAllocateInfoKHR = ExportMemoryAllocateInfo;
pub type PhysicalDeviceExternalSemaphoreInfoKHR = PhysicalDeviceExternalSemaphoreInfo;
pub type ExternalSemaphorePropertiesKHR = ExternalSemaphoreProperties;
pub type ExportSemaphoreCreateInfoKHR = ExportSemaphoreCreateInfo;
pub type PhysicalDeviceExternalFenceInfoKHR = PhysicalDeviceExternalFenceInfo;
pub type ExternalFencePropertiesKHR = ExternalFenceProperties;
pub type ExportFenceCreateInfoKHR = ExportFenceCreateInfo;
pub type PhysicalDeviceMultiviewFeaturesKHR = PhysicalDeviceMultiviewFeatures;
pub type PhysicalDeviceMultiviewPropertiesKHR = PhysicalDeviceMultiviewProperties;
pub type RenderPassMultiviewCreateInfoKHR = RenderPassMultiviewCreateInfo;
pub type PhysicalDeviceGroupPropertiesKHR = PhysicalDeviceGroupProperties;
pub type MemoryAllocateFlagsInfoKHR = MemoryAllocateFlagsInfo;
pub type BindBufferMemoryInfoKHR = BindBufferMemoryInfo;
pub type BindBufferMemoryDeviceGroupInfoKHR = BindBufferMemoryDeviceGroupInfo;
pub type BindImageMemoryInfoKHR = BindImageMemoryInfo;
pub type BindImageMemoryDeviceGroupInfoKHR = BindImageMemoryDeviceGroupInfo;
pub type DeviceGroupRenderPassBeginInfoKHR = DeviceGroupRenderPassBeginInfo;
pub type DeviceGroupCommandBufferBeginInfoKHR = DeviceGroupCommandBufferBeginInfo;
pub type DeviceGroupSubmitInfoKHR = DeviceGroupSubmitInfo;
pub type DeviceGroupBindSparseInfoKHR = DeviceGroupBindSparseInfo;
pub type DeviceGroupDeviceCreateInfoKHR = DeviceGroupDeviceCreateInfo;
pub type DescriptorUpdateTemplateEntryKHR = DescriptorUpdateTemplateEntry;
pub type DescriptorUpdateTemplateCreateInfoKHR = DescriptorUpdateTemplateCreateInfo;
pub type InputAttachmentAspectReferenceKHR = InputAttachmentAspectReference;
pub type RenderPassInputAttachmentAspectCreateInfoKHR = RenderPassInputAttachmentAspectCreateInfo;
pub type PhysicalDevice16BitStorageFeaturesKHR = PhysicalDevice16BitStorageFeatures;
pub type BufferMemoryRequirementsInfo2KHR = BufferMemoryRequirementsInfo2;
pub type ImageMemoryRequirementsInfo2KHR = ImageMemoryRequirementsInfo2;
pub type ImageSparseMemoryRequirementsInfo2KHR = ImageSparseMemoryRequirementsInfo2;
pub type MemoryRequirements2KHR = MemoryRequirements2;
pub type SparseImageMemoryRequirements2KHR = SparseImageMemoryRequirements2;
pub type PhysicalDevicePointClippingPropertiesKHR = PhysicalDevicePointClippingProperties;
pub type MemoryDedicatedRequirementsKHR = MemoryDedicatedRequirements;
pub type MemoryDedicatedAllocateInfoKHR = MemoryDedicatedAllocateInfo;
pub type ImageViewUsageCreateInfoKHR = ImageViewUsageCreateInfo;
pub type PipelineTessellationDomainOriginStateCreateInfoKHR =
PipelineTessellationDomainOriginStateCreateInfo;
pub type SamplerYcbcrConversionInfoKHR = SamplerYcbcrConversionInfo;
pub type SamplerYcbcrConversionCreateInfoKHR = SamplerYcbcrConversionCreateInfo;
pub type BindImagePlaneMemoryInfoKHR = BindImagePlaneMemoryInfo;
pub type ImagePlaneMemoryRequirementsInfoKHR = ImagePlaneMemoryRequirementsInfo;
pub type PhysicalDeviceSamplerYcbcrConversionFeaturesKHR =
PhysicalDeviceSamplerYcbcrConversionFeatures;
pub type SamplerYcbcrConversionImageFormatPropertiesKHR =
SamplerYcbcrConversionImageFormatProperties;
pub type PhysicalDeviceMaintenance3PropertiesKHR = PhysicalDeviceMaintenance3Properties;
pub type DescriptorSetLayoutSupportKHR = DescriptorSetLayoutSupport;