// Copyright (c) 2015-2019 The Khronos Group Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// ---- Exceptions to the Apache 2.0 License: ----
//
// As an exception, if you use this Software to generate code and portions of
// this Software are embedded into the generated code as a result, you may
// redistribute such product without providing attribution as would otherwise
// be required by Sections 4(a), 4(b) and 4(d) of the License.
//
// In addition, if you combine or link code generated by this Software with
// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
// ("`Combined Software`") and if a court of competent jurisdiction determines
// that the patent provision (Section 3), the indemnity provision (Section 9)
// or other Section of the License conflicts with the conditions of the
// applicable GPL or LGPL license, you may retroactively and prospectively
// choose to deem waived or otherwise exclude such Section(s) of the License,
// but only in their entirety and only with respect to the Combined Software.
//

// This header is generated from the Khronos Vulkan XML API Registry.

#pragma once

#include "../handles.hpp"
#include "VkAcquire.hpp"
#include "VkAcceleration.hpp"
#include "VkApplication.hpp"
#include "VkAllocation.hpp"
#include "VkAndroid.hpp"
#include "VkBase.hpp"
#include "VkAttachment.hpp"
#include "VkBind.hpp"

namespace VULKAN_HPP_NAMESPACE
{
  struct BindAccelerationStructureMemoryInfoNV
  {
    VULKAN_HPP_CONSTEXPR BindAccelerationStructureMemoryInfoNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ = {},
                                                                VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {},
                                                                VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {},
                                                                uint32_t deviceIndexCount_ = {},
                                                                const uint32_t* pDeviceIndices_ = {} ) VULKAN_HPP_NOEXCEPT
      : accelerationStructure( accelerationStructure_ )
      , memory( memory_ )
      , memoryOffset( memoryOffset_ )
      , deviceIndexCount( deviceIndexCount_ )
      , pDeviceIndices( pDeviceIndices_ )
    {}

    VULKAN_HPP_CONSTEXPR BindAccelerationStructureMemoryInfoNV( BindAccelerationStructureMemoryInfoNV const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , accelerationStructure( rhs.accelerationStructure )
      , memory( rhs.memory )
      , memoryOffset( rhs.memoryOffset )
      , deviceIndexCount( rhs.deviceIndexCount )
      , pDeviceIndices( rhs.pDeviceIndices )
    {}

    BindAccelerationStructureMemoryInfoNV & operator=( BindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( BindAccelerationStructureMemoryInfoNV ) - offsetof( BindAccelerationStructureMemoryInfoNV, pNext ) );
      return *this;
    }

    BindAccelerationStructureMemoryInfoNV( VkBindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    BindAccelerationStructureMemoryInfoNV& operator=( VkBindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV const *>(&rhs);
      return *this;
    }

    BindAccelerationStructureMemoryInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    BindAccelerationStructureMemoryInfoNV & setAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ ) VULKAN_HPP_NOEXCEPT
    {
      accelerationStructure = accelerationStructure_;
      return *this;
    }

    BindAccelerationStructureMemoryInfoNV & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
    {
      memory = memory_;
      return *this;
    }

    BindAccelerationStructureMemoryInfoNV & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      memoryOffset = memoryOffset_;
      return *this;
    }

    BindAccelerationStructureMemoryInfoNV & setDeviceIndexCount( uint32_t deviceIndexCount_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceIndexCount = deviceIndexCount_;
      return *this;
    }

    BindAccelerationStructureMemoryInfoNV & setPDeviceIndices( const uint32_t* pDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
    {
      pDeviceIndices = pDeviceIndices_;
      return *this;
    }

    operator VkBindAccelerationStructureMemoryInfoNV const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBindAccelerationStructureMemoryInfoNV*>( this );
    }

    operator VkBindAccelerationStructureMemoryInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBindAccelerationStructureMemoryInfoNV*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( BindAccelerationStructureMemoryInfoNV const& ) const = default;
#else
    bool operator==( BindAccelerationStructureMemoryInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( accelerationStructure == rhs.accelerationStructure )
          && ( memory == rhs.memory )
          && ( memoryOffset == rhs.memoryOffset )
          && ( deviceIndexCount == rhs.deviceIndexCount )
          && ( pDeviceIndices == rhs.pDeviceIndices );
    }

    bool operator!=( BindAccelerationStructureMemoryInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindAccelerationStructureMemoryInfoNV;
    const void* pNext = {};
    VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure = {};
    VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
    VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {};
    uint32_t deviceIndexCount = {};
    const uint32_t* pDeviceIndices = {};
  };
  static_assert( sizeof( BindAccelerationStructureMemoryInfoNV ) == sizeof( VkBindAccelerationStructureMemoryInfoNV ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<BindAccelerationStructureMemoryInfoNV>::value, "struct wrapper is not a standard layout!" );

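  // Editorial usage sketch (not part of the generated header): the wrapper is normally
  // filled through its fluent setters and handed to Device::bindAccelerationStructureMemoryNV
  // from VK_NV_ray_tracing; `device`, `accelerationStructure` and `memory` are assumed to
  // have been created elsewhere.
  //
  //   auto bindInfo = vk::BindAccelerationStructureMemoryInfoNV{}
  //                       .setAccelerationStructure( accelerationStructure )
  //                       .setMemory( memory )
  //                       .setMemoryOffset( 0 );
  //   device.bindAccelerationStructureMemoryNV( bindInfo );
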
  struct BindBufferMemoryDeviceGroupInfo
  {
    VULKAN_HPP_CONSTEXPR BindBufferMemoryDeviceGroupInfo( uint32_t deviceIndexCount_ = {},
                                                          const uint32_t* pDeviceIndices_ = {} ) VULKAN_HPP_NOEXCEPT
      : deviceIndexCount( deviceIndexCount_ )
      , pDeviceIndices( pDeviceIndices_ )
    {}

    VULKAN_HPP_CONSTEXPR BindBufferMemoryDeviceGroupInfo( BindBufferMemoryDeviceGroupInfo const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , deviceIndexCount( rhs.deviceIndexCount )
      , pDeviceIndices( rhs.pDeviceIndices )
    {}

    BindBufferMemoryDeviceGroupInfo & operator=( BindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( BindBufferMemoryDeviceGroupInfo ) - offsetof( BindBufferMemoryDeviceGroupInfo, pNext ) );
      return *this;
    }

    BindBufferMemoryDeviceGroupInfo( VkBindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    BindBufferMemoryDeviceGroupInfo& operator=( VkBindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindBufferMemoryDeviceGroupInfo const *>(&rhs);
      return *this;
    }

    BindBufferMemoryDeviceGroupInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    BindBufferMemoryDeviceGroupInfo & setDeviceIndexCount( uint32_t deviceIndexCount_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceIndexCount = deviceIndexCount_;
      return *this;
    }

    BindBufferMemoryDeviceGroupInfo & setPDeviceIndices( const uint32_t* pDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
    {
      pDeviceIndices = pDeviceIndices_;
      return *this;
    }

    operator VkBindBufferMemoryDeviceGroupInfo const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBindBufferMemoryDeviceGroupInfo*>( this );
    }

    operator VkBindBufferMemoryDeviceGroupInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBindBufferMemoryDeviceGroupInfo*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( BindBufferMemoryDeviceGroupInfo const& ) const = default;
#else
    bool operator==( BindBufferMemoryDeviceGroupInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( deviceIndexCount == rhs.deviceIndexCount )
          && ( pDeviceIndices == rhs.pDeviceIndices );
    }

    bool operator!=( BindBufferMemoryDeviceGroupInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindBufferMemoryDeviceGroupInfo;
    const void* pNext = {};
    uint32_t deviceIndexCount = {};
    const uint32_t* pDeviceIndices = {};
  };
  static_assert( sizeof( BindBufferMemoryDeviceGroupInfo ) == sizeof( VkBindBufferMemoryDeviceGroupInfo ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<BindBufferMemoryDeviceGroupInfo>::value, "struct wrapper is not a standard layout!" );

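  // Editorial usage sketch (not part of the generated header): this structure extends
  // BindBufferMemoryInfo through its pNext chain to select the device indices of a device
  // group that the binding applies to; `deviceIndices` below is an assumed std::array.
  //
  //   std::array<uint32_t, 2> deviceIndices{ 0, 1 };
  //   auto groupInfo = vk::BindBufferMemoryDeviceGroupInfo{}
  //                        .setDeviceIndexCount( static_cast<uint32_t>( deviceIndices.size() ) )
  //                        .setPDeviceIndices( deviceIndices.data() );
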
  struct BindBufferMemoryInfo
  {
    VULKAN_HPP_CONSTEXPR BindBufferMemoryInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {},
                                               VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {},
                                               VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {} ) VULKAN_HPP_NOEXCEPT
      : buffer( buffer_ )
      , memory( memory_ )
      , memoryOffset( memoryOffset_ )
    {}

    VULKAN_HPP_CONSTEXPR BindBufferMemoryInfo( BindBufferMemoryInfo const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , buffer( rhs.buffer )
      , memory( rhs.memory )
      , memoryOffset( rhs.memoryOffset )
    {}

    BindBufferMemoryInfo & operator=( BindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( BindBufferMemoryInfo ) - offsetof( BindBufferMemoryInfo, pNext ) );
      return *this;
    }

    BindBufferMemoryInfo( VkBindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    BindBufferMemoryInfo& operator=( VkBindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo const *>(&rhs);
      return *this;
    }

    BindBufferMemoryInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    BindBufferMemoryInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
    {
      buffer = buffer_;
      return *this;
    }

    BindBufferMemoryInfo & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
    {
      memory = memory_;
      return *this;
    }

    BindBufferMemoryInfo & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      memoryOffset = memoryOffset_;
      return *this;
    }

    operator VkBindBufferMemoryInfo const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBindBufferMemoryInfo*>( this );
    }

    operator VkBindBufferMemoryInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBindBufferMemoryInfo*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( BindBufferMemoryInfo const& ) const = default;
#else
    bool operator==( BindBufferMemoryInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( buffer == rhs.buffer )
          && ( memory == rhs.memory )
          && ( memoryOffset == rhs.memoryOffset );
    }

    bool operator!=( BindBufferMemoryInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindBufferMemoryInfo;
    const void* pNext = {};
    VULKAN_HPP_NAMESPACE::Buffer buffer = {};
    VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
    VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {};
  };
  static_assert( sizeof( BindBufferMemoryInfo ) == sizeof( VkBindBufferMemoryInfo ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<BindBufferMemoryInfo>::value, "struct wrapper is not a standard layout!" );

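  // Editorial usage sketch (not part of the generated header): BindBufferMemoryInfo feeds
  // Device::bindBufferMemory2 (Vulkan 1.1 / VK_KHR_bind_memory2); `device`, `buffer` and
  // `memory` are assumed to have been created elsewhere.
  //
  //   auto bindInfo = vk::BindBufferMemoryInfo{}
  //                       .setBuffer( buffer )
  //                       .setMemory( memory )
  //                       .setMemoryOffset( 0 );
  //   device.bindBufferMemory2( bindInfo );
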
  struct Offset2D
  {
    VULKAN_HPP_CONSTEXPR Offset2D( int32_t x_ = {},
                                   int32_t y_ = {} ) VULKAN_HPP_NOEXCEPT
      : x( x_ )
      , y( y_ )
    {}

    VULKAN_HPP_CONSTEXPR Offset2D( Offset2D const& rhs ) VULKAN_HPP_NOEXCEPT
      : x( rhs.x )
      , y( rhs.y )
    {}

    Offset2D & operator=( Offset2D const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( static_cast<void*>(this), &rhs, sizeof( Offset2D ) );
      return *this;
    }

    Offset2D( VkOffset2D const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    Offset2D& operator=( VkOffset2D const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Offset2D const *>(&rhs);
      return *this;
    }

    Offset2D & setX( int32_t x_ ) VULKAN_HPP_NOEXCEPT
    {
      x = x_;
      return *this;
    }

    Offset2D & setY( int32_t y_ ) VULKAN_HPP_NOEXCEPT
    {
      y = y_;
      return *this;
    }

    operator VkOffset2D const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkOffset2D*>( this );
    }

    operator VkOffset2D &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkOffset2D*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( Offset2D const& ) const = default;
#else
    bool operator==( Offset2D const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( x == rhs.x )
          && ( y == rhs.y );
    }

    bool operator!=( Offset2D const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    int32_t x = {};
    int32_t y = {};
  };
  static_assert( sizeof( Offset2D ) == sizeof( VkOffset2D ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<Offset2D>::value, "struct wrapper is not a standard layout!" );

  struct Rect2D
  {
    VULKAN_HPP_CONSTEXPR Rect2D( VULKAN_HPP_NAMESPACE::Offset2D offset_ = {},
                                 VULKAN_HPP_NAMESPACE::Extent2D extent_ = {} ) VULKAN_HPP_NOEXCEPT
      : offset( offset_ )
      , extent( extent_ )
    {}

    VULKAN_HPP_CONSTEXPR Rect2D( Rect2D const& rhs ) VULKAN_HPP_NOEXCEPT
      : offset( rhs.offset )
      , extent( rhs.extent )
    {}

    Rect2D & operator=( Rect2D const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( static_cast<void*>(this), &rhs, sizeof( Rect2D ) );
      return *this;
    }

    Rect2D( VkRect2D const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    Rect2D& operator=( VkRect2D const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Rect2D const *>(&rhs);
      return *this;
    }

    Rect2D & setOffset( VULKAN_HPP_NAMESPACE::Offset2D offset_ ) VULKAN_HPP_NOEXCEPT
    {
      offset = offset_;
      return *this;
    }

    Rect2D & setExtent( VULKAN_HPP_NAMESPACE::Extent2D extent_ ) VULKAN_HPP_NOEXCEPT
    {
      extent = extent_;
      return *this;
    }

    operator VkRect2D const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkRect2D*>( this );
    }

    operator VkRect2D &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkRect2D*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( Rect2D const& ) const = default;
#else
    bool operator==( Rect2D const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( offset == rhs.offset )
          && ( extent == rhs.extent );
    }

    bool operator!=( Rect2D const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::Offset2D offset = {};
    VULKAN_HPP_NAMESPACE::Extent2D extent = {};
  };
  static_assert( sizeof( Rect2D ) == sizeof( VkRect2D ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<Rect2D>::value, "struct wrapper is not a standard layout!" );

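  // Editorial usage sketch (not part of the generated header): Rect2D is the usual way to
  // express scissor rectangles and render areas, e.g. when recording into an assumed
  // command buffer `cmd` with a swapchain extent `extent`:
  //
  //   cmd.setScissor( 0, vk::Rect2D{ vk::Offset2D{ 0, 0 }, extent } );
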
  struct BindImageMemoryDeviceGroupInfo
  {
    VULKAN_HPP_CONSTEXPR BindImageMemoryDeviceGroupInfo( uint32_t deviceIndexCount_ = {},
                                                         const uint32_t* pDeviceIndices_ = {},
                                                         uint32_t splitInstanceBindRegionCount_ = {},
                                                         const VULKAN_HPP_NAMESPACE::Rect2D* pSplitInstanceBindRegions_ = {} ) VULKAN_HPP_NOEXCEPT
      : deviceIndexCount( deviceIndexCount_ )
      , pDeviceIndices( pDeviceIndices_ )
      , splitInstanceBindRegionCount( splitInstanceBindRegionCount_ )
      , pSplitInstanceBindRegions( pSplitInstanceBindRegions_ )
    {}

    VULKAN_HPP_CONSTEXPR BindImageMemoryDeviceGroupInfo( BindImageMemoryDeviceGroupInfo const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , deviceIndexCount( rhs.deviceIndexCount )
      , pDeviceIndices( rhs.pDeviceIndices )
      , splitInstanceBindRegionCount( rhs.splitInstanceBindRegionCount )
      , pSplitInstanceBindRegions( rhs.pSplitInstanceBindRegions )
    {}

    BindImageMemoryDeviceGroupInfo & operator=( BindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( BindImageMemoryDeviceGroupInfo ) - offsetof( BindImageMemoryDeviceGroupInfo, pNext ) );
      return *this;
    }

    BindImageMemoryDeviceGroupInfo( VkBindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    BindImageMemoryDeviceGroupInfo& operator=( VkBindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindImageMemoryDeviceGroupInfo const *>(&rhs);
      return *this;
    }

    BindImageMemoryDeviceGroupInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    BindImageMemoryDeviceGroupInfo & setDeviceIndexCount( uint32_t deviceIndexCount_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceIndexCount = deviceIndexCount_;
      return *this;
    }

    BindImageMemoryDeviceGroupInfo & setPDeviceIndices( const uint32_t* pDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
    {
      pDeviceIndices = pDeviceIndices_;
      return *this;
    }

    BindImageMemoryDeviceGroupInfo & setSplitInstanceBindRegionCount( uint32_t splitInstanceBindRegionCount_ ) VULKAN_HPP_NOEXCEPT
    {
      splitInstanceBindRegionCount = splitInstanceBindRegionCount_;
      return *this;
    }

    BindImageMemoryDeviceGroupInfo & setPSplitInstanceBindRegions( const VULKAN_HPP_NAMESPACE::Rect2D* pSplitInstanceBindRegions_ ) VULKAN_HPP_NOEXCEPT
    {
      pSplitInstanceBindRegions = pSplitInstanceBindRegions_;
      return *this;
    }

    operator VkBindImageMemoryDeviceGroupInfo const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBindImageMemoryDeviceGroupInfo*>( this );
    }

    operator VkBindImageMemoryDeviceGroupInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBindImageMemoryDeviceGroupInfo*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( BindImageMemoryDeviceGroupInfo const& ) const = default;
#else
    bool operator==( BindImageMemoryDeviceGroupInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( deviceIndexCount == rhs.deviceIndexCount )
          && ( pDeviceIndices == rhs.pDeviceIndices )
          && ( splitInstanceBindRegionCount == rhs.splitInstanceBindRegionCount )
          && ( pSplitInstanceBindRegions == rhs.pSplitInstanceBindRegions );
    }

    bool operator!=( BindImageMemoryDeviceGroupInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImageMemoryDeviceGroupInfo;
    const void* pNext = {};
    uint32_t deviceIndexCount = {};
    const uint32_t* pDeviceIndices = {};
    uint32_t splitInstanceBindRegionCount = {};
    const VULKAN_HPP_NAMESPACE::Rect2D* pSplitInstanceBindRegions = {};
  };
  static_assert( sizeof( BindImageMemoryDeviceGroupInfo ) == sizeof( VkBindImageMemoryDeviceGroupInfo ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<BindImageMemoryDeviceGroupInfo>::value, "struct wrapper is not a standard layout!" );

  struct BindImageMemoryInfo
  {
    VULKAN_HPP_CONSTEXPR BindImageMemoryInfo( VULKAN_HPP_NAMESPACE::Image image_ = {},
                                              VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {},
                                              VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {} ) VULKAN_HPP_NOEXCEPT
      : image( image_ )
      , memory( memory_ )
      , memoryOffset( memoryOffset_ )
    {}

    VULKAN_HPP_CONSTEXPR BindImageMemoryInfo( BindImageMemoryInfo const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , image( rhs.image )
      , memory( rhs.memory )
      , memoryOffset( rhs.memoryOffset )
    {}

    BindImageMemoryInfo & operator=( BindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( BindImageMemoryInfo ) - offsetof( BindImageMemoryInfo, pNext ) );
      return *this;
    }

    BindImageMemoryInfo( VkBindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    BindImageMemoryInfo& operator=( VkBindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindImageMemoryInfo const *>(&rhs);
      return *this;
    }

    BindImageMemoryInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    BindImageMemoryInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
    {
      image = image_;
      return *this;
    }

    BindImageMemoryInfo & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
    {
      memory = memory_;
      return *this;
    }

    BindImageMemoryInfo & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      memoryOffset = memoryOffset_;
      return *this;
    }

    operator VkBindImageMemoryInfo const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBindImageMemoryInfo*>( this );
    }

    operator VkBindImageMemoryInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBindImageMemoryInfo*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( BindImageMemoryInfo const& ) const = default;
#else
    bool operator==( BindImageMemoryInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( image == rhs.image )
          && ( memory == rhs.memory )
          && ( memoryOffset == rhs.memoryOffset );
    }

    bool operator!=( BindImageMemoryInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImageMemoryInfo;
    const void* pNext = {};
    VULKAN_HPP_NAMESPACE::Image image = {};
    VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
    VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {};
  };
  static_assert( sizeof( BindImageMemoryInfo ) == sizeof( VkBindImageMemoryInfo ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<BindImageMemoryInfo>::value, "struct wrapper is not a standard layout!" );

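  // Editorial usage sketch (not part of the generated header): the image analogue of
  // BindBufferMemoryInfo, consumed by Device::bindImageMemory2; `device`, `image` and
  // `memory` are assumed to exist.
  //
  //   device.bindImageMemory2( vk::BindImageMemoryInfo{}
  //                                .setImage( image )
  //                                .setMemory( memory )
  //                                .setMemoryOffset( 0 ) );
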
  struct BindImageMemorySwapchainInfoKHR
  {
    VULKAN_HPP_CONSTEXPR BindImageMemorySwapchainInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {},
                                                          uint32_t imageIndex_ = {} ) VULKAN_HPP_NOEXCEPT
      : swapchain( swapchain_ )
      , imageIndex( imageIndex_ )
    {}

    VULKAN_HPP_CONSTEXPR BindImageMemorySwapchainInfoKHR( BindImageMemorySwapchainInfoKHR const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , swapchain( rhs.swapchain )
      , imageIndex( rhs.imageIndex )
    {}

    BindImageMemorySwapchainInfoKHR & operator=( BindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( BindImageMemorySwapchainInfoKHR ) - offsetof( BindImageMemorySwapchainInfoKHR, pNext ) );
      return *this;
    }

    BindImageMemorySwapchainInfoKHR( VkBindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    BindImageMemorySwapchainInfoKHR& operator=( VkBindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindImageMemorySwapchainInfoKHR const *>(&rhs);
      return *this;
    }

    BindImageMemorySwapchainInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    BindImageMemorySwapchainInfoKHR & setSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT
    {
      swapchain = swapchain_;
      return *this;
    }

    BindImageMemorySwapchainInfoKHR & setImageIndex( uint32_t imageIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      imageIndex = imageIndex_;
      return *this;
    }

    operator VkBindImageMemorySwapchainInfoKHR const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBindImageMemorySwapchainInfoKHR*>( this );
    }

    operator VkBindImageMemorySwapchainInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBindImageMemorySwapchainInfoKHR*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( BindImageMemorySwapchainInfoKHR const& ) const = default;
#else
    bool operator==( BindImageMemorySwapchainInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( swapchain == rhs.swapchain )
          && ( imageIndex == rhs.imageIndex );
    }

    bool operator!=( BindImageMemorySwapchainInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImageMemorySwapchainInfoKHR;
    const void* pNext = {};
    VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain = {};
    uint32_t imageIndex = {};
  };
  static_assert( sizeof( BindImageMemorySwapchainInfoKHR ) == sizeof( VkBindImageMemorySwapchainInfoKHR ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<BindImageMemorySwapchainInfoKHR>::value, "struct wrapper is not a standard layout!" );

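  // Editorial usage sketch (not part of the generated header): with VK_KHR_swapchain this
  // structure is chained into BindImageMemoryInfo::pNext so that the image is bound to the
  // swapchain image at `imageIndex` rather than to a DeviceMemory allocation; `swapchain`
  // and `image` are assumed.
  //
  //   auto swapchainBind = vk::BindImageMemorySwapchainInfoKHR{}
  //                            .setSwapchain( swapchain )
  //                            .setImageIndex( 0 );
  //   auto bindInfo = vk::BindImageMemoryInfo{}.setImage( image ).setPNext( &swapchainBind );
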
  struct BindImagePlaneMemoryInfo
  {
    VULKAN_HPP_CONSTEXPR BindImagePlaneMemoryInfo( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor ) VULKAN_HPP_NOEXCEPT
      : planeAspect( planeAspect_ )
    {}

    VULKAN_HPP_CONSTEXPR BindImagePlaneMemoryInfo( BindImagePlaneMemoryInfo const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , planeAspect( rhs.planeAspect )
    {}

    BindImagePlaneMemoryInfo & operator=( BindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( BindImagePlaneMemoryInfo ) - offsetof( BindImagePlaneMemoryInfo, pNext ) );
      return *this;
    }

    BindImagePlaneMemoryInfo( VkBindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    BindImagePlaneMemoryInfo& operator=( VkBindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindImagePlaneMemoryInfo const *>(&rhs);
      return *this;
    }

    BindImagePlaneMemoryInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    BindImagePlaneMemoryInfo & setPlaneAspect( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ ) VULKAN_HPP_NOEXCEPT
    {
      planeAspect = planeAspect_;
      return *this;
    }

    operator VkBindImagePlaneMemoryInfo const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBindImagePlaneMemoryInfo*>( this );
    }

    operator VkBindImagePlaneMemoryInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBindImagePlaneMemoryInfo*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( BindImagePlaneMemoryInfo const& ) const = default;
#else
    bool operator==( BindImagePlaneMemoryInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( planeAspect == rhs.planeAspect );
    }

    bool operator!=( BindImagePlaneMemoryInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImagePlaneMemoryInfo;
    const void* pNext = {};
    VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor;
  };
  static_assert( sizeof( BindImagePlaneMemoryInfo ) == sizeof( VkBindImagePlaneMemoryInfo ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<BindImagePlaneMemoryInfo>::value, "struct wrapper is not a standard layout!" );

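  // Editorial usage sketch (not part of the generated header): for disjoint multi-planar
  // images each plane is bound separately by chaining this structure into
  // BindImageMemoryInfo::pNext with the desired plane aspect, e.g.
  //
  //   auto planeInfo = vk::BindImagePlaneMemoryInfo{}
  //                        .setPlaneAspect( vk::ImageAspectFlagBits::ePlane0 );
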
  struct SparseMemoryBind
  {
    VULKAN_HPP_CONSTEXPR SparseMemoryBind( VULKAN_HPP_NAMESPACE::DeviceSize resourceOffset_ = {},
                                           VULKAN_HPP_NAMESPACE::DeviceSize size_ = {},
                                           VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {},
                                           VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {},
                                           VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT
      : resourceOffset( resourceOffset_ )
      , size( size_ )
      , memory( memory_ )
      , memoryOffset( memoryOffset_ )
      , flags( flags_ )
    {}

    VULKAN_HPP_CONSTEXPR SparseMemoryBind( SparseMemoryBind const& rhs ) VULKAN_HPP_NOEXCEPT
      : resourceOffset( rhs.resourceOffset )
      , size( rhs.size )
      , memory( rhs.memory )
      , memoryOffset( rhs.memoryOffset )
      , flags( rhs.flags )
    {}

    SparseMemoryBind & operator=( SparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( static_cast<void*>(this), &rhs, sizeof( SparseMemoryBind ) );
      return *this;
    }

    SparseMemoryBind( VkSparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    SparseMemoryBind& operator=( VkSparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseMemoryBind const *>(&rhs);
      return *this;
    }

    SparseMemoryBind & setResourceOffset( VULKAN_HPP_NAMESPACE::DeviceSize resourceOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      resourceOffset = resourceOffset_;
      return *this;
    }

    SparseMemoryBind & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
    {
      size = size_;
      return *this;
    }

    SparseMemoryBind & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
    {
      memory = memory_;
      return *this;
    }

    SparseMemoryBind & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      memoryOffset = memoryOffset_;
      return *this;
    }

    SparseMemoryBind & setFlags( VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    operator VkSparseMemoryBind const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSparseMemoryBind*>( this );
    }

    operator VkSparseMemoryBind &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSparseMemoryBind*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( SparseMemoryBind const& ) const = default;
#else
    bool operator==( SparseMemoryBind const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( resourceOffset == rhs.resourceOffset )
          && ( size == rhs.size )
          && ( memory == rhs.memory )
          && ( memoryOffset == rhs.memoryOffset )
          && ( flags == rhs.flags );
    }

    bool operator!=( SparseMemoryBind const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::DeviceSize resourceOffset = {};
    VULKAN_HPP_NAMESPACE::DeviceSize size = {};
    VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
    VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {};
    VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags = {};
  };
  static_assert( sizeof( SparseMemoryBind ) == sizeof( VkSparseMemoryBind ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<SparseMemoryBind>::value, "struct wrapper is not a standard layout!" );

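  // Editorial usage sketch (not part of the generated header): a SparseMemoryBind describes
  // one range of a sparse resource being (re)bound; binding memory of an assumed size
  // `bindSize` at the start of the resource could look like:
  //
  //   auto bind = vk::SparseMemoryBind{}
  //                   .setResourceOffset( 0 )
  //                   .setSize( bindSize )
  //                   .setMemory( memory )
  //                   .setMemoryOffset( 0 );
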
  struct SparseBufferMemoryBindInfo
  {
    VULKAN_HPP_CONSTEXPR SparseBufferMemoryBindInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {},
                                                     uint32_t bindCount_ = {},
                                                     const VULKAN_HPP_NAMESPACE::SparseMemoryBind* pBinds_ = {} ) VULKAN_HPP_NOEXCEPT
      : buffer( buffer_ )
      , bindCount( bindCount_ )
      , pBinds( pBinds_ )
    {}

    VULKAN_HPP_CONSTEXPR SparseBufferMemoryBindInfo( SparseBufferMemoryBindInfo const& rhs ) VULKAN_HPP_NOEXCEPT
      : buffer( rhs.buffer )
      , bindCount( rhs.bindCount )
      , pBinds( rhs.pBinds )
    {}

    SparseBufferMemoryBindInfo & operator=( SparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( static_cast<void*>(this), &rhs, sizeof( SparseBufferMemoryBindInfo ) );
      return *this;
    }

    SparseBufferMemoryBindInfo( VkSparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    SparseBufferMemoryBindInfo& operator=( VkSparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo const *>(&rhs);
      return *this;
    }

    SparseBufferMemoryBindInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
    {
      buffer = buffer_;
      return *this;
    }

    SparseBufferMemoryBindInfo & setBindCount( uint32_t bindCount_ ) VULKAN_HPP_NOEXCEPT
    {
      bindCount = bindCount_;
      return *this;
    }

    SparseBufferMemoryBindInfo & setPBinds( const VULKAN_HPP_NAMESPACE::SparseMemoryBind* pBinds_ ) VULKAN_HPP_NOEXCEPT
    {
      pBinds = pBinds_;
      return *this;
    }

    operator VkSparseBufferMemoryBindInfo const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSparseBufferMemoryBindInfo*>( this );
    }

    operator VkSparseBufferMemoryBindInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSparseBufferMemoryBindInfo*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( SparseBufferMemoryBindInfo const& ) const = default;
#else
    bool operator==( SparseBufferMemoryBindInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( buffer == rhs.buffer )
          && ( bindCount == rhs.bindCount )
          && ( pBinds == rhs.pBinds );
    }

    bool operator!=( SparseBufferMemoryBindInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::Buffer buffer = {};
    uint32_t bindCount = {};
    const VULKAN_HPP_NAMESPACE::SparseMemoryBind* pBinds = {};
  };
  static_assert( sizeof( SparseBufferMemoryBindInfo ) == sizeof( VkSparseBufferMemoryBindInfo ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<SparseBufferMemoryBindInfo>::value, "struct wrapper is not a standard layout!" );

  struct SparseImageOpaqueMemoryBindInfo
  {
    VULKAN_HPP_CONSTEXPR SparseImageOpaqueMemoryBindInfo( VULKAN_HPP_NAMESPACE::Image image_ = {},
                                                          uint32_t bindCount_ = {},
                                                          const VULKAN_HPP_NAMESPACE::SparseMemoryBind* pBinds_ = {} ) VULKAN_HPP_NOEXCEPT
      : image( image_ )
      , bindCount( bindCount_ )
      , pBinds( pBinds_ )
    {}

    VULKAN_HPP_CONSTEXPR SparseImageOpaqueMemoryBindInfo( SparseImageOpaqueMemoryBindInfo const& rhs ) VULKAN_HPP_NOEXCEPT
      : image( rhs.image )
      , bindCount( rhs.bindCount )
      , pBinds( rhs.pBinds )
    {}

    SparseImageOpaqueMemoryBindInfo & operator=( SparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( static_cast<void*>(this), &rhs, sizeof( SparseImageOpaqueMemoryBindInfo ) );
      return *this;
    }

    SparseImageOpaqueMemoryBindInfo( VkSparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    SparseImageOpaqueMemoryBindInfo& operator=( VkSparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo const *>(&rhs);
      return *this;
    }

    SparseImageOpaqueMemoryBindInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
    {
      image = image_;
      return *this;
    }

    SparseImageOpaqueMemoryBindInfo & setBindCount( uint32_t bindCount_ ) VULKAN_HPP_NOEXCEPT
    {
      bindCount = bindCount_;
      return *this;
    }

    SparseImageOpaqueMemoryBindInfo & setPBinds( const VULKAN_HPP_NAMESPACE::SparseMemoryBind* pBinds_ ) VULKAN_HPP_NOEXCEPT
    {
      pBinds = pBinds_;
      return *this;
    }

    operator VkSparseImageOpaqueMemoryBindInfo const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSparseImageOpaqueMemoryBindInfo*>( this );
    }

    operator VkSparseImageOpaqueMemoryBindInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSparseImageOpaqueMemoryBindInfo*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( SparseImageOpaqueMemoryBindInfo const& ) const = default;
#else
    bool operator==( SparseImageOpaqueMemoryBindInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( image == rhs.image )
          && ( bindCount == rhs.bindCount )
          && ( pBinds == rhs.pBinds );
    }

    bool operator!=( SparseImageOpaqueMemoryBindInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::Image image = {};
    uint32_t bindCount = {};
    const VULKAN_HPP_NAMESPACE::SparseMemoryBind* pBinds = {};
  };
  static_assert( sizeof( SparseImageOpaqueMemoryBindInfo ) == sizeof( VkSparseImageOpaqueMemoryBindInfo ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<SparseImageOpaqueMemoryBindInfo>::value, "struct wrapper is not a standard layout!" );

  struct ImageSubresource
  {
    VULKAN_HPP_CONSTEXPR ImageSubresource( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {},
                                           uint32_t mipLevel_ = {},
                                           uint32_t arrayLayer_ = {} ) VULKAN_HPP_NOEXCEPT
      : aspectMask( aspectMask_ )
      , mipLevel( mipLevel_ )
      , arrayLayer( arrayLayer_ )
    {}

    VULKAN_HPP_CONSTEXPR ImageSubresource( ImageSubresource const& rhs ) VULKAN_HPP_NOEXCEPT
      : aspectMask( rhs.aspectMask )
      , mipLevel( rhs.mipLevel )
      , arrayLayer( rhs.arrayLayer )
    {}

    ImageSubresource & operator=( ImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( static_cast<void*>(this), &rhs, sizeof( ImageSubresource ) );
      return *this;
    }

    ImageSubresource( VkImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    ImageSubresource& operator=( VkImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSubresource const *>(&rhs);
      return *this;
    }

    ImageSubresource & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
    {
      aspectMask = aspectMask_;
      return *this;
    }

    ImageSubresource & setMipLevel( uint32_t mipLevel_ ) VULKAN_HPP_NOEXCEPT
    {
      mipLevel = mipLevel_;
      return *this;
    }

    ImageSubresource & setArrayLayer( uint32_t arrayLayer_ ) VULKAN_HPP_NOEXCEPT
    {
      arrayLayer = arrayLayer_;
      return *this;
    }

    operator VkImageSubresource const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageSubresource*>( this );
    }

    operator VkImageSubresource &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageSubresource*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( ImageSubresource const& ) const = default;
#else
    bool operator==( ImageSubresource const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( aspectMask == rhs.aspectMask )
          && ( mipLevel == rhs.mipLevel )
          && ( arrayLayer == rhs.arrayLayer );
    }

    bool operator!=( ImageSubresource const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
    uint32_t mipLevel = {};
    uint32_t arrayLayer = {};
  };
  static_assert( sizeof( ImageSubresource ) == sizeof( VkImageSubresource ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ImageSubresource>::value, "struct wrapper is not a standard layout!" );

  struct Offset3D
  {
    VULKAN_HPP_CONSTEXPR Offset3D( int32_t x_ = {},
                                   int32_t y_ = {},
                                   int32_t z_ = {} ) VULKAN_HPP_NOEXCEPT
      : x( x_ )
      , y( y_ )
      , z( z_ )
    {}

    VULKAN_HPP_CONSTEXPR Offset3D( Offset3D const& rhs ) VULKAN_HPP_NOEXCEPT
      : x( rhs.x )
      , y( rhs.y )
      , z( rhs.z )
    {}

    explicit Offset3D( Offset2D const& offset2D,
                       int32_t z_ = {} )
      : x( offset2D.x )
      , y( offset2D.y )
      , z( z_ )
    {}

    Offset3D & operator=( Offset3D const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( static_cast<void*>(this), &rhs, sizeof( Offset3D ) );
      return *this;
    }

    Offset3D( VkOffset3D const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    Offset3D& operator=( VkOffset3D const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Offset3D const *>(&rhs);
      return *this;
    }

    Offset3D & setX( int32_t x_ ) VULKAN_HPP_NOEXCEPT
    {
      x = x_;
      return *this;
    }

    Offset3D & setY( int32_t y_ ) VULKAN_HPP_NOEXCEPT
    {
      y = y_;
      return *this;
    }

    Offset3D & setZ( int32_t z_ ) VULKAN_HPP_NOEXCEPT
    {
      z = z_;
      return *this;
    }

    operator VkOffset3D const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkOffset3D*>( this );
    }

    operator VkOffset3D &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkOffset3D*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( Offset3D const& ) const = default;
#else
    bool operator==( Offset3D const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( x == rhs.x )
          && ( y == rhs.y )
          && ( z == rhs.z );
    }

    bool operator!=( Offset3D const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    int32_t x = {};
    int32_t y = {};
    int32_t z = {};
  };
  static_assert( sizeof( Offset3D ) == sizeof( VkOffset3D ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<Offset3D>::value, "struct wrapper is not a standard layout!" );

  struct Extent3D
  {
    VULKAN_HPP_CONSTEXPR Extent3D( uint32_t width_ = {},
                                   uint32_t height_ = {},
                                   uint32_t depth_ = {} ) VULKAN_HPP_NOEXCEPT
      : width( width_ )
      , height( height_ )
      , depth( depth_ )
    {}

    VULKAN_HPP_CONSTEXPR Extent3D( Extent3D const& rhs ) VULKAN_HPP_NOEXCEPT
      : width( rhs.width )
      , height( rhs.height )
      , depth( rhs.depth )
    {}

    explicit Extent3D( Extent2D const& extent2D,
                       uint32_t depth_ = {} )
      : width( extent2D.width )
      , height( extent2D.height )
      , depth( depth_ )
    {}

    Extent3D & operator=( Extent3D const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( static_cast<void*>(this), &rhs, sizeof( Extent3D ) );
      return *this;
    }

    Extent3D( VkExtent3D const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    Extent3D& operator=( VkExtent3D const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Extent3D const *>(&rhs);
      return *this;
    }

    Extent3D & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
    {
      width = width_;
      return *this;
    }

    Extent3D & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
    {
      height = height_;
      return *this;
    }

    Extent3D & setDepth( uint32_t depth_ ) VULKAN_HPP_NOEXCEPT
    {
      depth = depth_;
      return *this;
    }

    operator VkExtent3D const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkExtent3D*>( this );
    }

    operator VkExtent3D &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkExtent3D*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( Extent3D const& ) const = default;
#else
    bool operator==( Extent3D const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( width == rhs.width )
          && ( height == rhs.height )
          && ( depth == rhs.depth );
    }

    bool operator!=( Extent3D const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint32_t width = {};
    uint32_t height = {};
    uint32_t depth = {};
  };
  static_assert( sizeof( Extent3D ) == sizeof( VkExtent3D ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<Extent3D>::value, "struct wrapper is not a standard layout!" );

  struct SparseImageMemoryBind
  {
    VULKAN_HPP_CONSTEXPR SparseImageMemoryBind( VULKAN_HPP_NAMESPACE::ImageSubresource subresource_ = {},
                                                VULKAN_HPP_NAMESPACE::Offset3D offset_ = {},
                                                VULKAN_HPP_NAMESPACE::Extent3D extent_ = {},
                                                VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {},
                                                VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {},
                                                VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT
      : subresource( subresource_ )
      , offset( offset_ )
      , extent( extent_ )
      , memory( memory_ )
      , memoryOffset( memoryOffset_ )
      , flags( flags_ )
    {}

    VULKAN_HPP_CONSTEXPR SparseImageMemoryBind( SparseImageMemoryBind const& rhs ) VULKAN_HPP_NOEXCEPT
      : subresource( rhs.subresource )
      , offset( rhs.offset )
      , extent( rhs.extent )
      , memory( rhs.memory )
      , memoryOffset( rhs.memoryOffset )
      , flags( rhs.flags )
    {}

    SparseImageMemoryBind & operator=( SparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( static_cast<void*>(this), &rhs, sizeof( SparseImageMemoryBind ) );
      return *this;
    }

    SparseImageMemoryBind( VkSparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    SparseImageMemoryBind& operator=( VkSparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageMemoryBind const *>(&rhs);
      return *this;
    }

    SparseImageMemoryBind & setSubresource( VULKAN_HPP_NAMESPACE::ImageSubresource subresource_ ) VULKAN_HPP_NOEXCEPT
    {
      subresource = subresource_;
      return *this;
    }

    SparseImageMemoryBind & setOffset( VULKAN_HPP_NAMESPACE::Offset3D offset_ ) VULKAN_HPP_NOEXCEPT
    {
      offset = offset_;
      return *this;
    }

    SparseImageMemoryBind & setExtent( VULKAN_HPP_NAMESPACE::Extent3D extent_ ) VULKAN_HPP_NOEXCEPT
    {
      extent = extent_;
      return *this;
    }

    SparseImageMemoryBind & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
    {
      memory = memory_;
      return *this;
    }

    SparseImageMemoryBind & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      memoryOffset = memoryOffset_;
      return *this;
    }

    SparseImageMemoryBind & setFlags( VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    operator VkSparseImageMemoryBind const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSparseImageMemoryBind*>( this );
    }

    operator VkSparseImageMemoryBind &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSparseImageMemoryBind*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( SparseImageMemoryBind const& ) const = default;
#else
    bool operator==( SparseImageMemoryBind const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( subresource == rhs.subresource )
          && ( offset == rhs.offset )
          && ( extent == rhs.extent )
          && ( memory == rhs.memory )
          && ( memoryOffset == rhs.memoryOffset )
          && ( flags == rhs.flags );
    }

    bool operator!=( SparseImageMemoryBind const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::ImageSubresource subresource = {};
    VULKAN_HPP_NAMESPACE::Offset3D offset = {};
    VULKAN_HPP_NAMESPACE::Extent3D extent = {};
    VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
    VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {};
    VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags = {};
  };
  static_assert( sizeof( SparseImageMemoryBind ) == sizeof( VkSparseImageMemoryBind ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<SparseImageMemoryBind>::value, "struct wrapper is not a standard layout!" );

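  // Editorial usage sketch (not part of the generated header): SparseImageMemoryBind rebinds
  // a single tile region of one subresource; the 128x128x1 extent below is only an assumed
  // example, the real granularity comes from the image's sparse memory requirements.
  //
  //   auto imageBind = vk::SparseImageMemoryBind{}
  //                        .setSubresource( vk::ImageSubresource{ vk::ImageAspectFlagBits::eColor, 0, 0 } )
  //                        .setOffset( vk::Offset3D{ 0, 0, 0 } )
  //                        .setExtent( vk::Extent3D{ 128, 128, 1 } )
  //                        .setMemory( memory )
  //                        .setMemoryOffset( 0 );
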
struct SparseImageMemoryBindInfo
|
||
|
{
|
||
|
VULKAN_HPP_CONSTEXPR SparseImageMemoryBindInfo( VULKAN_HPP_NAMESPACE::Image image_ = {},
|
||
|
uint32_t bindCount_ = {},
|
||
|
const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind* pBinds_ = {} ) VULKAN_HPP_NOEXCEPT
|
||
|
: image( image_ )
|
||
|
, bindCount( bindCount_ )
|
||
|
, pBinds( pBinds_ )
|
||
|
{}
|
||
|
|
||
|
VULKAN_HPP_CONSTEXPR SparseImageMemoryBindInfo( SparseImageMemoryBindInfo const& rhs ) VULKAN_HPP_NOEXCEPT
|
||
|
: image( rhs.image )
|
||
|
, bindCount( rhs.bindCount )
|
||
|
, pBinds( rhs.pBinds )
|
||
|
{}
|
||
|
|
||
|
SparseImageMemoryBindInfo & operator=( SparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
||
|
{
|
||
|
memcpy( static_cast<void*>(this), &rhs, sizeof( SparseImageMemoryBindInfo ) );
|
||
|
return *this;
|
||
|
}
|
||
|
|
||
|
SparseImageMemoryBindInfo( VkSparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
||
|
{
|
||
|
*this = rhs;
|
||
|
}
|
||
|
|
||
|
SparseImageMemoryBindInfo& operator=( VkSparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
||
|
{
|
||
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo const *>(&rhs);
|
||
|
return *this;
|
||
|
}
|
||
|
|
||
|
SparseImageMemoryBindInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
|
||
|
{
|
||
|
image = image_;
|
||
|
return *this;
|
||
|
}
|
||
|
|
||
|
SparseImageMemoryBindInfo & setBindCount( uint32_t bindCount_ ) VULKAN_HPP_NOEXCEPT
|
||
|
{
|
||
|
bindCount = bindCount_;
|
||
|
return *this;
|
||
|
}
|
||
|
|
||
|
SparseImageMemoryBindInfo & setPBinds( const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind* pBinds_ ) VULKAN_HPP_NOEXCEPT
|
||
|
{
|
||
|
pBinds = pBinds_;
|
||
|
return *this;
|
||
|
}
|
||
|
|
||
|
operator VkSparseImageMemoryBindInfo const&() const VULKAN_HPP_NOEXCEPT
|
||
|
{
|
||
|
return *reinterpret_cast<const VkSparseImageMemoryBindInfo*>( this );
|
||
|
}
|
||
|
|
||
|
operator VkSparseImageMemoryBindInfo &() VULKAN_HPP_NOEXCEPT
|
||
|
{
|
||
|
return *reinterpret_cast<VkSparseImageMemoryBindInfo*>( this );
|
||
|
}
|
||
|
|
||
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
||
|
auto operator<=>( SparseImageMemoryBindInfo const& ) const = default;
|
||
|
#else
|
||
|
bool operator==( SparseImageMemoryBindInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
|
||
|
{
|
||
|
return ( image == rhs.image )
|
||
|
&& ( bindCount == rhs.bindCount )
|
||
|
&& ( pBinds == rhs.pBinds );
|
||
|
}
|
||
|
|
||
|
bool operator!=( SparseImageMemoryBindInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
|
||
|
{
|
||
|
return !operator==( rhs );
|
||
|
}
|
||
|
#endif
|
||
|
|
||
|
public:
|
||
|
VULKAN_HPP_NAMESPACE::Image image = {};
|
||
|
uint32_t bindCount = {};
|
||
|
const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind* pBinds = {};
|
||
|
};
|
||
|
static_assert( sizeof( SparseImageMemoryBindInfo ) == sizeof( VkSparseImageMemoryBindInfo ), "struct and wrapper have different size!" );
|
||
|
static_assert( std::is_standard_layout<SparseImageMemoryBindInfo>::value, "struct wrapper is not a standard layout!" );
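  // Usage sketch (illustrative only, not part of the generated header): a SparseImageMemoryBindInfo
  // groups an array of SparseImageMemoryBind entries with the image they apply to. "image" and
  // "bind" below are assumed caller-side objects, and the default "vk" namespace alias is assumed.
  //
  //   vk::SparseImageMemoryBindInfo imageBindInfo = vk::SparseImageMemoryBindInfo{}
  //       .setImage( image )
  //       .setBindCount( 1 )
  //       .setPBinds( &bind );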

  struct BindSparseInfo
  {
    VULKAN_HPP_CONSTEXPR BindSparseInfo( uint32_t waitSemaphoreCount_ = {},
                                         const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores_ = {},
                                         uint32_t bufferBindCount_ = {},
                                         const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo* pBufferBinds_ = {},
                                         uint32_t imageOpaqueBindCount_ = {},
                                         const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds_ = {},
                                         uint32_t imageBindCount_ = {},
                                         const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo* pImageBinds_ = {},
                                         uint32_t signalSemaphoreCount_ = {},
                                         const VULKAN_HPP_NAMESPACE::Semaphore* pSignalSemaphores_ = {} ) VULKAN_HPP_NOEXCEPT
      : waitSemaphoreCount( waitSemaphoreCount_ )
      , pWaitSemaphores( pWaitSemaphores_ )
      , bufferBindCount( bufferBindCount_ )
      , pBufferBinds( pBufferBinds_ )
      , imageOpaqueBindCount( imageOpaqueBindCount_ )
      , pImageOpaqueBinds( pImageOpaqueBinds_ )
      , imageBindCount( imageBindCount_ )
      , pImageBinds( pImageBinds_ )
      , signalSemaphoreCount( signalSemaphoreCount_ )
      , pSignalSemaphores( pSignalSemaphores_ )
    {}

    VULKAN_HPP_CONSTEXPR BindSparseInfo( BindSparseInfo const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , waitSemaphoreCount( rhs.waitSemaphoreCount )
      , pWaitSemaphores( rhs.pWaitSemaphores )
      , bufferBindCount( rhs.bufferBindCount )
      , pBufferBinds( rhs.pBufferBinds )
      , imageOpaqueBindCount( rhs.imageOpaqueBindCount )
      , pImageOpaqueBinds( rhs.pImageOpaqueBinds )
      , imageBindCount( rhs.imageBindCount )
      , pImageBinds( rhs.pImageBinds )
      , signalSemaphoreCount( rhs.signalSemaphoreCount )
      , pSignalSemaphores( rhs.pSignalSemaphores )
    {}

    BindSparseInfo & operator=( BindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( BindSparseInfo ) - offsetof( BindSparseInfo, pNext ) );
      return *this;
    }

    BindSparseInfo( VkBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    BindSparseInfo& operator=( VkBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindSparseInfo const *>(&rhs);
      return *this;
    }

    BindSparseInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    BindSparseInfo & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
    {
      waitSemaphoreCount = waitSemaphoreCount_;
      return *this;
    }

    BindSparseInfo & setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT
    {
      pWaitSemaphores = pWaitSemaphores_;
      return *this;
    }

    BindSparseInfo & setBufferBindCount( uint32_t bufferBindCount_ ) VULKAN_HPP_NOEXCEPT
    {
      bufferBindCount = bufferBindCount_;
      return *this;
    }

    BindSparseInfo & setPBufferBinds( const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo* pBufferBinds_ ) VULKAN_HPP_NOEXCEPT
    {
      pBufferBinds = pBufferBinds_;
      return *this;
    }

    BindSparseInfo & setImageOpaqueBindCount( uint32_t imageOpaqueBindCount_ ) VULKAN_HPP_NOEXCEPT
    {
      imageOpaqueBindCount = imageOpaqueBindCount_;
      return *this;
    }

    BindSparseInfo & setPImageOpaqueBinds( const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds_ ) VULKAN_HPP_NOEXCEPT
    {
      pImageOpaqueBinds = pImageOpaqueBinds_;
      return *this;
    }

    BindSparseInfo & setImageBindCount( uint32_t imageBindCount_ ) VULKAN_HPP_NOEXCEPT
    {
      imageBindCount = imageBindCount_;
      return *this;
    }

    BindSparseInfo & setPImageBinds( const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo* pImageBinds_ ) VULKAN_HPP_NOEXCEPT
    {
      pImageBinds = pImageBinds_;
      return *this;
    }

    BindSparseInfo & setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
    {
      signalSemaphoreCount = signalSemaphoreCount_;
      return *this;
    }

    BindSparseInfo & setPSignalSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore* pSignalSemaphores_ ) VULKAN_HPP_NOEXCEPT
    {
      pSignalSemaphores = pSignalSemaphores_;
      return *this;
    }

    operator VkBindSparseInfo const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBindSparseInfo*>( this );
    }

    operator VkBindSparseInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBindSparseInfo*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( BindSparseInfo const& ) const = default;
#else
    bool operator==( BindSparseInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( waitSemaphoreCount == rhs.waitSemaphoreCount )
          && ( pWaitSemaphores == rhs.pWaitSemaphores )
          && ( bufferBindCount == rhs.bufferBindCount )
          && ( pBufferBinds == rhs.pBufferBinds )
          && ( imageOpaqueBindCount == rhs.imageOpaqueBindCount )
          && ( pImageOpaqueBinds == rhs.pImageOpaqueBinds )
          && ( imageBindCount == rhs.imageBindCount )
          && ( pImageBinds == rhs.pImageBinds )
          && ( signalSemaphoreCount == rhs.signalSemaphoreCount )
          && ( pSignalSemaphores == rhs.pSignalSemaphores );
    }

    bool operator!=( BindSparseInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindSparseInfo;
    const void* pNext = {};
    uint32_t waitSemaphoreCount = {};
    const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores = {};
    uint32_t bufferBindCount = {};
    const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo* pBufferBinds = {};
    uint32_t imageOpaqueBindCount = {};
    const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds = {};
    uint32_t imageBindCount = {};
    const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo* pImageBinds = {};
    uint32_t signalSemaphoreCount = {};
    const VULKAN_HPP_NAMESPACE::Semaphore* pSignalSemaphores = {};
  };
  static_assert( sizeof( BindSparseInfo ) == sizeof( VkBindSparseInfo ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<BindSparseInfo>::value, "struct wrapper is not a standard layout!" );
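  // Usage sketch (illustrative only, not part of the generated header): a BindSparseInfo batch is
  // submitted to a queue that supports sparse binding, mirroring vkQueueBindSparse. "queue", "fence",
  // "semaphore", and "imageBindInfo" below are assumed caller-side objects, and the default "vk"
  // namespace alias is assumed.
  //
  //   vk::BindSparseInfo bindSparseInfo = vk::BindSparseInfo{}
  //       .setWaitSemaphoreCount( 1 )
  //       .setPWaitSemaphores( &semaphore )
  //       .setImageBindCount( 1 )
  //       .setPImageBinds( &imageBindInfo );
  //   queue.bindSparse( 1, &bindSparseInfo, fence );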
} // namespace VULKAN_HPP_NAMESPACE