// Copyright (c) 2015-2019 The Khronos Group Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// ---- Exceptions to the Apache 2.0 License: ----
//
// As an exception, if you use this Software to generate code and portions of
// this Software are embedded into the generated code as a result, you may
// redistribute such product without providing attribution as would otherwise
// be required by Sections 4(a), 4(b) and 4(d) of the License.
//
// In addition, if you combine or link code generated by this Software with
// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
// ("`Combined Software`") and if a court of competent jurisdiction determines
// that the patent provision (Section 3), the indemnity provision (Section 9)
// or other Section of the License conflicts with the conditions of the
// applicable GPL or LGPL license, you may retroactively and prospectively
// choose to deem waived or otherwise exclude such Section(s) of the License,
// but only in their entirety and only with respect to the Combined Software.
//

// This header is generated from the Khronos Vulkan XML API Registry.

#pragma once

#include "../handles.hpp"
#include "VkAcquire.hpp"
#include "VkAcceleration.hpp"
#include "VkApplication.hpp"
#include "VkAllocation.hpp"
#include "VkBind.hpp"
#include "VkAndroid.hpp"
#include "VkBase.hpp"
#include "VkAttachment.hpp"
#include "VkBuffer.hpp"

namespace VULKAN_HPP_NAMESPACE
{
  struct BufferCopy
  {
    VULKAN_HPP_CONSTEXPR BufferCopy( VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ = {},
                                     VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ = {},
                                     VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT
      : srcOffset( srcOffset_ )
      , dstOffset( dstOffset_ )
      , size( size_ )
    {}

    VULKAN_HPP_CONSTEXPR BufferCopy( BufferCopy const& rhs ) VULKAN_HPP_NOEXCEPT
      : srcOffset( rhs.srcOffset )
      , dstOffset( rhs.dstOffset )
      , size( rhs.size )
    {}

    BufferCopy & operator=( BufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( static_cast<void*>(this), &rhs, sizeof( BufferCopy ) );
      return *this;
    }

    BufferCopy( VkBufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    BufferCopy& operator=( VkBufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferCopy const *>(&rhs);
      return *this;
    }

    BufferCopy & setSrcOffset( VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      srcOffset = srcOffset_;
      return *this;
    }

    BufferCopy & setDstOffset( VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      dstOffset = dstOffset_;
      return *this;
    }

    BufferCopy & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
    {
      size = size_;
      return *this;
    }

    operator VkBufferCopy const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBufferCopy*>( this );
    }

    operator VkBufferCopy &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBufferCopy*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( BufferCopy const& ) const = default;
#else
    bool operator==( BufferCopy const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( srcOffset == rhs.srcOffset )
          && ( dstOffset == rhs.dstOffset )
          && ( size == rhs.size );
    }

    bool operator!=( BufferCopy const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::DeviceSize srcOffset = {};
    VULKAN_HPP_NAMESPACE::DeviceSize dstOffset = {};
    VULKAN_HPP_NAMESPACE::DeviceSize size = {};
  };
  static_assert( sizeof( BufferCopy ) == sizeof( VkBufferCopy ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<BufferCopy>::value, "struct wrapper is not a standard layout!" );
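  // Illustrative usage sketch, not part of the generated header: a BufferCopy
  // region can be built with the fluent setters and recorded with
  // CommandBuffer::copyBuffer. The commandBuffer, srcBuffer and dstBuffer
  // handles below are assumed to exist, and VULKAN_HPP_NAMESPACE is assumed to
  // be the default `vk`.
  //
  //   vk::BufferCopy region{};
  //   region.setSrcOffset( 0 )
  //         .setDstOffset( 0 )
  //         .setSize( 256 );
  //   commandBuffer.copyBuffer( srcBuffer, dstBuffer, region );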

  struct BufferCreateInfo
  {
    VULKAN_HPP_CONSTEXPR BufferCreateInfo( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ = {},
                                           VULKAN_HPP_NAMESPACE::DeviceSize size_ = {},
                                           VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ = {},
                                           VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive,
                                           uint32_t queueFamilyIndexCount_ = {},
                                           const uint32_t* pQueueFamilyIndices_ = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , size( size_ )
      , usage( usage_ )
      , sharingMode( sharingMode_ )
      , queueFamilyIndexCount( queueFamilyIndexCount_ )
      , pQueueFamilyIndices( pQueueFamilyIndices_ )
    {}

    VULKAN_HPP_CONSTEXPR BufferCreateInfo( BufferCreateInfo const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , flags( rhs.flags )
      , size( rhs.size )
      , usage( rhs.usage )
      , sharingMode( rhs.sharingMode )
      , queueFamilyIndexCount( rhs.queueFamilyIndexCount )
      , pQueueFamilyIndices( rhs.pQueueFamilyIndices )
    {}

    BufferCreateInfo & operator=( BufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( BufferCreateInfo ) - offsetof( BufferCreateInfo, pNext ) );
      return *this;
    }

    BufferCreateInfo( VkBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    BufferCreateInfo& operator=( VkBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferCreateInfo const *>(&rhs);
      return *this;
    }

    BufferCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    BufferCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    BufferCreateInfo & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
    {
      size = size_;
      return *this;
    }

    BufferCreateInfo & setUsage( VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
    {
      usage = usage_;
      return *this;
    }

    BufferCreateInfo & setSharingMode( VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ ) VULKAN_HPP_NOEXCEPT
    {
      sharingMode = sharingMode_;
      return *this;
    }

    BufferCreateInfo & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT
    {
      queueFamilyIndexCount = queueFamilyIndexCount_;
      return *this;
    }

    BufferCreateInfo & setPQueueFamilyIndices( const uint32_t* pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
    {
      pQueueFamilyIndices = pQueueFamilyIndices_;
      return *this;
    }

    operator VkBufferCreateInfo const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBufferCreateInfo*>( this );
    }

    operator VkBufferCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBufferCreateInfo*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( BufferCreateInfo const& ) const = default;
#else
    bool operator==( BufferCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( flags == rhs.flags )
          && ( size == rhs.size )
          && ( usage == rhs.usage )
          && ( sharingMode == rhs.sharingMode )
          && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount )
          && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices );
    }

    bool operator!=( BufferCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCreateInfo;
    const void* pNext = {};
    VULKAN_HPP_NAMESPACE::BufferCreateFlags flags = {};
    VULKAN_HPP_NAMESPACE::DeviceSize size = {};
    VULKAN_HPP_NAMESPACE::BufferUsageFlags usage = {};
    VULKAN_HPP_NAMESPACE::SharingMode sharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive;
    uint32_t queueFamilyIndexCount = {};
    const uint32_t* pQueueFamilyIndices = {};
  };
  static_assert( sizeof( BufferCreateInfo ) == sizeof( VkBufferCreateInfo ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<BufferCreateInfo>::value, "struct wrapper is not a standard layout!" );
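  // Illustrative usage sketch, not part of the generated header: creating a
  // buffer through the fluent setters, assuming an existing vk::Device handle
  // named `device` and the default `vk` namespace. With exceptions enabled the
  // call below returns the created vk::Buffer directly.
  //
  //   auto bufferInfo = vk::BufferCreateInfo{}
  //                         .setSize( 4096 )
  //                         .setUsage( vk::BufferUsageFlagBits::eTransferSrc )
  //                         .setSharingMode( vk::SharingMode::eExclusive );
  //   vk::Buffer buffer = device.createBuffer( bufferInfo );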

  struct BufferDeviceAddressCreateInfoEXT
  {
    VULKAN_HPP_CONSTEXPR BufferDeviceAddressCreateInfoEXT( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} ) VULKAN_HPP_NOEXCEPT
      : deviceAddress( deviceAddress_ )
    {}

    VULKAN_HPP_CONSTEXPR BufferDeviceAddressCreateInfoEXT( BufferDeviceAddressCreateInfoEXT const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , deviceAddress( rhs.deviceAddress )
    {}

    BufferDeviceAddressCreateInfoEXT & operator=( BufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( BufferDeviceAddressCreateInfoEXT ) - offsetof( BufferDeviceAddressCreateInfoEXT, pNext ) );
      return *this;
    }

    BufferDeviceAddressCreateInfoEXT( VkBufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    BufferDeviceAddressCreateInfoEXT& operator=( VkBufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferDeviceAddressCreateInfoEXT const *>(&rhs);
      return *this;
    }

    BufferDeviceAddressCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    BufferDeviceAddressCreateInfoEXT & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceAddress = deviceAddress_;
      return *this;
    }

    operator VkBufferDeviceAddressCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBufferDeviceAddressCreateInfoEXT*>( this );
    }

    operator VkBufferDeviceAddressCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBufferDeviceAddressCreateInfoEXT*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( BufferDeviceAddressCreateInfoEXT const& ) const = default;
#else
    bool operator==( BufferDeviceAddressCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( deviceAddress == rhs.deviceAddress );
    }

    bool operator!=( BufferDeviceAddressCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferDeviceAddressCreateInfoEXT;
    const void* pNext = {};
    VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {};
  };
  static_assert( sizeof( BufferDeviceAddressCreateInfoEXT ) == sizeof( VkBufferDeviceAddressCreateInfoEXT ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<BufferDeviceAddressCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );

  struct BufferDeviceAddressInfoEXT
  {
    VULKAN_HPP_CONSTEXPR BufferDeviceAddressInfoEXT( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {} ) VULKAN_HPP_NOEXCEPT
      : buffer( buffer_ )
    {}

    VULKAN_HPP_CONSTEXPR BufferDeviceAddressInfoEXT( BufferDeviceAddressInfoEXT const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , buffer( rhs.buffer )
    {}

    BufferDeviceAddressInfoEXT & operator=( BufferDeviceAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( BufferDeviceAddressInfoEXT ) - offsetof( BufferDeviceAddressInfoEXT, pNext ) );
      return *this;
    }

    BufferDeviceAddressInfoEXT( VkBufferDeviceAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    BufferDeviceAddressInfoEXT& operator=( VkBufferDeviceAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfoEXT const *>(&rhs);
      return *this;
    }

    BufferDeviceAddressInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    BufferDeviceAddressInfoEXT & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
    {
      buffer = buffer_;
      return *this;
    }

    operator VkBufferDeviceAddressInfoEXT const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBufferDeviceAddressInfoEXT*>( this );
    }

    operator VkBufferDeviceAddressInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBufferDeviceAddressInfoEXT*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( BufferDeviceAddressInfoEXT const& ) const = default;
#else
    bool operator==( BufferDeviceAddressInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( buffer == rhs.buffer );
    }

    bool operator!=( BufferDeviceAddressInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferDeviceAddressInfoEXT;
    const void* pNext = {};
    VULKAN_HPP_NAMESPACE::Buffer buffer = {};
  };
  static_assert( sizeof( BufferDeviceAddressInfoEXT ) == sizeof( VkBufferDeviceAddressInfoEXT ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<BufferDeviceAddressInfoEXT>::value, "struct wrapper is not a standard layout!" );
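  // Illustrative usage sketch, not part of the generated header: querying the
  // device address of a buffer created for VK_EXT_buffer_device_address. The
  // `device` and `buffer` handles are assumed to exist, the extension is
  // assumed to be enabled, and the wrapper call name is assumed to follow the
  // usual vulkan.hpp convention for the vkGetBufferDeviceAddressEXT entry point.
  //
  //   vk::BufferDeviceAddressInfoEXT addressInfo{ buffer };
  //   vk::DeviceAddress address = device.getBufferAddressEXT( addressInfo );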

  struct ImageSubresourceLayers
  {
    VULKAN_HPP_CONSTEXPR ImageSubresourceLayers( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {},
                                                 uint32_t mipLevel_ = {},
                                                 uint32_t baseArrayLayer_ = {},
                                                 uint32_t layerCount_ = {} ) VULKAN_HPP_NOEXCEPT
      : aspectMask( aspectMask_ )
      , mipLevel( mipLevel_ )
      , baseArrayLayer( baseArrayLayer_ )
      , layerCount( layerCount_ )
    {}

    VULKAN_HPP_CONSTEXPR ImageSubresourceLayers( ImageSubresourceLayers const& rhs ) VULKAN_HPP_NOEXCEPT
      : aspectMask( rhs.aspectMask )
      , mipLevel( rhs.mipLevel )
      , baseArrayLayer( rhs.baseArrayLayer )
      , layerCount( rhs.layerCount )
    {}

    ImageSubresourceLayers & operator=( ImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( static_cast<void*>(this), &rhs, sizeof( ImageSubresourceLayers ) );
      return *this;
    }

    ImageSubresourceLayers( VkImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    ImageSubresourceLayers& operator=( VkImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const *>(&rhs);
      return *this;
    }

    ImageSubresourceLayers & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
    {
      aspectMask = aspectMask_;
      return *this;
    }

    ImageSubresourceLayers & setMipLevel( uint32_t mipLevel_ ) VULKAN_HPP_NOEXCEPT
    {
      mipLevel = mipLevel_;
      return *this;
    }

    ImageSubresourceLayers & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT
    {
      baseArrayLayer = baseArrayLayer_;
      return *this;
    }

    ImageSubresourceLayers & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT
    {
      layerCount = layerCount_;
      return *this;
    }

    operator VkImageSubresourceLayers const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageSubresourceLayers*>( this );
    }

    operator VkImageSubresourceLayers &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageSubresourceLayers*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( ImageSubresourceLayers const& ) const = default;
#else
    bool operator==( ImageSubresourceLayers const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( aspectMask == rhs.aspectMask )
          && ( mipLevel == rhs.mipLevel )
          && ( baseArrayLayer == rhs.baseArrayLayer )
          && ( layerCount == rhs.layerCount );
    }

    bool operator!=( ImageSubresourceLayers const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
    uint32_t mipLevel = {};
    uint32_t baseArrayLayer = {};
    uint32_t layerCount = {};
  };
  static_assert( sizeof( ImageSubresourceLayers ) == sizeof( VkImageSubresourceLayers ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ImageSubresourceLayers>::value, "struct wrapper is not a standard layout!" );

  struct BufferImageCopy
  {
    VULKAN_HPP_CONSTEXPR BufferImageCopy( VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ = {},
                                          uint32_t bufferRowLength_ = {},
                                          uint32_t bufferImageHeight_ = {},
                                          VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ = {},
                                          VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = {},
                                          VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = {} ) VULKAN_HPP_NOEXCEPT
      : bufferOffset( bufferOffset_ )
      , bufferRowLength( bufferRowLength_ )
      , bufferImageHeight( bufferImageHeight_ )
      , imageSubresource( imageSubresource_ )
      , imageOffset( imageOffset_ )
      , imageExtent( imageExtent_ )
    {}

    VULKAN_HPP_CONSTEXPR BufferImageCopy( BufferImageCopy const& rhs ) VULKAN_HPP_NOEXCEPT
      : bufferOffset( rhs.bufferOffset )
      , bufferRowLength( rhs.bufferRowLength )
      , bufferImageHeight( rhs.bufferImageHeight )
      , imageSubresource( rhs.imageSubresource )
      , imageOffset( rhs.imageOffset )
      , imageExtent( rhs.imageExtent )
    {}

    BufferImageCopy & operator=( BufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( static_cast<void*>(this), &rhs, sizeof( BufferImageCopy ) );
      return *this;
    }

    BufferImageCopy( VkBufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    BufferImageCopy& operator=( VkBufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferImageCopy const *>(&rhs);
      return *this;
    }

    BufferImageCopy & setBufferOffset( VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      bufferOffset = bufferOffset_;
      return *this;
    }

    BufferImageCopy & setBufferRowLength( uint32_t bufferRowLength_ ) VULKAN_HPP_NOEXCEPT
    {
      bufferRowLength = bufferRowLength_;
      return *this;
    }

    BufferImageCopy & setBufferImageHeight( uint32_t bufferImageHeight_ ) VULKAN_HPP_NOEXCEPT
    {
      bufferImageHeight = bufferImageHeight_;
      return *this;
    }

    BufferImageCopy & setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ ) VULKAN_HPP_NOEXCEPT
    {
      imageSubresource = imageSubresource_;
      return *this;
    }

    BufferImageCopy & setImageOffset( VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      imageOffset = imageOffset_;
      return *this;
    }

    BufferImageCopy & setImageExtent( VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ ) VULKAN_HPP_NOEXCEPT
    {
      imageExtent = imageExtent_;
      return *this;
    }

    operator VkBufferImageCopy const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBufferImageCopy*>( this );
    }

    operator VkBufferImageCopy &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBufferImageCopy*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( BufferImageCopy const& ) const = default;
#else
    bool operator==( BufferImageCopy const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( bufferOffset == rhs.bufferOffset )
          && ( bufferRowLength == rhs.bufferRowLength )
          && ( bufferImageHeight == rhs.bufferImageHeight )
          && ( imageSubresource == rhs.imageSubresource )
          && ( imageOffset == rhs.imageOffset )
          && ( imageExtent == rhs.imageExtent );
    }

    bool operator!=( BufferImageCopy const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset = {};
    uint32_t bufferRowLength = {};
    uint32_t bufferImageHeight = {};
    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource = {};
    VULKAN_HPP_NAMESPACE::Offset3D imageOffset = {};
    VULKAN_HPP_NAMESPACE::Extent3D imageExtent = {};
  };
  static_assert( sizeof( BufferImageCopy ) == sizeof( VkBufferImageCopy ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<BufferImageCopy>::value, "struct wrapper is not a standard layout!" );
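  // Illustrative usage sketch, not part of the generated header: describing a
  // tightly packed upload of mip level 0 with ImageSubresourceLayers and
  // BufferImageCopy, then recording it with CommandBuffer::copyBufferToImage.
  // The commandBuffer, stagingBuffer and image handles and the 256x256 extent
  // are assumptions made for the example.
  //
  //   vk::BufferImageCopy region = vk::BufferImageCopy{}
  //       .setBufferOffset( 0 )
  //       .setBufferRowLength( 0 )    // 0 = rows are tightly packed
  //       .setBufferImageHeight( 0 )  // 0 = layers are tightly packed
  //       .setImageSubresource( vk::ImageSubresourceLayers{ vk::ImageAspectFlagBits::eColor, 0, 0, 1 } )
  //       .setImageOffset( { 0, 0, 0 } )
  //       .setImageExtent( { 256, 256, 1 } );
  //   commandBuffer.copyBufferToImage( stagingBuffer, image, vk::ImageLayout::eTransferDstOptimal, region );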

  struct BufferMemoryBarrier
  {
    VULKAN_HPP_CONSTEXPR BufferMemoryBarrier( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {},
                                              VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {},
                                              uint32_t srcQueueFamilyIndex_ = {},
                                              uint32_t dstQueueFamilyIndex_ = {},
                                              VULKAN_HPP_NAMESPACE::Buffer buffer_ = {},
                                              VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {},
                                              VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT
      : srcAccessMask( srcAccessMask_ )
      , dstAccessMask( dstAccessMask_ )
      , srcQueueFamilyIndex( srcQueueFamilyIndex_ )
      , dstQueueFamilyIndex( dstQueueFamilyIndex_ )
      , buffer( buffer_ )
      , offset( offset_ )
      , size( size_ )
    {}

    VULKAN_HPP_CONSTEXPR BufferMemoryBarrier( BufferMemoryBarrier const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , srcAccessMask( rhs.srcAccessMask )
      , dstAccessMask( rhs.dstAccessMask )
      , srcQueueFamilyIndex( rhs.srcQueueFamilyIndex )
      , dstQueueFamilyIndex( rhs.dstQueueFamilyIndex )
      , buffer( rhs.buffer )
      , offset( rhs.offset )
      , size( rhs.size )
    {}

    BufferMemoryBarrier & operator=( BufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( BufferMemoryBarrier ) - offsetof( BufferMemoryBarrier, pNext ) );
      return *this;
    }

    BufferMemoryBarrier( VkBufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    BufferMemoryBarrier& operator=( VkBufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferMemoryBarrier const *>(&rhs);
      return *this;
    }

    BufferMemoryBarrier & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    BufferMemoryBarrier & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
    {
      srcAccessMask = srcAccessMask_;
      return *this;
    }

    BufferMemoryBarrier & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
    {
      dstAccessMask = dstAccessMask_;
      return *this;
    }

    BufferMemoryBarrier & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      srcQueueFamilyIndex = srcQueueFamilyIndex_;
      return *this;
    }

    BufferMemoryBarrier & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      dstQueueFamilyIndex = dstQueueFamilyIndex_;
      return *this;
    }

    BufferMemoryBarrier & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
    {
      buffer = buffer_;
      return *this;
    }

    BufferMemoryBarrier & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
    {
      offset = offset_;
      return *this;
    }

    BufferMemoryBarrier & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
    {
      size = size_;
      return *this;
    }

    operator VkBufferMemoryBarrier const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBufferMemoryBarrier*>( this );
    }

    operator VkBufferMemoryBarrier &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBufferMemoryBarrier*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( BufferMemoryBarrier const& ) const = default;
#else
    bool operator==( BufferMemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( srcAccessMask == rhs.srcAccessMask )
          && ( dstAccessMask == rhs.dstAccessMask )
          && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex )
          && ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex )
          && ( buffer == rhs.buffer )
          && ( offset == rhs.offset )
          && ( size == rhs.size );
    }

    bool operator!=( BufferMemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferMemoryBarrier;
    const void* pNext = {};
    VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {};
    VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {};
    uint32_t srcQueueFamilyIndex = {};
    uint32_t dstQueueFamilyIndex = {};
    VULKAN_HPP_NAMESPACE::Buffer buffer = {};
    VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
    VULKAN_HPP_NAMESPACE::DeviceSize size = {};
  };
  static_assert( sizeof( BufferMemoryBarrier ) == sizeof( VkBufferMemoryBarrier ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<BufferMemoryBarrier>::value, "struct wrapper is not a standard layout!" );
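  // Illustrative usage sketch, not part of the generated header: making a
  // transfer write to `buffer` visible to subsequent shader reads on the same
  // queue family. The commandBuffer and buffer handles are assumed to exist.
  //
  //   auto barrier = vk::BufferMemoryBarrier{}
  //                      .setSrcAccessMask( vk::AccessFlagBits::eTransferWrite )
  //                      .setDstAccessMask( vk::AccessFlagBits::eShaderRead )
  //                      .setSrcQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
  //                      .setDstQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
  //                      .setBuffer( buffer )
  //                      .setOffset( 0 )
  //                      .setSize( VK_WHOLE_SIZE );
  //   commandBuffer.pipelineBarrier( vk::PipelineStageFlagBits::eTransfer,
  //                                  vk::PipelineStageFlagBits::eComputeShader,
  //                                  {}, nullptr, barrier, nullptr );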

  struct BufferMemoryRequirementsInfo2
  {
    VULKAN_HPP_CONSTEXPR BufferMemoryRequirementsInfo2( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {} ) VULKAN_HPP_NOEXCEPT
      : buffer( buffer_ )
    {}

    VULKAN_HPP_CONSTEXPR BufferMemoryRequirementsInfo2( BufferMemoryRequirementsInfo2 const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , buffer( rhs.buffer )
    {}

    BufferMemoryRequirementsInfo2 & operator=( BufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( BufferMemoryRequirementsInfo2 ) - offsetof( BufferMemoryRequirementsInfo2, pNext ) );
      return *this;
    }

    BufferMemoryRequirementsInfo2( VkBufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    BufferMemoryRequirementsInfo2& operator=( VkBufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 const *>(&rhs);
      return *this;
    }

    BufferMemoryRequirementsInfo2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    BufferMemoryRequirementsInfo2 & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
    {
      buffer = buffer_;
      return *this;
    }

    operator VkBufferMemoryRequirementsInfo2 const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBufferMemoryRequirementsInfo2*>( this );
    }

    operator VkBufferMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBufferMemoryRequirementsInfo2*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( BufferMemoryRequirementsInfo2 const& ) const = default;
#else
    bool operator==( BufferMemoryRequirementsInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( buffer == rhs.buffer );
    }

    bool operator!=( BufferMemoryRequirementsInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferMemoryRequirementsInfo2;
    const void* pNext = {};
    VULKAN_HPP_NAMESPACE::Buffer buffer = {};
  };
  static_assert( sizeof( BufferMemoryRequirementsInfo2 ) == sizeof( VkBufferMemoryRequirementsInfo2 ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<BufferMemoryRequirementsInfo2>::value, "struct wrapper is not a standard layout!" );
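  // Illustrative usage sketch, not part of the generated header: querying
  // extended memory requirements for a buffer (core in Vulkan 1.1), assuming
  // an existing `device` and `buffer`.
  //
  //   vk::BufferMemoryRequirementsInfo2 requirementsInfo{ buffer };
  //   vk::MemoryRequirements2 requirements = device.getBufferMemoryRequirements2( requirementsInfo );
  //   vk::DeviceSize allocationSize = requirements.memoryRequirements.size;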

  struct BufferViewCreateInfo
  {
    VULKAN_HPP_CONSTEXPR BufferViewCreateInfo( VULKAN_HPP_NAMESPACE::BufferViewCreateFlags flags_ = {},
                                               VULKAN_HPP_NAMESPACE::Buffer buffer_ = {},
                                               VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
                                               VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {},
                                               VULKAN_HPP_NAMESPACE::DeviceSize range_ = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , buffer( buffer_ )
      , format( format_ )
      , offset( offset_ )
      , range( range_ )
    {}

    VULKAN_HPP_CONSTEXPR BufferViewCreateInfo( BufferViewCreateInfo const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , flags( rhs.flags )
      , buffer( rhs.buffer )
      , format( rhs.format )
      , offset( rhs.offset )
      , range( rhs.range )
    {}

    BufferViewCreateInfo & operator=( BufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( BufferViewCreateInfo ) - offsetof( BufferViewCreateInfo, pNext ) );
      return *this;
    }

    BufferViewCreateInfo( VkBufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    BufferViewCreateInfo& operator=( VkBufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferViewCreateInfo const *>(&rhs);
      return *this;
    }

    BufferViewCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    BufferViewCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::BufferViewCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    BufferViewCreateInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
    {
      buffer = buffer_;
      return *this;
    }

    BufferViewCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
    {
      format = format_;
      return *this;
    }

    BufferViewCreateInfo & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
    {
      offset = offset_;
      return *this;
    }

    BufferViewCreateInfo & setRange( VULKAN_HPP_NAMESPACE::DeviceSize range_ ) VULKAN_HPP_NOEXCEPT
    {
      range = range_;
      return *this;
    }

    operator VkBufferViewCreateInfo const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBufferViewCreateInfo*>( this );
    }

    operator VkBufferViewCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBufferViewCreateInfo*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( BufferViewCreateInfo const& ) const = default;
#else
    bool operator==( BufferViewCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( flags == rhs.flags )
          && ( buffer == rhs.buffer )
          && ( format == rhs.format )
          && ( offset == rhs.offset )
          && ( range == rhs.range );
    }

    bool operator!=( BufferViewCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferViewCreateInfo;
    const void* pNext = {};
    VULKAN_HPP_NAMESPACE::BufferViewCreateFlags flags = {};
    VULKAN_HPP_NAMESPACE::Buffer buffer = {};
    VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
    VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
    VULKAN_HPP_NAMESPACE::DeviceSize range = {};
  };
  static_assert( sizeof( BufferViewCreateInfo ) == sizeof( VkBufferViewCreateInfo ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<BufferViewCreateInfo>::value, "struct wrapper is not a standard layout!" );
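  // Illustrative usage sketch, not part of the generated header: creating a
  // buffer view over a texel buffer, assuming an existing `device` and a
  // `buffer` created with a uniform/storage texel buffer usage flag.
  //
  //   auto viewInfo = vk::BufferViewCreateInfo{}
  //                       .setBuffer( buffer )
  //                       .setFormat( vk::Format::eR32G32B32A32Sfloat )
  //                       .setOffset( 0 )
  //                       .setRange( VK_WHOLE_SIZE );
  //   vk::BufferView bufferView = device.createBufferView( viewInfo );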

} // namespace VULKAN_HPP_NAMESPACE