// skyline/app/libraries/VulkanHpp/vulkan/internal/struct/VkDevice.hpp

// Copyright (c) 2015-2019 The Khronos Group Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// ---- Exceptions to the Apache 2.0 License: ----
//
// As an exception, if you use this Software to generate code and portions of
// this Software are embedded into the generated code as a result, you may
// redistribute such product without providing attribution as would otherwise
// be required by Sections 4(a), 4(b) and 4(d) of the License.
//
// In addition, if you combine or link code generated by this Software with
// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
// ("`Combined Software`") and if a court of competent jurisdiction determines
// that the patent provision (Section 3), the indemnity provision (Section 9)
// or other Section of the License conflicts with the conditions of the
// applicable GPL or LGPL license, you may retroactively and prospectively
// choose to deem waived or otherwise exclude such Section(s) of the License,
// but only in their entirety and only with respect to the Combined Software.
//
// This header is generated from the Khronos Vulkan XML API Registry.
#pragma once
#include "../handles.hpp"
#include "VkAcquire.hpp"
#include "VkAcceleration.hpp"
#include "VkApplication.hpp"
#include "VkAllocation.hpp"
#include "VkBind.hpp"
#include "VkCooperative.hpp"
#include "VkAndroid.hpp"
#include "VkDescriptor.hpp"
#include "VkBase.hpp"
#include "VkAttachment.hpp"
#include "VkBuffer.hpp"
#include "VkCalibrated.hpp"
#include "VkCheckpoint.hpp"
#include "VkConformance.hpp"
#include "VkClear.hpp"
#include "VkCmd.hpp"
#include "VkCoarse.hpp"
#include "VkCommand.hpp"
#include "VkComponent.hpp"
#include "VkCopy.hpp"
#include "VkCompute.hpp"
#include "VkConditional.hpp"
#include "VkD3D.hpp"
#include "VkDebug.hpp"
#include "VkDedicated.hpp"
#include "VkDevice.hpp"
namespace VULKAN_HPP_NAMESPACE
{
struct DeviceQueueCreateInfo
{
VULKAN_HPP_CONSTEXPR DeviceQueueCreateInfo( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ = {},
uint32_t queueFamilyIndex_ = {},
uint32_t queueCount_ = {},
const float* pQueuePriorities_ = {} ) VULKAN_HPP_NOEXCEPT
: flags( flags_ )
, queueFamilyIndex( queueFamilyIndex_ )
, queueCount( queueCount_ )
, pQueuePriorities( pQueuePriorities_ )
{}
VULKAN_HPP_CONSTEXPR DeviceQueueCreateInfo( DeviceQueueCreateInfo const& rhs ) VULKAN_HPP_NOEXCEPT
: pNext( rhs.pNext )
, flags( rhs.flags )
, queueFamilyIndex( rhs.queueFamilyIndex )
, queueCount( rhs.queueCount )
, pQueuePriorities( rhs.pQueuePriorities )
{}
DeviceQueueCreateInfo & operator=( DeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
memcpy( &pNext, &rhs.pNext, sizeof( DeviceQueueCreateInfo ) - offsetof( DeviceQueueCreateInfo, pNext ) );
return *this;
}
DeviceQueueCreateInfo( VkDeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
DeviceQueueCreateInfo& operator=( VkDeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo const *>(&rhs);
return *this;
}
DeviceQueueCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
DeviceQueueCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
DeviceQueueCreateInfo & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
{
queueFamilyIndex = queueFamilyIndex_;
return *this;
}
DeviceQueueCreateInfo & setQueueCount( uint32_t queueCount_ ) VULKAN_HPP_NOEXCEPT
{
queueCount = queueCount_;
return *this;
}
DeviceQueueCreateInfo & setPQueuePriorities( const float* pQueuePriorities_ ) VULKAN_HPP_NOEXCEPT
{
pQueuePriorities = pQueuePriorities_;
return *this;
}
operator VkDeviceQueueCreateInfo const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDeviceQueueCreateInfo*>( this );
}
operator VkDeviceQueueCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDeviceQueueCreateInfo*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DeviceQueueCreateInfo const& ) const = default;
#else
bool operator==( DeviceQueueCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( queueFamilyIndex == rhs.queueFamilyIndex )
&& ( queueCount == rhs.queueCount )
&& ( pQueuePriorities == rhs.pQueuePriorities );
}
bool operator!=( DeviceQueueCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueCreateInfo;
const void* pNext = {};
VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags = {};
uint32_t queueFamilyIndex = {};
uint32_t queueCount = {};
const float* pQueuePriorities = {};
};
static_assert( sizeof( DeviceQueueCreateInfo ) == sizeof( VkDeviceQueueCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DeviceQueueCreateInfo>::value, "struct wrapper is not a standard layout!" );
struct PhysicalDeviceFeatures
{
VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess_ = {},
VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32_ = {},
VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray_ = {},
VULKAN_HPP_NAMESPACE::Bool32 independentBlend_ = {},
VULKAN_HPP_NAMESPACE::Bool32 geometryShader_ = {},
VULKAN_HPP_NAMESPACE::Bool32 tessellationShader_ = {},
VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading_ = {},
VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend_ = {},
VULKAN_HPP_NAMESPACE::Bool32 logicOp_ = {},
VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect_ = {},
VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance_ = {},
VULKAN_HPP_NAMESPACE::Bool32 depthClamp_ = {},
VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp_ = {},
VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid_ = {},
VULKAN_HPP_NAMESPACE::Bool32 depthBounds_ = {},
VULKAN_HPP_NAMESPACE::Bool32 wideLines_ = {},
VULKAN_HPP_NAMESPACE::Bool32 largePoints_ = {},
VULKAN_HPP_NAMESPACE::Bool32 alphaToOne_ = {},
VULKAN_HPP_NAMESPACE::Bool32 multiViewport_ = {},
VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy_ = {},
VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2_ = {},
VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR_ = {},
VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC_ = {},
VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise_ = {},
VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery_ = {},
VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics_ = {},
VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics_ = {},
VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize_ = {},
VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended_ = {},
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats_ = {},
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample_ = {},
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat_ = {},
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat_ = {},
VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing_ = {},
VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing_ = {},
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing_ = {},
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing_ = {},
VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance_ = {},
VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance_ = {},
VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64_ = {},
VULKAN_HPP_NAMESPACE::Bool32 shaderInt64_ = {},
VULKAN_HPP_NAMESPACE::Bool32 shaderInt16_ = {},
VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency_ = {},
VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod_ = {},
VULKAN_HPP_NAMESPACE::Bool32 sparseBinding_ = {},
VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer_ = {},
VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D_ = {},
VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D_ = {},
VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples_ = {},
VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples_ = {},
VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples_ = {},
VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples_ = {},
VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased_ = {},
VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate_ = {},
VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries_ = {} ) VULKAN_HPP_NOEXCEPT
: robustBufferAccess( robustBufferAccess_ )
, fullDrawIndexUint32( fullDrawIndexUint32_ )
, imageCubeArray( imageCubeArray_ )
, independentBlend( independentBlend_ )
, geometryShader( geometryShader_ )
, tessellationShader( tessellationShader_ )
, sampleRateShading( sampleRateShading_ )
, dualSrcBlend( dualSrcBlend_ )
, logicOp( logicOp_ )
, multiDrawIndirect( multiDrawIndirect_ )
, drawIndirectFirstInstance( drawIndirectFirstInstance_ )
, depthClamp( depthClamp_ )
, depthBiasClamp( depthBiasClamp_ )
, fillModeNonSolid( fillModeNonSolid_ )
, depthBounds( depthBounds_ )
, wideLines( wideLines_ )
, largePoints( largePoints_ )
, alphaToOne( alphaToOne_ )
, multiViewport( multiViewport_ )
, samplerAnisotropy( samplerAnisotropy_ )
, textureCompressionETC2( textureCompressionETC2_ )
, textureCompressionASTC_LDR( textureCompressionASTC_LDR_ )
, textureCompressionBC( textureCompressionBC_ )
, occlusionQueryPrecise( occlusionQueryPrecise_ )
, pipelineStatisticsQuery( pipelineStatisticsQuery_ )
, vertexPipelineStoresAndAtomics( vertexPipelineStoresAndAtomics_ )
, fragmentStoresAndAtomics( fragmentStoresAndAtomics_ )
, shaderTessellationAndGeometryPointSize( shaderTessellationAndGeometryPointSize_ )
, shaderImageGatherExtended( shaderImageGatherExtended_ )
, shaderStorageImageExtendedFormats( shaderStorageImageExtendedFormats_ )
, shaderStorageImageMultisample( shaderStorageImageMultisample_ )
, shaderStorageImageReadWithoutFormat( shaderStorageImageReadWithoutFormat_ )
, shaderStorageImageWriteWithoutFormat( shaderStorageImageWriteWithoutFormat_ )
, shaderUniformBufferArrayDynamicIndexing( shaderUniformBufferArrayDynamicIndexing_ )
, shaderSampledImageArrayDynamicIndexing( shaderSampledImageArrayDynamicIndexing_ )
, shaderStorageBufferArrayDynamicIndexing( shaderStorageBufferArrayDynamicIndexing_ )
, shaderStorageImageArrayDynamicIndexing( shaderStorageImageArrayDynamicIndexing_ )
, shaderClipDistance( shaderClipDistance_ )
, shaderCullDistance( shaderCullDistance_ )
, shaderFloat64( shaderFloat64_ )
, shaderInt64( shaderInt64_ )
, shaderInt16( shaderInt16_ )
, shaderResourceResidency( shaderResourceResidency_ )
, shaderResourceMinLod( shaderResourceMinLod_ )
, sparseBinding( sparseBinding_ )
, sparseResidencyBuffer( sparseResidencyBuffer_ )
, sparseResidencyImage2D( sparseResidencyImage2D_ )
, sparseResidencyImage3D( sparseResidencyImage3D_ )
, sparseResidency2Samples( sparseResidency2Samples_ )
, sparseResidency4Samples( sparseResidency4Samples_ )
, sparseResidency8Samples( sparseResidency8Samples_ )
, sparseResidency16Samples( sparseResidency16Samples_ )
, sparseResidencyAliased( sparseResidencyAliased_ )
, variableMultisampleRate( variableMultisampleRate_ )
, inheritedQueries( inheritedQueries_ )
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures( PhysicalDeviceFeatures const& rhs ) VULKAN_HPP_NOEXCEPT
: robustBufferAccess( rhs.robustBufferAccess )
, fullDrawIndexUint32( rhs.fullDrawIndexUint32 )
, imageCubeArray( rhs.imageCubeArray )
, independentBlend( rhs.independentBlend )
, geometryShader( rhs.geometryShader )
, tessellationShader( rhs.tessellationShader )
, sampleRateShading( rhs.sampleRateShading )
, dualSrcBlend( rhs.dualSrcBlend )
, logicOp( rhs.logicOp )
, multiDrawIndirect( rhs.multiDrawIndirect )
, drawIndirectFirstInstance( rhs.drawIndirectFirstInstance )
, depthClamp( rhs.depthClamp )
, depthBiasClamp( rhs.depthBiasClamp )
, fillModeNonSolid( rhs.fillModeNonSolid )
, depthBounds( rhs.depthBounds )
, wideLines( rhs.wideLines )
, largePoints( rhs.largePoints )
, alphaToOne( rhs.alphaToOne )
, multiViewport( rhs.multiViewport )
, samplerAnisotropy( rhs.samplerAnisotropy )
, textureCompressionETC2( rhs.textureCompressionETC2 )
, textureCompressionASTC_LDR( rhs.textureCompressionASTC_LDR )
, textureCompressionBC( rhs.textureCompressionBC )
, occlusionQueryPrecise( rhs.occlusionQueryPrecise )
, pipelineStatisticsQuery( rhs.pipelineStatisticsQuery )
, vertexPipelineStoresAndAtomics( rhs.vertexPipelineStoresAndAtomics )
, fragmentStoresAndAtomics( rhs.fragmentStoresAndAtomics )
, shaderTessellationAndGeometryPointSize( rhs.shaderTessellationAndGeometryPointSize )
, shaderImageGatherExtended( rhs.shaderImageGatherExtended )
, shaderStorageImageExtendedFormats( rhs.shaderStorageImageExtendedFormats )
, shaderStorageImageMultisample( rhs.shaderStorageImageMultisample )
, shaderStorageImageReadWithoutFormat( rhs.shaderStorageImageReadWithoutFormat )
, shaderStorageImageWriteWithoutFormat( rhs.shaderStorageImageWriteWithoutFormat )
, shaderUniformBufferArrayDynamicIndexing( rhs.shaderUniformBufferArrayDynamicIndexing )
, shaderSampledImageArrayDynamicIndexing( rhs.shaderSampledImageArrayDynamicIndexing )
, shaderStorageBufferArrayDynamicIndexing( rhs.shaderStorageBufferArrayDynamicIndexing )
, shaderStorageImageArrayDynamicIndexing( rhs.shaderStorageImageArrayDynamicIndexing )
, shaderClipDistance( rhs.shaderClipDistance )
, shaderCullDistance( rhs.shaderCullDistance )
, shaderFloat64( rhs.shaderFloat64 )
, shaderInt64( rhs.shaderInt64 )
, shaderInt16( rhs.shaderInt16 )
, shaderResourceResidency( rhs.shaderResourceResidency )
, shaderResourceMinLod( rhs.shaderResourceMinLod )
, sparseBinding( rhs.sparseBinding )
, sparseResidencyBuffer( rhs.sparseResidencyBuffer )
, sparseResidencyImage2D( rhs.sparseResidencyImage2D )
, sparseResidencyImage3D( rhs.sparseResidencyImage3D )
, sparseResidency2Samples( rhs.sparseResidency2Samples )
, sparseResidency4Samples( rhs.sparseResidency4Samples )
, sparseResidency8Samples( rhs.sparseResidency8Samples )
, sparseResidency16Samples( rhs.sparseResidency16Samples )
, sparseResidencyAliased( rhs.sparseResidencyAliased )
, variableMultisampleRate( rhs.variableMultisampleRate )
, inheritedQueries( rhs.inheritedQueries )
{}
PhysicalDeviceFeatures & operator=( PhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
memcpy( static_cast<void*>(this), &rhs, sizeof( PhysicalDeviceFeatures ) );
return *this;
}
PhysicalDeviceFeatures( VkPhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
PhysicalDeviceFeatures& operator=( VkPhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures const *>(&rhs);
return *this;
}
PhysicalDeviceFeatures & setRobustBufferAccess( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess_ ) VULKAN_HPP_NOEXCEPT
{
robustBufferAccess = robustBufferAccess_;
return *this;
}
PhysicalDeviceFeatures & setFullDrawIndexUint32( VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32_ ) VULKAN_HPP_NOEXCEPT
{
fullDrawIndexUint32 = fullDrawIndexUint32_;
return *this;
}
PhysicalDeviceFeatures & setImageCubeArray( VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray_ ) VULKAN_HPP_NOEXCEPT
{
imageCubeArray = imageCubeArray_;
return *this;
}
PhysicalDeviceFeatures & setIndependentBlend( VULKAN_HPP_NAMESPACE::Bool32 independentBlend_ ) VULKAN_HPP_NOEXCEPT
{
independentBlend = independentBlend_;
return *this;
}
PhysicalDeviceFeatures & setGeometryShader( VULKAN_HPP_NAMESPACE::Bool32 geometryShader_ ) VULKAN_HPP_NOEXCEPT
{
geometryShader = geometryShader_;
return *this;
}
PhysicalDeviceFeatures & setTessellationShader( VULKAN_HPP_NAMESPACE::Bool32 tessellationShader_ ) VULKAN_HPP_NOEXCEPT
{
tessellationShader = tessellationShader_;
return *this;
}
PhysicalDeviceFeatures & setSampleRateShading( VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading_ ) VULKAN_HPP_NOEXCEPT
{
sampleRateShading = sampleRateShading_;
return *this;
}
PhysicalDeviceFeatures & setDualSrcBlend( VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend_ ) VULKAN_HPP_NOEXCEPT
{
dualSrcBlend = dualSrcBlend_;
return *this;
}
PhysicalDeviceFeatures & setLogicOp( VULKAN_HPP_NAMESPACE::Bool32 logicOp_ ) VULKAN_HPP_NOEXCEPT
{
logicOp = logicOp_;
return *this;
}
PhysicalDeviceFeatures & setMultiDrawIndirect( VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect_ ) VULKAN_HPP_NOEXCEPT
{
multiDrawIndirect = multiDrawIndirect_;
return *this;
}
PhysicalDeviceFeatures & setDrawIndirectFirstInstance( VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance_ ) VULKAN_HPP_NOEXCEPT
{
drawIndirectFirstInstance = drawIndirectFirstInstance_;
return *this;
}
PhysicalDeviceFeatures & setDepthClamp( VULKAN_HPP_NAMESPACE::Bool32 depthClamp_ ) VULKAN_HPP_NOEXCEPT
{
depthClamp = depthClamp_;
return *this;
}
PhysicalDeviceFeatures & setDepthBiasClamp( VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp_ ) VULKAN_HPP_NOEXCEPT
{
depthBiasClamp = depthBiasClamp_;
return *this;
}
PhysicalDeviceFeatures & setFillModeNonSolid( VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid_ ) VULKAN_HPP_NOEXCEPT
{
fillModeNonSolid = fillModeNonSolid_;
return *this;
}
PhysicalDeviceFeatures & setDepthBounds( VULKAN_HPP_NAMESPACE::Bool32 depthBounds_ ) VULKAN_HPP_NOEXCEPT
{
depthBounds = depthBounds_;
return *this;
}
PhysicalDeviceFeatures & setWideLines( VULKAN_HPP_NAMESPACE::Bool32 wideLines_ ) VULKAN_HPP_NOEXCEPT
{
wideLines = wideLines_;
return *this;
}
PhysicalDeviceFeatures & setLargePoints( VULKAN_HPP_NAMESPACE::Bool32 largePoints_ ) VULKAN_HPP_NOEXCEPT
{
largePoints = largePoints_;
return *this;
}
PhysicalDeviceFeatures & setAlphaToOne( VULKAN_HPP_NAMESPACE::Bool32 alphaToOne_ ) VULKAN_HPP_NOEXCEPT
{
alphaToOne = alphaToOne_;
return *this;
}
PhysicalDeviceFeatures & setMultiViewport( VULKAN_HPP_NAMESPACE::Bool32 multiViewport_ ) VULKAN_HPP_NOEXCEPT
{
multiViewport = multiViewport_;
return *this;
}
PhysicalDeviceFeatures & setSamplerAnisotropy( VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy_ ) VULKAN_HPP_NOEXCEPT
{
samplerAnisotropy = samplerAnisotropy_;
return *this;
}
PhysicalDeviceFeatures & setTextureCompressionETC2( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2_ ) VULKAN_HPP_NOEXCEPT
{
textureCompressionETC2 = textureCompressionETC2_;
return *this;
}
PhysicalDeviceFeatures & setTextureCompressionASTC_LDR( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR_ ) VULKAN_HPP_NOEXCEPT
{
textureCompressionASTC_LDR = textureCompressionASTC_LDR_;
return *this;
}
PhysicalDeviceFeatures & setTextureCompressionBC( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC_ ) VULKAN_HPP_NOEXCEPT
{
textureCompressionBC = textureCompressionBC_;
return *this;
}
PhysicalDeviceFeatures & setOcclusionQueryPrecise( VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise_ ) VULKAN_HPP_NOEXCEPT
{
occlusionQueryPrecise = occlusionQueryPrecise_;
return *this;
}
PhysicalDeviceFeatures & setPipelineStatisticsQuery( VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery_ ) VULKAN_HPP_NOEXCEPT
{
pipelineStatisticsQuery = pipelineStatisticsQuery_;
return *this;
}
PhysicalDeviceFeatures & setVertexPipelineStoresAndAtomics( VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics_ ) VULKAN_HPP_NOEXCEPT
{
vertexPipelineStoresAndAtomics = vertexPipelineStoresAndAtomics_;
return *this;
}
PhysicalDeviceFeatures & setFragmentStoresAndAtomics( VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics_ ) VULKAN_HPP_NOEXCEPT
{
fragmentStoresAndAtomics = fragmentStoresAndAtomics_;
return *this;
}
PhysicalDeviceFeatures & setShaderTessellationAndGeometryPointSize( VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize_ ) VULKAN_HPP_NOEXCEPT
{
shaderTessellationAndGeometryPointSize = shaderTessellationAndGeometryPointSize_;
return *this;
}
PhysicalDeviceFeatures & setShaderImageGatherExtended( VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended_ ) VULKAN_HPP_NOEXCEPT
{
shaderImageGatherExtended = shaderImageGatherExtended_;
return *this;
}
PhysicalDeviceFeatures & setShaderStorageImageExtendedFormats( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats_ ) VULKAN_HPP_NOEXCEPT
{
shaderStorageImageExtendedFormats = shaderStorageImageExtendedFormats_;
return *this;
}
PhysicalDeviceFeatures & setShaderStorageImageMultisample( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample_ ) VULKAN_HPP_NOEXCEPT
{
shaderStorageImageMultisample = shaderStorageImageMultisample_;
return *this;
}
PhysicalDeviceFeatures & setShaderStorageImageReadWithoutFormat( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat_ ) VULKAN_HPP_NOEXCEPT
{
shaderStorageImageReadWithoutFormat = shaderStorageImageReadWithoutFormat_;
return *this;
}
PhysicalDeviceFeatures & setShaderStorageImageWriteWithoutFormat( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat_ ) VULKAN_HPP_NOEXCEPT
{
shaderStorageImageWriteWithoutFormat = shaderStorageImageWriteWithoutFormat_;
return *this;
}
PhysicalDeviceFeatures & setShaderUniformBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderUniformBufferArrayDynamicIndexing = shaderUniformBufferArrayDynamicIndexing_;
return *this;
}
PhysicalDeviceFeatures & setShaderSampledImageArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderSampledImageArrayDynamicIndexing = shaderSampledImageArrayDynamicIndexing_;
return *this;
}
PhysicalDeviceFeatures & setShaderStorageBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderStorageBufferArrayDynamicIndexing = shaderStorageBufferArrayDynamicIndexing_;
return *this;
}
PhysicalDeviceFeatures & setShaderStorageImageArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderStorageImageArrayDynamicIndexing = shaderStorageImageArrayDynamicIndexing_;
return *this;
}
PhysicalDeviceFeatures & setShaderClipDistance( VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance_ ) VULKAN_HPP_NOEXCEPT
{
shaderClipDistance = shaderClipDistance_;
return *this;
}
PhysicalDeviceFeatures & setShaderCullDistance( VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance_ ) VULKAN_HPP_NOEXCEPT
{
shaderCullDistance = shaderCullDistance_;
return *this;
}
PhysicalDeviceFeatures & setShaderFloat64( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64_ ) VULKAN_HPP_NOEXCEPT
{
shaderFloat64 = shaderFloat64_;
return *this;
}
PhysicalDeviceFeatures & setShaderInt64( VULKAN_HPP_NAMESPACE::Bool32 shaderInt64_ ) VULKAN_HPP_NOEXCEPT
{
shaderInt64 = shaderInt64_;
return *this;
}
PhysicalDeviceFeatures & setShaderInt16( VULKAN_HPP_NAMESPACE::Bool32 shaderInt16_ ) VULKAN_HPP_NOEXCEPT
{
shaderInt16 = shaderInt16_;
return *this;
}
PhysicalDeviceFeatures & setShaderResourceResidency( VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency_ ) VULKAN_HPP_NOEXCEPT
{
shaderResourceResidency = shaderResourceResidency_;
return *this;
}
PhysicalDeviceFeatures & setShaderResourceMinLod( VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod_ ) VULKAN_HPP_NOEXCEPT
{
shaderResourceMinLod = shaderResourceMinLod_;
return *this;
}
PhysicalDeviceFeatures & setSparseBinding( VULKAN_HPP_NAMESPACE::Bool32 sparseBinding_ ) VULKAN_HPP_NOEXCEPT
{
sparseBinding = sparseBinding_;
return *this;
}
PhysicalDeviceFeatures & setSparseResidencyBuffer( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer_ ) VULKAN_HPP_NOEXCEPT
{
sparseResidencyBuffer = sparseResidencyBuffer_;
return *this;
}
PhysicalDeviceFeatures & setSparseResidencyImage2D( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D_ ) VULKAN_HPP_NOEXCEPT
{
sparseResidencyImage2D = sparseResidencyImage2D_;
return *this;
}
PhysicalDeviceFeatures & setSparseResidencyImage3D( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D_ ) VULKAN_HPP_NOEXCEPT
{
sparseResidencyImage3D = sparseResidencyImage3D_;
return *this;
}
PhysicalDeviceFeatures & setSparseResidency2Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples_ ) VULKAN_HPP_NOEXCEPT
{
sparseResidency2Samples = sparseResidency2Samples_;
return *this;
}
PhysicalDeviceFeatures & setSparseResidency4Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples_ ) VULKAN_HPP_NOEXCEPT
{
sparseResidency4Samples = sparseResidency4Samples_;
return *this;
}
PhysicalDeviceFeatures & setSparseResidency8Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples_ ) VULKAN_HPP_NOEXCEPT
{
sparseResidency8Samples = sparseResidency8Samples_;
return *this;
}
PhysicalDeviceFeatures & setSparseResidency16Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples_ ) VULKAN_HPP_NOEXCEPT
{
sparseResidency16Samples = sparseResidency16Samples_;
return *this;
}
PhysicalDeviceFeatures & setSparseResidencyAliased( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased_ ) VULKAN_HPP_NOEXCEPT
{
sparseResidencyAliased = sparseResidencyAliased_;
return *this;
}
PhysicalDeviceFeatures & setVariableMultisampleRate( VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate_ ) VULKAN_HPP_NOEXCEPT
{
variableMultisampleRate = variableMultisampleRate_;
return *this;
}
PhysicalDeviceFeatures & setInheritedQueries( VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries_ ) VULKAN_HPP_NOEXCEPT
{
inheritedQueries = inheritedQueries_;
return *this;
}
operator VkPhysicalDeviceFeatures const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceFeatures*>( this );
}
operator VkPhysicalDeviceFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceFeatures*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceFeatures const& ) const = default;
#else
bool operator==( PhysicalDeviceFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( robustBufferAccess == rhs.robustBufferAccess )
&& ( fullDrawIndexUint32 == rhs.fullDrawIndexUint32 )
&& ( imageCubeArray == rhs.imageCubeArray )
&& ( independentBlend == rhs.independentBlend )
&& ( geometryShader == rhs.geometryShader )
&& ( tessellationShader == rhs.tessellationShader )
&& ( sampleRateShading == rhs.sampleRateShading )
&& ( dualSrcBlend == rhs.dualSrcBlend )
&& ( logicOp == rhs.logicOp )
&& ( multiDrawIndirect == rhs.multiDrawIndirect )
&& ( drawIndirectFirstInstance == rhs.drawIndirectFirstInstance )
&& ( depthClamp == rhs.depthClamp )
&& ( depthBiasClamp == rhs.depthBiasClamp )
&& ( fillModeNonSolid == rhs.fillModeNonSolid )
&& ( depthBounds == rhs.depthBounds )
&& ( wideLines == rhs.wideLines )
&& ( largePoints == rhs.largePoints )
&& ( alphaToOne == rhs.alphaToOne )
&& ( multiViewport == rhs.multiViewport )
&& ( samplerAnisotropy == rhs.samplerAnisotropy )
&& ( textureCompressionETC2 == rhs.textureCompressionETC2 )
&& ( textureCompressionASTC_LDR == rhs.textureCompressionASTC_LDR )
&& ( textureCompressionBC == rhs.textureCompressionBC )
&& ( occlusionQueryPrecise == rhs.occlusionQueryPrecise )
&& ( pipelineStatisticsQuery == rhs.pipelineStatisticsQuery )
&& ( vertexPipelineStoresAndAtomics == rhs.vertexPipelineStoresAndAtomics )
&& ( fragmentStoresAndAtomics == rhs.fragmentStoresAndAtomics )
&& ( shaderTessellationAndGeometryPointSize == rhs.shaderTessellationAndGeometryPointSize )
&& ( shaderImageGatherExtended == rhs.shaderImageGatherExtended )
&& ( shaderStorageImageExtendedFormats == rhs.shaderStorageImageExtendedFormats )
&& ( shaderStorageImageMultisample == rhs.shaderStorageImageMultisample )
&& ( shaderStorageImageReadWithoutFormat == rhs.shaderStorageImageReadWithoutFormat )
&& ( shaderStorageImageWriteWithoutFormat == rhs.shaderStorageImageWriteWithoutFormat )
&& ( shaderUniformBufferArrayDynamicIndexing == rhs.shaderUniformBufferArrayDynamicIndexing )
&& ( shaderSampledImageArrayDynamicIndexing == rhs.shaderSampledImageArrayDynamicIndexing )
&& ( shaderStorageBufferArrayDynamicIndexing == rhs.shaderStorageBufferArrayDynamicIndexing )
&& ( shaderStorageImageArrayDynamicIndexing == rhs.shaderStorageImageArrayDynamicIndexing )
&& ( shaderClipDistance == rhs.shaderClipDistance )
&& ( shaderCullDistance == rhs.shaderCullDistance )
&& ( shaderFloat64 == rhs.shaderFloat64 )
&& ( shaderInt64 == rhs.shaderInt64 )
&& ( shaderInt16 == rhs.shaderInt16 )
&& ( shaderResourceResidency == rhs.shaderResourceResidency )
&& ( shaderResourceMinLod == rhs.shaderResourceMinLod )
&& ( sparseBinding == rhs.sparseBinding )
&& ( sparseResidencyBuffer == rhs.sparseResidencyBuffer )
&& ( sparseResidencyImage2D == rhs.sparseResidencyImage2D )
&& ( sparseResidencyImage3D == rhs.sparseResidencyImage3D )
&& ( sparseResidency2Samples == rhs.sparseResidency2Samples )
&& ( sparseResidency4Samples == rhs.sparseResidency4Samples )
&& ( sparseResidency8Samples == rhs.sparseResidency8Samples )
&& ( sparseResidency16Samples == rhs.sparseResidency16Samples )
&& ( sparseResidencyAliased == rhs.sparseResidencyAliased )
&& ( variableMultisampleRate == rhs.variableMultisampleRate )
&& ( inheritedQueries == rhs.inheritedQueries );
}
bool operator!=( PhysicalDeviceFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess = {};
VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32 = {};
VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray = {};
VULKAN_HPP_NAMESPACE::Bool32 independentBlend = {};
VULKAN_HPP_NAMESPACE::Bool32 geometryShader = {};
VULKAN_HPP_NAMESPACE::Bool32 tessellationShader = {};
VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading = {};
VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend = {};
VULKAN_HPP_NAMESPACE::Bool32 logicOp = {};
VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect = {};
VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance = {};
VULKAN_HPP_NAMESPACE::Bool32 depthClamp = {};
VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp = {};
VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid = {};
VULKAN_HPP_NAMESPACE::Bool32 depthBounds = {};
VULKAN_HPP_NAMESPACE::Bool32 wideLines = {};
VULKAN_HPP_NAMESPACE::Bool32 largePoints = {};
VULKAN_HPP_NAMESPACE::Bool32 alphaToOne = {};
VULKAN_HPP_NAMESPACE::Bool32 multiViewport = {};
VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy = {};
VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2 = {};
VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR = {};
VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC = {};
VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise = {};
VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery = {};
VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics = {};
VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64 = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderInt64 = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderInt16 = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod = {};
VULKAN_HPP_NAMESPACE::Bool32 sparseBinding = {};
VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer = {};
VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D = {};
VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D = {};
VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples = {};
VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples = {};
VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples = {};
VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples = {};
VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased = {};
VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate = {};
VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries = {};
};
static_assert( sizeof( PhysicalDeviceFeatures ) == sizeof( VkPhysicalDeviceFeatures ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceFeatures>::value, "struct wrapper is not a standard layout!" );
struct DeviceCreateInfo
{
VULKAN_HPP_CONSTEXPR DeviceCreateInfo( VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_ = {},
uint32_t queueCreateInfoCount_ = {},
const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo* pQueueCreateInfos_ = {},
uint32_t enabledLayerCount_ = {},
const char* const* ppEnabledLayerNames_ = {},
uint32_t enabledExtensionCount_ = {},
const char* const* ppEnabledExtensionNames_ = {},
const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures* pEnabledFeatures_ = {} ) VULKAN_HPP_NOEXCEPT
: flags( flags_ )
, queueCreateInfoCount( queueCreateInfoCount_ )
, pQueueCreateInfos( pQueueCreateInfos_ )
, enabledLayerCount( enabledLayerCount_ )
, ppEnabledLayerNames( ppEnabledLayerNames_ )
, enabledExtensionCount( enabledExtensionCount_ )
, ppEnabledExtensionNames( ppEnabledExtensionNames_ )
, pEnabledFeatures( pEnabledFeatures_ )
{}
VULKAN_HPP_CONSTEXPR DeviceCreateInfo( DeviceCreateInfo const& rhs ) VULKAN_HPP_NOEXCEPT
: pNext( rhs.pNext )
, flags( rhs.flags )
, queueCreateInfoCount( rhs.queueCreateInfoCount )
, pQueueCreateInfos( rhs.pQueueCreateInfos )
, enabledLayerCount( rhs.enabledLayerCount )
, ppEnabledLayerNames( rhs.ppEnabledLayerNames )
, enabledExtensionCount( rhs.enabledExtensionCount )
, ppEnabledExtensionNames( rhs.ppEnabledExtensionNames )
, pEnabledFeatures( rhs.pEnabledFeatures )
{}
DeviceCreateInfo & operator=( DeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
memcpy( &pNext, &rhs.pNext, sizeof( DeviceCreateInfo ) - offsetof( DeviceCreateInfo, pNext ) );
return *this;
}
DeviceCreateInfo( VkDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
DeviceCreateInfo& operator=( VkDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceCreateInfo const *>(&rhs);
return *this;
}
DeviceCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
DeviceCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
DeviceCreateInfo & setQueueCreateInfoCount( uint32_t queueCreateInfoCount_ ) VULKAN_HPP_NOEXCEPT
{
queueCreateInfoCount = queueCreateInfoCount_;
return *this;
}
DeviceCreateInfo & setPQueueCreateInfos( const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo* pQueueCreateInfos_ ) VULKAN_HPP_NOEXCEPT
{
pQueueCreateInfos = pQueueCreateInfos_;
return *this;
}
DeviceCreateInfo & setEnabledLayerCount( uint32_t enabledLayerCount_ ) VULKAN_HPP_NOEXCEPT
{
enabledLayerCount = enabledLayerCount_;
return *this;
}
DeviceCreateInfo & setPpEnabledLayerNames( const char* const* ppEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT
{
ppEnabledLayerNames = ppEnabledLayerNames_;
return *this;
}
DeviceCreateInfo & setEnabledExtensionCount( uint32_t enabledExtensionCount_ ) VULKAN_HPP_NOEXCEPT
{
enabledExtensionCount = enabledExtensionCount_;
return *this;
}
DeviceCreateInfo & setPpEnabledExtensionNames( const char* const* ppEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT
{
ppEnabledExtensionNames = ppEnabledExtensionNames_;
return *this;
}
DeviceCreateInfo & setPEnabledFeatures( const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures* pEnabledFeatures_ ) VULKAN_HPP_NOEXCEPT
{
pEnabledFeatures = pEnabledFeatures_;
return *this;
}
operator VkDeviceCreateInfo const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDeviceCreateInfo*>( this );
}
operator VkDeviceCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDeviceCreateInfo*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DeviceCreateInfo const& ) const = default;
#else
bool operator==( DeviceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( queueCreateInfoCount == rhs.queueCreateInfoCount )
&& ( pQueueCreateInfos == rhs.pQueueCreateInfos )
&& ( enabledLayerCount == rhs.enabledLayerCount )
&& ( ppEnabledLayerNames == rhs.ppEnabledLayerNames )
&& ( enabledExtensionCount == rhs.enabledExtensionCount )
&& ( ppEnabledExtensionNames == rhs.ppEnabledExtensionNames )
&& ( pEnabledFeatures == rhs.pEnabledFeatures );
}
bool operator!=( DeviceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceCreateInfo;
const void* pNext = {};
VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags = {};
uint32_t queueCreateInfoCount = {};
const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo* pQueueCreateInfos = {};
uint32_t enabledLayerCount = {};
const char* const* ppEnabledLayerNames = {};
uint32_t enabledExtensionCount = {};
const char* const* ppEnabledExtensionNames = {};
const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures* pEnabledFeatures = {};
};
static_assert( sizeof( DeviceCreateInfo ) == sizeof( VkDeviceCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DeviceCreateInfo>::value, "struct wrapper is not a standard layout!" );
struct DeviceEventInfoEXT
{
VULKAN_HPP_CONSTEXPR DeviceEventInfoEXT( VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent_ = VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT::eDisplayHotplug ) VULKAN_HPP_NOEXCEPT
: deviceEvent( deviceEvent_ )
{}
VULKAN_HPP_CONSTEXPR DeviceEventInfoEXT( DeviceEventInfoEXT const& rhs ) VULKAN_HPP_NOEXCEPT
: pNext( rhs.pNext )
, deviceEvent( rhs.deviceEvent )
{}
DeviceEventInfoEXT & operator=( DeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
memcpy( &pNext, &rhs.pNext, sizeof( DeviceEventInfoEXT ) - offsetof( DeviceEventInfoEXT, pNext ) );
return *this;
}
DeviceEventInfoEXT( VkDeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
DeviceEventInfoEXT& operator=( VkDeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT const *>(&rhs);
return *this;
}
DeviceEventInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
DeviceEventInfoEXT & setDeviceEvent( VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent_ ) VULKAN_HPP_NOEXCEPT
{
deviceEvent = deviceEvent_;
return *this;
}
operator VkDeviceEventInfoEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDeviceEventInfoEXT*>( this );
}
operator VkDeviceEventInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDeviceEventInfoEXT*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DeviceEventInfoEXT const& ) const = default;
#else
bool operator==( DeviceEventInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( deviceEvent == rhs.deviceEvent );
}
bool operator!=( DeviceEventInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceEventInfoEXT;
const void* pNext = {};
VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent = VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT::eDisplayHotplug;
};
static_assert( sizeof( DeviceEventInfoEXT ) == sizeof( VkDeviceEventInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DeviceEventInfoEXT>::value, "struct wrapper is not a standard layout!" );
struct DeviceGeneratedCommandsFeaturesNVX
{
VULKAN_HPP_CONSTEXPR DeviceGeneratedCommandsFeaturesNVX( VULKAN_HPP_NAMESPACE::Bool32 computeBindingPointSupport_ = {} ) VULKAN_HPP_NOEXCEPT
: computeBindingPointSupport( computeBindingPointSupport_ )
{}
VULKAN_HPP_CONSTEXPR DeviceGeneratedCommandsFeaturesNVX( DeviceGeneratedCommandsFeaturesNVX const& rhs ) VULKAN_HPP_NOEXCEPT
: pNext( rhs.pNext )
, computeBindingPointSupport( rhs.computeBindingPointSupport )
{}
DeviceGeneratedCommandsFeaturesNVX & operator=( DeviceGeneratedCommandsFeaturesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
{
memcpy( &pNext, &rhs.pNext, sizeof( DeviceGeneratedCommandsFeaturesNVX ) - offsetof( DeviceGeneratedCommandsFeaturesNVX, pNext ) );
return *this;
}
DeviceGeneratedCommandsFeaturesNVX( VkDeviceGeneratedCommandsFeaturesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
DeviceGeneratedCommandsFeaturesNVX& operator=( VkDeviceGeneratedCommandsFeaturesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsFeaturesNVX const *>(&rhs);
return *this;
}
DeviceGeneratedCommandsFeaturesNVX & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
DeviceGeneratedCommandsFeaturesNVX & setComputeBindingPointSupport( VULKAN_HPP_NAMESPACE::Bool32 computeBindingPointSupport_ ) VULKAN_HPP_NOEXCEPT
{
computeBindingPointSupport = computeBindingPointSupport_;
return *this;
}
operator VkDeviceGeneratedCommandsFeaturesNVX const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDeviceGeneratedCommandsFeaturesNVX*>( this );
}
operator VkDeviceGeneratedCommandsFeaturesNVX &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDeviceGeneratedCommandsFeaturesNVX*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DeviceGeneratedCommandsFeaturesNVX const& ) const = default;
#else
bool operator==( DeviceGeneratedCommandsFeaturesNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( computeBindingPointSupport == rhs.computeBindingPointSupport );
}
bool operator!=( DeviceGeneratedCommandsFeaturesNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGeneratedCommandsFeaturesNVX;
const void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 computeBindingPointSupport = {};
};
static_assert( sizeof( DeviceGeneratedCommandsFeaturesNVX ) == sizeof( VkDeviceGeneratedCommandsFeaturesNVX ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DeviceGeneratedCommandsFeaturesNVX>::value, "struct wrapper is not a standard layout!" );
struct DeviceGeneratedCommandsLimitsNVX
{
VULKAN_HPP_CONSTEXPR DeviceGeneratedCommandsLimitsNVX( uint32_t maxIndirectCommandsLayoutTokenCount_ = {},
uint32_t maxObjectEntryCounts_ = {},
uint32_t minSequenceCountBufferOffsetAlignment_ = {},
uint32_t minSequenceIndexBufferOffsetAlignment_ = {},
uint32_t minCommandsTokenBufferOffsetAlignment_ = {} ) VULKAN_HPP_NOEXCEPT
: maxIndirectCommandsLayoutTokenCount( maxIndirectCommandsLayoutTokenCount_ )
, maxObjectEntryCounts( maxObjectEntryCounts_ )
, minSequenceCountBufferOffsetAlignment( minSequenceCountBufferOffsetAlignment_ )
, minSequenceIndexBufferOffsetAlignment( minSequenceIndexBufferOffsetAlignment_ )
, minCommandsTokenBufferOffsetAlignment( minCommandsTokenBufferOffsetAlignment_ )
{}
VULKAN_HPP_CONSTEXPR DeviceGeneratedCommandsLimitsNVX( DeviceGeneratedCommandsLimitsNVX const& rhs ) VULKAN_HPP_NOEXCEPT
: pNext( rhs.pNext )
, maxIndirectCommandsLayoutTokenCount( rhs.maxIndirectCommandsLayoutTokenCount )
, maxObjectEntryCounts( rhs.maxObjectEntryCounts )
, minSequenceCountBufferOffsetAlignment( rhs.minSequenceCountBufferOffsetAlignment )
, minSequenceIndexBufferOffsetAlignment( rhs.minSequenceIndexBufferOffsetAlignment )
, minCommandsTokenBufferOffsetAlignment( rhs.minCommandsTokenBufferOffsetAlignment )
{}
DeviceGeneratedCommandsLimitsNVX & operator=( DeviceGeneratedCommandsLimitsNVX const & rhs ) VULKAN_HPP_NOEXCEPT
{
memcpy( &pNext, &rhs.pNext, sizeof( DeviceGeneratedCommandsLimitsNVX ) - offsetof( DeviceGeneratedCommandsLimitsNVX, pNext ) );
return *this;
}
DeviceGeneratedCommandsLimitsNVX( VkDeviceGeneratedCommandsLimitsNVX const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
DeviceGeneratedCommandsLimitsNVX& operator=( VkDeviceGeneratedCommandsLimitsNVX const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsLimitsNVX const *>(&rhs);
return *this;
}
DeviceGeneratedCommandsLimitsNVX & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
DeviceGeneratedCommandsLimitsNVX & setMaxIndirectCommandsLayoutTokenCount( uint32_t maxIndirectCommandsLayoutTokenCount_ ) VULKAN_HPP_NOEXCEPT
{
maxIndirectCommandsLayoutTokenCount = maxIndirectCommandsLayoutTokenCount_;
return *this;
}
DeviceGeneratedCommandsLimitsNVX & setMaxObjectEntryCounts( uint32_t maxObjectEntryCounts_ ) VULKAN_HPP_NOEXCEPT
{
maxObjectEntryCounts = maxObjectEntryCounts_;
return *this;
}
DeviceGeneratedCommandsLimitsNVX & setMinSequenceCountBufferOffsetAlignment( uint32_t minSequenceCountBufferOffsetAlignment_ ) VULKAN_HPP_NOEXCEPT
{
minSequenceCountBufferOffsetAlignment = minSequenceCountBufferOffsetAlignment_;
return *this;
}
DeviceGeneratedCommandsLimitsNVX & setMinSequenceIndexBufferOffsetAlignment( uint32_t minSequenceIndexBufferOffsetAlignment_ ) VULKAN_HPP_NOEXCEPT
{
minSequenceIndexBufferOffsetAlignment = minSequenceIndexBufferOffsetAlignment_;
return *this;
}
DeviceGeneratedCommandsLimitsNVX & setMinCommandsTokenBufferOffsetAlignment( uint32_t minCommandsTokenBufferOffsetAlignment_ ) VULKAN_HPP_NOEXCEPT
{
minCommandsTokenBufferOffsetAlignment = minCommandsTokenBufferOffsetAlignment_;
return *this;
}
operator VkDeviceGeneratedCommandsLimitsNVX const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDeviceGeneratedCommandsLimitsNVX*>( this );
}
operator VkDeviceGeneratedCommandsLimitsNVX &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDeviceGeneratedCommandsLimitsNVX*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DeviceGeneratedCommandsLimitsNVX const& ) const = default;
#else
bool operator==( DeviceGeneratedCommandsLimitsNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( maxIndirectCommandsLayoutTokenCount == rhs.maxIndirectCommandsLayoutTokenCount )
&& ( maxObjectEntryCounts == rhs.maxObjectEntryCounts )
&& ( minSequenceCountBufferOffsetAlignment == rhs.minSequenceCountBufferOffsetAlignment )
&& ( minSequenceIndexBufferOffsetAlignment == rhs.minSequenceIndexBufferOffsetAlignment )
&& ( minCommandsTokenBufferOffsetAlignment == rhs.minCommandsTokenBufferOffsetAlignment );
}
bool operator!=( DeviceGeneratedCommandsLimitsNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGeneratedCommandsLimitsNVX;
const void* pNext = {};
uint32_t maxIndirectCommandsLayoutTokenCount = {};
uint32_t maxObjectEntryCounts = {};
uint32_t minSequenceCountBufferOffsetAlignment = {};
uint32_t minSequenceIndexBufferOffsetAlignment = {};
uint32_t minCommandsTokenBufferOffsetAlignment = {};
};
static_assert( sizeof( DeviceGeneratedCommandsLimitsNVX ) == sizeof( VkDeviceGeneratedCommandsLimitsNVX ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DeviceGeneratedCommandsLimitsNVX>::value, "struct wrapper is not a standard layout!" );
struct DeviceGroupBindSparseInfo
{
VULKAN_HPP_CONSTEXPR DeviceGroupBindSparseInfo( uint32_t resourceDeviceIndex_ = {},
uint32_t memoryDeviceIndex_ = {} ) VULKAN_HPP_NOEXCEPT
: resourceDeviceIndex( resourceDeviceIndex_ )
, memoryDeviceIndex( memoryDeviceIndex_ )
{}
VULKAN_HPP_CONSTEXPR DeviceGroupBindSparseInfo( DeviceGroupBindSparseInfo const& rhs ) VULKAN_HPP_NOEXCEPT
: pNext( rhs.pNext )
, resourceDeviceIndex( rhs.resourceDeviceIndex )
, memoryDeviceIndex( rhs.memoryDeviceIndex )
{}
DeviceGroupBindSparseInfo & operator=( DeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
memcpy( &pNext, &rhs.pNext, sizeof( DeviceGroupBindSparseInfo ) - offsetof( DeviceGroupBindSparseInfo, pNext ) );
return *this;
}
DeviceGroupBindSparseInfo( VkDeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
DeviceGroupBindSparseInfo& operator=( VkDeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupBindSparseInfo const *>(&rhs);
return *this;
}
DeviceGroupBindSparseInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
DeviceGroupBindSparseInfo & setResourceDeviceIndex( uint32_t resourceDeviceIndex_ ) VULKAN_HPP_NOEXCEPT
{
resourceDeviceIndex = resourceDeviceIndex_;
return *this;
}
DeviceGroupBindSparseInfo & setMemoryDeviceIndex( uint32_t memoryDeviceIndex_ ) VULKAN_HPP_NOEXCEPT
{
memoryDeviceIndex = memoryDeviceIndex_;
return *this;
}
operator VkDeviceGroupBindSparseInfo const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDeviceGroupBindSparseInfo*>( this );
}
operator VkDeviceGroupBindSparseInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDeviceGroupBindSparseInfo*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DeviceGroupBindSparseInfo const& ) const = default;
#else
bool operator==( DeviceGroupBindSparseInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( resourceDeviceIndex == rhs.resourceDeviceIndex )
&& ( memoryDeviceIndex == rhs.memoryDeviceIndex );
}
bool operator!=( DeviceGroupBindSparseInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupBindSparseInfo;
const void* pNext = {};
uint32_t resourceDeviceIndex = {};
uint32_t memoryDeviceIndex = {};
};
static_assert( sizeof( DeviceGroupBindSparseInfo ) == sizeof( VkDeviceGroupBindSparseInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DeviceGroupBindSparseInfo>::value, "struct wrapper is not a standard layout!" );
struct DeviceGroupCommandBufferBeginInfo
{
VULKAN_HPP_CONSTEXPR DeviceGroupCommandBufferBeginInfo( uint32_t deviceMask_ = {} ) VULKAN_HPP_NOEXCEPT
: deviceMask( deviceMask_ )
{}
VULKAN_HPP_CONSTEXPR DeviceGroupCommandBufferBeginInfo( DeviceGroupCommandBufferBeginInfo const& rhs ) VULKAN_HPP_NOEXCEPT
: pNext( rhs.pNext )
, deviceMask( rhs.deviceMask )
{}
DeviceGroupCommandBufferBeginInfo & operator=( DeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
memcpy( &pNext, &rhs.pNext, sizeof( DeviceGroupCommandBufferBeginInfo ) - offsetof( DeviceGroupCommandBufferBeginInfo, pNext ) );
return *this;
}
DeviceGroupCommandBufferBeginInfo( VkDeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
DeviceGroupCommandBufferBeginInfo& operator=( VkDeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupCommandBufferBeginInfo const *>(&rhs);
return *this;
}
DeviceGroupCommandBufferBeginInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
DeviceGroupCommandBufferBeginInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT
{
deviceMask = deviceMask_;
return *this;
}
operator VkDeviceGroupCommandBufferBeginInfo const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDeviceGroupCommandBufferBeginInfo*>( this );
}
operator VkDeviceGroupCommandBufferBeginInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDeviceGroupCommandBufferBeginInfo*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DeviceGroupCommandBufferBeginInfo const& ) const = default;
#else
bool operator==( DeviceGroupCommandBufferBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( deviceMask == rhs.deviceMask );
}
bool operator!=( DeviceGroupCommandBufferBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupCommandBufferBeginInfo;
const void* pNext = {};
uint32_t deviceMask = {};
};
static_assert( sizeof( DeviceGroupCommandBufferBeginInfo ) == sizeof( VkDeviceGroupCommandBufferBeginInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DeviceGroupCommandBufferBeginInfo>::value, "struct wrapper is not a standard layout!" );
struct DeviceGroupDeviceCreateInfo
{
VULKAN_HPP_CONSTEXPR DeviceGroupDeviceCreateInfo( uint32_t physicalDeviceCount_ = {},
const VULKAN_HPP_NAMESPACE::PhysicalDevice* pPhysicalDevices_ = {} ) VULKAN_HPP_NOEXCEPT
: physicalDeviceCount( physicalDeviceCount_ )
, pPhysicalDevices( pPhysicalDevices_ )
{}
VULKAN_HPP_CONSTEXPR DeviceGroupDeviceCreateInfo( DeviceGroupDeviceCreateInfo const& rhs ) VULKAN_HPP_NOEXCEPT
: pNext( rhs.pNext )
, physicalDeviceCount( rhs.physicalDeviceCount )
, pPhysicalDevices( rhs.pPhysicalDevices )
{}
DeviceGroupDeviceCreateInfo & operator=( DeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
memcpy( &pNext, &rhs.pNext, sizeof( DeviceGroupDeviceCreateInfo ) - offsetof( DeviceGroupDeviceCreateInfo, pNext ) );
return *this;
}
DeviceGroupDeviceCreateInfo( VkDeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
DeviceGroupDeviceCreateInfo& operator=( VkDeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupDeviceCreateInfo const *>(&rhs);
return *this;
}
DeviceGroupDeviceCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
DeviceGroupDeviceCreateInfo & setPhysicalDeviceCount( uint32_t physicalDeviceCount_ ) VULKAN_HPP_NOEXCEPT
{
physicalDeviceCount = physicalDeviceCount_;
return *this;
}
DeviceGroupDeviceCreateInfo & setPPhysicalDevices( const VULKAN_HPP_NAMESPACE::PhysicalDevice* pPhysicalDevices_ ) VULKAN_HPP_NOEXCEPT
{
pPhysicalDevices = pPhysicalDevices_;
return *this;
}
operator VkDeviceGroupDeviceCreateInfo const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDeviceGroupDeviceCreateInfo*>( this );
}
operator VkDeviceGroupDeviceCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDeviceGroupDeviceCreateInfo*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DeviceGroupDeviceCreateInfo const& ) const = default;
#else
bool operator==( DeviceGroupDeviceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( physicalDeviceCount == rhs.physicalDeviceCount )
&& ( pPhysicalDevices == rhs.pPhysicalDevices );
}
bool operator!=( DeviceGroupDeviceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupDeviceCreateInfo;
const void* pNext = {};
uint32_t physicalDeviceCount = {};
const VULKAN_HPP_NAMESPACE::PhysicalDevice* pPhysicalDevices = {};
};
static_assert( sizeof( DeviceGroupDeviceCreateInfo ) == sizeof( VkDeviceGroupDeviceCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DeviceGroupDeviceCreateInfo>::value, "struct wrapper is not a standard layout!" );
struct DeviceGroupPresentCapabilitiesKHR
{
VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentCapabilitiesKHR( std::array<uint32_t,VK_MAX_DEVICE_GROUP_SIZE> const& presentMask_ = {},
VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ = {} ) VULKAN_HPP_NOEXCEPT
: presentMask{}
, modes( modes_ )
{
VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint32_t,VK_MAX_DEVICE_GROUP_SIZE>::copy( presentMask, presentMask_ );
}
VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentCapabilitiesKHR( DeviceGroupPresentCapabilitiesKHR const& rhs ) VULKAN_HPP_NOEXCEPT
: pNext( rhs.pNext )
, presentMask{}
, modes( rhs.modes )
{
VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint32_t,VK_MAX_DEVICE_GROUP_SIZE>::copy( presentMask, rhs.presentMask );
}
DeviceGroupPresentCapabilitiesKHR & operator=( DeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
memcpy( &pNext, &rhs.pNext, sizeof( DeviceGroupPresentCapabilitiesKHR ) - offsetof( DeviceGroupPresentCapabilitiesKHR, pNext ) );
return *this;
}
DeviceGroupPresentCapabilitiesKHR( VkDeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
DeviceGroupPresentCapabilitiesKHR& operator=( VkDeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR const *>(&rhs);
return *this;
}
operator VkDeviceGroupPresentCapabilitiesKHR const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDeviceGroupPresentCapabilitiesKHR*>( this );
}
operator VkDeviceGroupPresentCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DeviceGroupPresentCapabilitiesKHR const& ) const = default;
#else
bool operator==( DeviceGroupPresentCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( memcmp( presentMask, rhs.presentMask, VK_MAX_DEVICE_GROUP_SIZE * sizeof( uint32_t ) ) == 0 )
&& ( modes == rhs.modes );
}
bool operator!=( DeviceGroupPresentCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupPresentCapabilitiesKHR;
const void* pNext = {};
uint32_t presentMask[VK_MAX_DEVICE_GROUP_SIZE] = {};
VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes = {};
};
static_assert( sizeof( DeviceGroupPresentCapabilitiesKHR ) == sizeof( VkDeviceGroupPresentCapabilitiesKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DeviceGroupPresentCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
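  // Usage sketch (illustrative): DeviceGroupPresentCapabilitiesKHR is an
  // output structure; it is filled in by vkGetDeviceGroupPresentCapabilitiesKHR
  // rather than built by the caller. Assuming the enhanced-mode wrapper and a
  // hypothetical `device` handle:
  //
  //   vk::DeviceGroupPresentCapabilitiesKHR caps = device.getGroupPresentCapabilitiesKHR();
  //   // caps.presentMask[i] is the mask of devices that device index i can
  //   // present from; caps.modes holds the supported present modes.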
struct DeviceGroupPresentInfoKHR
{
VULKAN_HPP_CONSTEXPR DeviceGroupPresentInfoKHR( uint32_t swapchainCount_ = {},
const uint32_t* pDeviceMasks_ = {},
VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ = VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal ) VULKAN_HPP_NOEXCEPT
: swapchainCount( swapchainCount_ )
, pDeviceMasks( pDeviceMasks_ )
, mode( mode_ )
{}
VULKAN_HPP_CONSTEXPR DeviceGroupPresentInfoKHR( DeviceGroupPresentInfoKHR const& rhs ) VULKAN_HPP_NOEXCEPT
: pNext( rhs.pNext )
, swapchainCount( rhs.swapchainCount )
, pDeviceMasks( rhs.pDeviceMasks )
, mode( rhs.mode )
{}
DeviceGroupPresentInfoKHR & operator=( DeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
memcpy( &pNext, &rhs.pNext, sizeof( DeviceGroupPresentInfoKHR ) - offsetof( DeviceGroupPresentInfoKHR, pNext ) );
return *this;
}
DeviceGroupPresentInfoKHR( VkDeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
DeviceGroupPresentInfoKHR& operator=( VkDeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupPresentInfoKHR const *>(&rhs);
return *this;
}
DeviceGroupPresentInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
DeviceGroupPresentInfoKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT
{
swapchainCount = swapchainCount_;
return *this;
}
DeviceGroupPresentInfoKHR & setPDeviceMasks( const uint32_t* pDeviceMasks_ ) VULKAN_HPP_NOEXCEPT
{
pDeviceMasks = pDeviceMasks_;
return *this;
}
DeviceGroupPresentInfoKHR & setMode( VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ ) VULKAN_HPP_NOEXCEPT
{
mode = mode_;
return *this;
}
operator VkDeviceGroupPresentInfoKHR const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDeviceGroupPresentInfoKHR*>( this );
}
operator VkDeviceGroupPresentInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDeviceGroupPresentInfoKHR*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DeviceGroupPresentInfoKHR const& ) const = default;
#else
bool operator==( DeviceGroupPresentInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( swapchainCount == rhs.swapchainCount )
&& ( pDeviceMasks == rhs.pDeviceMasks )
&& ( mode == rhs.mode );
}
bool operator!=( DeviceGroupPresentInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupPresentInfoKHR;
const void* pNext = {};
uint32_t swapchainCount = {};
const uint32_t* pDeviceMasks = {};
VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode = VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal;
};
static_assert( sizeof( DeviceGroupPresentInfoKHR ) == sizeof( VkDeviceGroupPresentInfoKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DeviceGroupPresentInfoKHR>::value, "struct wrapper is not a standard layout!" );
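  // Usage sketch (illustrative): chain DeviceGroupPresentInfoKHR into
  // PresentInfoKHR::pNext to say which device(s) in the group present each
  // swapchain image, one mask per swapchain in the parent PresentInfoKHR.
  // `presentInfo` is hypothetical.
  //
  //   uint32_t deviceMasks[1] = { 0x1 };  // present from device index 0
  //   vk::DeviceGroupPresentInfoKHR groupPresentInfo;
  //   groupPresentInfo.setSwapchainCount( 1 )
  //                   .setPDeviceMasks( deviceMasks )
  //                   .setMode( vk::DeviceGroupPresentModeFlagBitsKHR::eLocal );
  //   presentInfo.setPNext( &groupPresentInfo );  // vk::PresentInfoKHR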
struct DeviceGroupRenderPassBeginInfo
{
VULKAN_HPP_CONSTEXPR DeviceGroupRenderPassBeginInfo( uint32_t deviceMask_ = {},
uint32_t deviceRenderAreaCount_ = {},
const VULKAN_HPP_NAMESPACE::Rect2D* pDeviceRenderAreas_ = {} ) VULKAN_HPP_NOEXCEPT
: deviceMask( deviceMask_ )
, deviceRenderAreaCount( deviceRenderAreaCount_ )
, pDeviceRenderAreas( pDeviceRenderAreas_ )
{}
VULKAN_HPP_CONSTEXPR DeviceGroupRenderPassBeginInfo( DeviceGroupRenderPassBeginInfo const& rhs ) VULKAN_HPP_NOEXCEPT
: pNext( rhs.pNext )
, deviceMask( rhs.deviceMask )
, deviceRenderAreaCount( rhs.deviceRenderAreaCount )
, pDeviceRenderAreas( rhs.pDeviceRenderAreas )
{}
DeviceGroupRenderPassBeginInfo & operator=( DeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
memcpy( &pNext, &rhs.pNext, sizeof( DeviceGroupRenderPassBeginInfo ) - offsetof( DeviceGroupRenderPassBeginInfo, pNext ) );
return *this;
}
DeviceGroupRenderPassBeginInfo( VkDeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
DeviceGroupRenderPassBeginInfo& operator=( VkDeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupRenderPassBeginInfo const *>(&rhs);
return *this;
}
DeviceGroupRenderPassBeginInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
DeviceGroupRenderPassBeginInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT
{
deviceMask = deviceMask_;
return *this;
}
DeviceGroupRenderPassBeginInfo & setDeviceRenderAreaCount( uint32_t deviceRenderAreaCount_ ) VULKAN_HPP_NOEXCEPT
{
deviceRenderAreaCount = deviceRenderAreaCount_;
return *this;
}
DeviceGroupRenderPassBeginInfo & setPDeviceRenderAreas( const VULKAN_HPP_NAMESPACE::Rect2D* pDeviceRenderAreas_ ) VULKAN_HPP_NOEXCEPT
{
pDeviceRenderAreas = pDeviceRenderAreas_;
return *this;
}
operator VkDeviceGroupRenderPassBeginInfo const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDeviceGroupRenderPassBeginInfo*>( this );
}
operator VkDeviceGroupRenderPassBeginInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDeviceGroupRenderPassBeginInfo*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DeviceGroupRenderPassBeginInfo const& ) const = default;
#else
bool operator==( DeviceGroupRenderPassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( deviceMask == rhs.deviceMask )
&& ( deviceRenderAreaCount == rhs.deviceRenderAreaCount )
&& ( pDeviceRenderAreas == rhs.pDeviceRenderAreas );
}
bool operator!=( DeviceGroupRenderPassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupRenderPassBeginInfo;
const void* pNext = {};
uint32_t deviceMask = {};
uint32_t deviceRenderAreaCount = {};
const VULKAN_HPP_NAMESPACE::Rect2D* pDeviceRenderAreas = {};
};
static_assert( sizeof( DeviceGroupRenderPassBeginInfo ) == sizeof( VkDeviceGroupRenderPassBeginInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DeviceGroupRenderPassBeginInfo>::value, "struct wrapper is not a standard layout!" );
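  // Usage sketch (illustrative): chain DeviceGroupRenderPassBeginInfo into
  // RenderPassBeginInfo::pNext so a render pass runs on a subset of the
  // device group, optionally with a distinct render area per device (here,
  // splitting a 1920x1080 frame between two devices). `renderPassBeginInfo`
  // is hypothetical.
  //
  //   vk::Rect2D areas[2] = { { { 0, 0 }, { 960, 1080 } },
  //                           { { 960, 0 }, { 960, 1080 } } };
  //   vk::DeviceGroupRenderPassBeginInfo groupBeginInfo;
  //   groupBeginInfo.setDeviceMask( 0x3 )           // devices 0 and 1
  //                 .setDeviceRenderAreaCount( 2 )
  //                 .setPDeviceRenderAreas( areas );
  //   renderPassBeginInfo.setPNext( &groupBeginInfo );  // vk::RenderPassBeginInfo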
struct DeviceGroupSubmitInfo
{
VULKAN_HPP_CONSTEXPR DeviceGroupSubmitInfo( uint32_t waitSemaphoreCount_ = {},
const uint32_t* pWaitSemaphoreDeviceIndices_ = {},
uint32_t commandBufferCount_ = {},
const uint32_t* pCommandBufferDeviceMasks_ = {},
uint32_t signalSemaphoreCount_ = {},
const uint32_t* pSignalSemaphoreDeviceIndices_ = {} ) VULKAN_HPP_NOEXCEPT
: waitSemaphoreCount( waitSemaphoreCount_ )
, pWaitSemaphoreDeviceIndices( pWaitSemaphoreDeviceIndices_ )
, commandBufferCount( commandBufferCount_ )
, pCommandBufferDeviceMasks( pCommandBufferDeviceMasks_ )
, signalSemaphoreCount( signalSemaphoreCount_ )
, pSignalSemaphoreDeviceIndices( pSignalSemaphoreDeviceIndices_ )
{}
VULKAN_HPP_CONSTEXPR DeviceGroupSubmitInfo( DeviceGroupSubmitInfo const& rhs ) VULKAN_HPP_NOEXCEPT
: pNext( rhs.pNext )
, waitSemaphoreCount( rhs.waitSemaphoreCount )
, pWaitSemaphoreDeviceIndices( rhs.pWaitSemaphoreDeviceIndices )
, commandBufferCount( rhs.commandBufferCount )
, pCommandBufferDeviceMasks( rhs.pCommandBufferDeviceMasks )
, signalSemaphoreCount( rhs.signalSemaphoreCount )
, pSignalSemaphoreDeviceIndices( rhs.pSignalSemaphoreDeviceIndices )
{}
DeviceGroupSubmitInfo & operator=( DeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
memcpy( &pNext, &rhs.pNext, sizeof( DeviceGroupSubmitInfo ) - offsetof( DeviceGroupSubmitInfo, pNext ) );
return *this;
}
DeviceGroupSubmitInfo( VkDeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
DeviceGroupSubmitInfo& operator=( VkDeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupSubmitInfo const *>(&rhs);
return *this;
}
DeviceGroupSubmitInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
DeviceGroupSubmitInfo & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
{
waitSemaphoreCount = waitSemaphoreCount_;
return *this;
}
DeviceGroupSubmitInfo & setPWaitSemaphoreDeviceIndices( const uint32_t* pWaitSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
{
pWaitSemaphoreDeviceIndices = pWaitSemaphoreDeviceIndices_;
return *this;
}
DeviceGroupSubmitInfo & setCommandBufferCount( uint32_t commandBufferCount_ ) VULKAN_HPP_NOEXCEPT
{
commandBufferCount = commandBufferCount_;
return *this;
}
DeviceGroupSubmitInfo & setPCommandBufferDeviceMasks( const uint32_t* pCommandBufferDeviceMasks_ ) VULKAN_HPP_NOEXCEPT
{
pCommandBufferDeviceMasks = pCommandBufferDeviceMasks_;
return *this;
}
DeviceGroupSubmitInfo & setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
{
signalSemaphoreCount = signalSemaphoreCount_;
return *this;
}
DeviceGroupSubmitInfo & setPSignalSemaphoreDeviceIndices( const uint32_t* pSignalSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
{
pSignalSemaphoreDeviceIndices = pSignalSemaphoreDeviceIndices_;
return *this;
}
operator VkDeviceGroupSubmitInfo const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDeviceGroupSubmitInfo*>( this );
}
operator VkDeviceGroupSubmitInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDeviceGroupSubmitInfo*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DeviceGroupSubmitInfo const& ) const = default;
#else
bool operator==( DeviceGroupSubmitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( waitSemaphoreCount == rhs.waitSemaphoreCount )
&& ( pWaitSemaphoreDeviceIndices == rhs.pWaitSemaphoreDeviceIndices )
&& ( commandBufferCount == rhs.commandBufferCount )
&& ( pCommandBufferDeviceMasks == rhs.pCommandBufferDeviceMasks )
&& ( signalSemaphoreCount == rhs.signalSemaphoreCount )
&& ( pSignalSemaphoreDeviceIndices == rhs.pSignalSemaphoreDeviceIndices );
}
bool operator!=( DeviceGroupSubmitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupSubmitInfo;
const void* pNext = {};
uint32_t waitSemaphoreCount = {};
const uint32_t* pWaitSemaphoreDeviceIndices = {};
uint32_t commandBufferCount = {};
const uint32_t* pCommandBufferDeviceMasks = {};
uint32_t signalSemaphoreCount = {};
const uint32_t* pSignalSemaphoreDeviceIndices = {};
};
static_assert( sizeof( DeviceGroupSubmitInfo ) == sizeof( VkDeviceGroupSubmitInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DeviceGroupSubmitInfo>::value, "struct wrapper is not a standard layout!" );
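  // Usage sketch (illustrative): chain DeviceGroupSubmitInfo into
  // SubmitInfo::pNext to control which device executes each command buffer
  // and which device signals or waits each semaphore; the counts here must
  // match the corresponding counts in the parent SubmitInfo. The array and
  // `submitInfo` below are hypothetical.
  //
  //   uint32_t cmdBufDeviceMasks[1] = { 0x1 };  // execute on device index 0
  //   vk::DeviceGroupSubmitInfo groupSubmitInfo;
  //   groupSubmitInfo.setCommandBufferCount( 1 )
  //                  .setPCommandBufferDeviceMasks( cmdBufDeviceMasks );
  //   submitInfo.setPNext( &groupSubmitInfo );  // vk::SubmitInfo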
struct DeviceGroupSwapchainCreateInfoKHR
{
VULKAN_HPP_CONSTEXPR DeviceGroupSwapchainCreateInfoKHR( VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ = {} ) VULKAN_HPP_NOEXCEPT
: modes( modes_ )
{}
VULKAN_HPP_CONSTEXPR DeviceGroupSwapchainCreateInfoKHR( DeviceGroupSwapchainCreateInfoKHR const& rhs ) VULKAN_HPP_NOEXCEPT
: pNext( rhs.pNext )
, modes( rhs.modes )
{}
DeviceGroupSwapchainCreateInfoKHR & operator=( DeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
memcpy( &pNext, &rhs.pNext, sizeof( DeviceGroupSwapchainCreateInfoKHR ) - offsetof( DeviceGroupSwapchainCreateInfoKHR, pNext ) );
return *this;
}
DeviceGroupSwapchainCreateInfoKHR( VkDeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
DeviceGroupSwapchainCreateInfoKHR& operator=( VkDeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupSwapchainCreateInfoKHR const *>(&rhs);
return *this;
}
DeviceGroupSwapchainCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
DeviceGroupSwapchainCreateInfoKHR & setModes( VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ ) VULKAN_HPP_NOEXCEPT
{
modes = modes_;
return *this;
}
operator VkDeviceGroupSwapchainCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDeviceGroupSwapchainCreateInfoKHR*>( this );
}
operator VkDeviceGroupSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDeviceGroupSwapchainCreateInfoKHR*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DeviceGroupSwapchainCreateInfoKHR const& ) const = default;
#else
bool operator==( DeviceGroupSwapchainCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( modes == rhs.modes );
}
bool operator!=( DeviceGroupSwapchainCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupSwapchainCreateInfoKHR;
const void* pNext = {};
VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes = {};
};
static_assert( sizeof( DeviceGroupSwapchainCreateInfoKHR ) == sizeof( VkDeviceGroupSwapchainCreateInfoKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DeviceGroupSwapchainCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
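  // Usage sketch (illustrative): chain DeviceGroupSwapchainCreateInfoKHR into
  // SwapchainCreateInfoKHR::pNext to declare which device group present modes
  // the swapchain may be used with. `swapchainCreateInfo` is hypothetical.
  //
  //   vk::DeviceGroupSwapchainCreateInfoKHR groupSwapchainInfo;
  //   groupSwapchainInfo.setModes( vk::DeviceGroupPresentModeFlagBitsKHR::eLocal );
  //   swapchainCreateInfo.setPNext( &groupSwapchainInfo );  // vk::SwapchainCreateInfoKHR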
struct DeviceMemoryOverallocationCreateInfoAMD
{
VULKAN_HPP_CONSTEXPR DeviceMemoryOverallocationCreateInfoAMD( VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior_ = VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD::eDefault ) VULKAN_HPP_NOEXCEPT
: overallocationBehavior( overallocationBehavior_ )
{}
VULKAN_HPP_CONSTEXPR DeviceMemoryOverallocationCreateInfoAMD( DeviceMemoryOverallocationCreateInfoAMD const& rhs ) VULKAN_HPP_NOEXCEPT
: pNext( rhs.pNext )
, overallocationBehavior( rhs.overallocationBehavior )
{}
DeviceMemoryOverallocationCreateInfoAMD & operator=( DeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
{
memcpy( &pNext, &rhs.pNext, sizeof( DeviceMemoryOverallocationCreateInfoAMD ) - offsetof( DeviceMemoryOverallocationCreateInfoAMD, pNext ) );
return *this;
}
DeviceMemoryOverallocationCreateInfoAMD( VkDeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
DeviceMemoryOverallocationCreateInfoAMD& operator=( VkDeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceMemoryOverallocationCreateInfoAMD const *>(&rhs);
return *this;
}
DeviceMemoryOverallocationCreateInfoAMD & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
DeviceMemoryOverallocationCreateInfoAMD & setOverallocationBehavior( VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior_ ) VULKAN_HPP_NOEXCEPT
{
overallocationBehavior = overallocationBehavior_;
return *this;
}
operator VkDeviceMemoryOverallocationCreateInfoAMD const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDeviceMemoryOverallocationCreateInfoAMD*>( this );
}
operator VkDeviceMemoryOverallocationCreateInfoAMD &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDeviceMemoryOverallocationCreateInfoAMD*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DeviceMemoryOverallocationCreateInfoAMD const& ) const = default;
#else
bool operator==( DeviceMemoryOverallocationCreateInfoAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( overallocationBehavior == rhs.overallocationBehavior );
}
bool operator!=( DeviceMemoryOverallocationCreateInfoAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceMemoryOverallocationCreateInfoAMD;
const void* pNext = {};
VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior = VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD::eDefault;
};
static_assert( sizeof( DeviceMemoryOverallocationCreateInfoAMD ) == sizeof( VkDeviceMemoryOverallocationCreateInfoAMD ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DeviceMemoryOverallocationCreateInfoAMD>::value, "struct wrapper is not a standard layout!" );
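  // Usage sketch (illustrative): chain DeviceMemoryOverallocationCreateInfoAMD
  // (VK_AMD_memory_overallocation_behavior) into DeviceCreateInfo::pNext to
  // explicitly allow or disallow allocations beyond the heap sizes reported
  // by the implementation. `deviceCreateInfo` is hypothetical.
  //
  //   vk::DeviceMemoryOverallocationCreateInfoAMD overallocInfo;
  //   overallocInfo.setOverallocationBehavior( vk::MemoryOverallocationBehaviorAMD::eDisallowed );
  //   deviceCreateInfo.setPNext( &overallocInfo );  // vk::DeviceCreateInfo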
struct DeviceQueueGlobalPriorityCreateInfoEXT
{
VULKAN_HPP_CONSTEXPR DeviceQueueGlobalPriorityCreateInfoEXT( VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT globalPriority_ = VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow ) VULKAN_HPP_NOEXCEPT
: globalPriority( globalPriority_ )
{}
VULKAN_HPP_CONSTEXPR DeviceQueueGlobalPriorityCreateInfoEXT( DeviceQueueGlobalPriorityCreateInfoEXT const& rhs ) VULKAN_HPP_NOEXCEPT
: pNext( rhs.pNext )
, globalPriority( rhs.globalPriority )
{}
DeviceQueueGlobalPriorityCreateInfoEXT & operator=( DeviceQueueGlobalPriorityCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
memcpy( &pNext, &rhs.pNext, sizeof( DeviceQueueGlobalPriorityCreateInfoEXT ) - offsetof( DeviceQueueGlobalPriorityCreateInfoEXT, pNext ) );
return *this;
}
DeviceQueueGlobalPriorityCreateInfoEXT( VkDeviceQueueGlobalPriorityCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
DeviceQueueGlobalPriorityCreateInfoEXT& operator=( VkDeviceQueueGlobalPriorityCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfoEXT const *>(&rhs);
return *this;
}
DeviceQueueGlobalPriorityCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
DeviceQueueGlobalPriorityCreateInfoEXT & setGlobalPriority( VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT globalPriority_ ) VULKAN_HPP_NOEXCEPT
{
globalPriority = globalPriority_;
return *this;
}
operator VkDeviceQueueGlobalPriorityCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDeviceQueueGlobalPriorityCreateInfoEXT*>( this );
}
operator VkDeviceQueueGlobalPriorityCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDeviceQueueGlobalPriorityCreateInfoEXT*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DeviceQueueGlobalPriorityCreateInfoEXT const& ) const = default;
#else
bool operator==( DeviceQueueGlobalPriorityCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( globalPriority == rhs.globalPriority );
}
bool operator!=( DeviceQueueGlobalPriorityCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueGlobalPriorityCreateInfoEXT;
const void* pNext = {};
VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT globalPriority = VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow;
};
static_assert( sizeof( DeviceQueueGlobalPriorityCreateInfoEXT ) == sizeof( VkDeviceQueueGlobalPriorityCreateInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DeviceQueueGlobalPriorityCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
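  // Usage sketch (illustrative): chain DeviceQueueGlobalPriorityCreateInfoEXT
  // (VK_EXT_global_priority) into DeviceQueueCreateInfo::pNext to request a
  // system-wide scheduling priority for the queue; device creation can fail
  // with VK_ERROR_NOT_PERMITTED_EXT if the caller lacks the required
  // privileges. `queueCreateInfo` is hypothetical.
  //
  //   vk::DeviceQueueGlobalPriorityCreateInfoEXT priorityInfo;
  //   priorityInfo.setGlobalPriority( vk::QueueGlobalPriorityEXT::eHigh );
  //   queueCreateInfo.setPNext( &priorityInfo );  // vk::DeviceQueueCreateInfo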
struct DeviceQueueInfo2
{
VULKAN_HPP_CONSTEXPR DeviceQueueInfo2( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ = {},
uint32_t queueFamilyIndex_ = {},
uint32_t queueIndex_ = {} ) VULKAN_HPP_NOEXCEPT
: flags( flags_ )
, queueFamilyIndex( queueFamilyIndex_ )
, queueIndex( queueIndex_ )
{}
VULKAN_HPP_CONSTEXPR DeviceQueueInfo2( DeviceQueueInfo2 const& rhs ) VULKAN_HPP_NOEXCEPT
: pNext( rhs.pNext )
, flags( rhs.flags )
, queueFamilyIndex( rhs.queueFamilyIndex )
, queueIndex( rhs.queueIndex )
{}
DeviceQueueInfo2 & operator=( DeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
memcpy( &pNext, &rhs.pNext, sizeof( DeviceQueueInfo2 ) - offsetof( DeviceQueueInfo2, pNext ) );
return *this;
}
DeviceQueueInfo2( VkDeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = rhs;
}
DeviceQueueInfo2& operator=( VkDeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 const *>(&rhs);
return *this;
}
DeviceQueueInfo2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
DeviceQueueInfo2 & setFlags( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
DeviceQueueInfo2 & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
{
queueFamilyIndex = queueFamilyIndex_;
return *this;
}
DeviceQueueInfo2 & setQueueIndex( uint32_t queueIndex_ ) VULKAN_HPP_NOEXCEPT
{
queueIndex = queueIndex_;
return *this;
}
operator VkDeviceQueueInfo2 const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDeviceQueueInfo2*>( this );
}
operator VkDeviceQueueInfo2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDeviceQueueInfo2*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DeviceQueueInfo2 const& ) const = default;
#else
bool operator==( DeviceQueueInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( queueFamilyIndex == rhs.queueFamilyIndex )
&& ( queueIndex == rhs.queueIndex );
}
bool operator!=( DeviceQueueInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueInfo2;
const void* pNext = {};
VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags = {};
uint32_t queueFamilyIndex = {};
uint32_t queueIndex = {};
};
static_assert( sizeof( DeviceQueueInfo2 ) == sizeof( VkDeviceQueueInfo2 ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DeviceQueueInfo2>::value, "struct wrapper is not a standard layout!" );
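  // Usage sketch (illustrative): DeviceQueueInfo2 parameterises
  // vkGetDeviceQueue2 and is required to retrieve queues that were created
  // with non-default DeviceQueueCreateFlags (e.g. protected-capable queues).
  // Assuming the enhanced-mode wrapper and a hypothetical `device` handle:
  //
  //   vk::DeviceQueueInfo2 queueInfo;
  //   queueInfo.setFlags( vk::DeviceQueueCreateFlagBits::eProtected )
  //            .setQueueFamilyIndex( 0 )
  //            .setQueueIndex( 0 );
  //   vk::Queue queue = device.getQueue2( queueInfo );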
} // namespace VULKAN_HPP_NAMESPACE