Mirror of https://github.com/skyline-emu/skyline.git (synced 2024-11-16 22:29:17 +01:00)
500b49d329
This commit adds Vulkan-Hpp as a library to the project. The headers are from a modified version of `VulkanHppGenerator`. They are broken into multiple files to avoid exceeding the Intellisense file size limit of Android Studio.
8925 lines
429 KiB
C++
// Copyright (c) 2015-2019 The Khronos Group Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// ---- Exceptions to the Apache 2.0 License: ----
//
// As an exception, if you use this Software to generate code and portions of
// this Software are embedded into the generated code as a result, you may
// redistribute such product without providing attribution as would otherwise
// be required by Sections 4(a), 4(b) and 4(d) of the License.
//
// In addition, if you combine or link code generated by this Software with
// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
// ("`Combined Software`") and if a court of competent jurisdiction determines
// that the patent provision (Section 3), the indemnity provision (Section 9)
// or other Section of the License conflicts with the conditions of the
// applicable GPL or LGPL license, you may retroactively and prospectively
// choose to deem waived or otherwise exclude such Section(s) of the License,
// but only in their entirety and only with respect to the Combined Software.
//

// This header is generated from the Khronos Vulkan XML API Registry.

#pragma once

#include "../handles.hpp"
#include "VkAcquire.hpp"
#include "VkAcceleration.hpp"
#include "VkApplication.hpp"
#include "VkInitialize.hpp"
#include "VkAllocation.hpp"
#include "VkExternal.hpp"
#include "VkBind.hpp"
#include "VkObject.hpp"
#include "VkCooperative.hpp"
#include "VkAndroid.hpp"
#include "VkImport.hpp"
#include "VkImage.hpp"
#include "VkDescriptor.hpp"
#include "VkBase.hpp"
#include "VkAttachment.hpp"
#include "VkBuffer.hpp"
#include "VkFramebuffer.hpp"
#include "VkCalibrated.hpp"
#include "VkDevice.hpp"
#include "VkCheckpoint.hpp"
#include "VkConformance.hpp"
#include "VkClear.hpp"
#include "VkCmd.hpp"
#include "VkExtension.hpp"
#include "VkCoarse.hpp"
#include "VkCommand.hpp"
#include "VkMetal.hpp"
#include "VkFormat.hpp"
#include "VkComponent.hpp"
#include "VkCopy.hpp"
#include "VkCompute.hpp"
#include "VkPast.hpp"
#include "VkConditional.hpp"
#include "VkMapped.hpp"
#include "VkD3D.hpp"
#include "VkDebug.hpp"
#include "VkFence.hpp"
#include "VkDedicated.hpp"
#include "VkDraw.hpp"
#include "VkDispatch.hpp"
#include "VkDisplay.hpp"
#include "VkDrm.hpp"
#include "VkEvent.hpp"
#include "VkExport.hpp"
#include "VkExtent.hpp"
#include "VkPerformance.hpp"
#include "VkFilter.hpp"
#include "VkGeometry.hpp"
#include "VkGraphics.hpp"
#include "VkHdr.hpp"
#include "VkHeadless.hpp"
#include "VkMultisample.hpp"
#include "VkI.hpp"
#include "VkIndirect.hpp"
#include "VkInput.hpp"
#include "VkOffset.hpp"
#include "VkMemory.hpp"
#include "VkInstance.hpp"
#include "VkLayer.hpp"
#include "VkMac.hpp"
#include "VkPhysical.hpp"

namespace VULKAN_HPP_NAMESPACE
{
  struct PhysicalDevice16BitStorageFeatures
  {
    VULKAN_HPP_CONSTEXPR PhysicalDevice16BitStorageFeatures( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ = {},
            VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ = {},
            VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ = {},
            VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ = {} ) VULKAN_HPP_NOEXCEPT
      : storageBuffer16BitAccess( storageBuffer16BitAccess_ )
      , uniformAndStorageBuffer16BitAccess( uniformAndStorageBuffer16BitAccess_ )
      , storagePushConstant16( storagePushConstant16_ )
      , storageInputOutput16( storageInputOutput16_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDevice16BitStorageFeatures( PhysicalDevice16BitStorageFeatures const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , storageBuffer16BitAccess( rhs.storageBuffer16BitAccess )
      , uniformAndStorageBuffer16BitAccess( rhs.uniformAndStorageBuffer16BitAccess )
      , storagePushConstant16( rhs.storagePushConstant16 )
      , storageInputOutput16( rhs.storageInputOutput16 )
    {}

    PhysicalDevice16BitStorageFeatures & operator=( PhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDevice16BitStorageFeatures ) - offsetof( PhysicalDevice16BitStorageFeatures, pNext ) );
      return *this;
    }

    PhysicalDevice16BitStorageFeatures( VkPhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    PhysicalDevice16BitStorageFeatures& operator=( VkPhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeatures const *>(&rhs);
      return *this;
    }

    PhysicalDevice16BitStorageFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDevice16BitStorageFeatures & setStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT
    {
      storageBuffer16BitAccess = storageBuffer16BitAccess_;
      return *this;
    }

    PhysicalDevice16BitStorageFeatures & setUniformAndStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT
    {
      uniformAndStorageBuffer16BitAccess = uniformAndStorageBuffer16BitAccess_;
      return *this;
    }

    PhysicalDevice16BitStorageFeatures & setStoragePushConstant16( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ ) VULKAN_HPP_NOEXCEPT
    {
      storagePushConstant16 = storagePushConstant16_;
      return *this;
    }

    PhysicalDevice16BitStorageFeatures & setStorageInputOutput16( VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ ) VULKAN_HPP_NOEXCEPT
    {
      storageInputOutput16 = storageInputOutput16_;
      return *this;
    }

    operator VkPhysicalDevice16BitStorageFeatures const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDevice16BitStorageFeatures*>( this );
    }

    operator VkPhysicalDevice16BitStorageFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDevice16BitStorageFeatures*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDevice16BitStorageFeatures const& ) const = default;
#else
    bool operator==( PhysicalDevice16BitStorageFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( storageBuffer16BitAccess == rhs.storageBuffer16BitAccess )
          && ( uniformAndStorageBuffer16BitAccess == rhs.uniformAndStorageBuffer16BitAccess )
          && ( storagePushConstant16 == rhs.storagePushConstant16 )
          && ( storageInputOutput16 == rhs.storageInputOutput16 );
    }

    bool operator!=( PhysicalDevice16BitStorageFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevice16BitStorageFeatures;
    void* pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess = {};
    VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess = {};
    VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16 = {};
    VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16 = {};
  };
  static_assert( sizeof( PhysicalDevice16BitStorageFeatures ) == sizeof( VkPhysicalDevice16BitStorageFeatures ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDevice16BitStorageFeatures>::value, "struct wrapper is not a standard layout!" );
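
  // Usage sketch (illustrative, not generated from the registry): the set*() members above
  // return *this, so a feature struct can be filled fluently and then linked into the pNext
  // chain of DeviceCreateInfo to enable the features at device creation. The DeviceCreateInfo,
  // PhysicalDevice and createDevice wrappers are assumed to be provided by the other split
  // headers (handles.hpp and related files), and the default `vk` namespace is assumed.
  //
  //   vk::PhysicalDevice16BitStorageFeatures storage16;
  //   storage16.setStorageBuffer16BitAccess( VK_TRUE )
  //            .setUniformAndStorageBuffer16BitAccess( VK_TRUE );
  //
  //   vk::DeviceCreateInfo deviceCreateInfo;
  //   deviceCreateInfo.setPNext( &storage16 );   // joins the pNext chain
  //   // vk::Device device = physicalDevice.createDevice( deviceCreateInfo );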

  struct PhysicalDevice8BitStorageFeaturesKHR
  {
    VULKAN_HPP_CONSTEXPR PhysicalDevice8BitStorageFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ = {},
            VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ = {},
            VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ = {} ) VULKAN_HPP_NOEXCEPT
      : storageBuffer8BitAccess( storageBuffer8BitAccess_ )
      , uniformAndStorageBuffer8BitAccess( uniformAndStorageBuffer8BitAccess_ )
      , storagePushConstant8( storagePushConstant8_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDevice8BitStorageFeaturesKHR( PhysicalDevice8BitStorageFeaturesKHR const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , storageBuffer8BitAccess( rhs.storageBuffer8BitAccess )
      , uniformAndStorageBuffer8BitAccess( rhs.uniformAndStorageBuffer8BitAccess )
      , storagePushConstant8( rhs.storagePushConstant8 )
    {}

    PhysicalDevice8BitStorageFeaturesKHR & operator=( PhysicalDevice8BitStorageFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDevice8BitStorageFeaturesKHR ) - offsetof( PhysicalDevice8BitStorageFeaturesKHR, pNext ) );
      return *this;
    }

    PhysicalDevice8BitStorageFeaturesKHR( VkPhysicalDevice8BitStorageFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    PhysicalDevice8BitStorageFeaturesKHR& operator=( VkPhysicalDevice8BitStorageFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevice8BitStorageFeaturesKHR const *>(&rhs);
      return *this;
    }

    PhysicalDevice8BitStorageFeaturesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDevice8BitStorageFeaturesKHR & setStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT
    {
      storageBuffer8BitAccess = storageBuffer8BitAccess_;
      return *this;
    }

    PhysicalDevice8BitStorageFeaturesKHR & setUniformAndStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT
    {
      uniformAndStorageBuffer8BitAccess = uniformAndStorageBuffer8BitAccess_;
      return *this;
    }

    PhysicalDevice8BitStorageFeaturesKHR & setStoragePushConstant8( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ ) VULKAN_HPP_NOEXCEPT
    {
      storagePushConstant8 = storagePushConstant8_;
      return *this;
    }

    operator VkPhysicalDevice8BitStorageFeaturesKHR const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDevice8BitStorageFeaturesKHR*>( this );
    }

    operator VkPhysicalDevice8BitStorageFeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDevice8BitStorageFeaturesKHR*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDevice8BitStorageFeaturesKHR const& ) const = default;
#else
    bool operator==( PhysicalDevice8BitStorageFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( storageBuffer8BitAccess == rhs.storageBuffer8BitAccess )
          && ( uniformAndStorageBuffer8BitAccess == rhs.uniformAndStorageBuffer8BitAccess )
          && ( storagePushConstant8 == rhs.storagePushConstant8 );
    }

    bool operator!=( PhysicalDevice8BitStorageFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevice8BitStorageFeaturesKHR;
    void* pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess = {};
    VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess = {};
    VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8 = {};
  };
  static_assert( sizeof( PhysicalDevice8BitStorageFeaturesKHR ) == sizeof( VkPhysicalDevice8BitStorageFeaturesKHR ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDevice8BitStorageFeaturesKHR>::value, "struct wrapper is not a standard layout!" );

  struct PhysicalDeviceASTCDecodeFeaturesEXT
  {
    VULKAN_HPP_CONSTEXPR PhysicalDeviceASTCDecodeFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent_ = {} ) VULKAN_HPP_NOEXCEPT
      : decodeModeSharedExponent( decodeModeSharedExponent_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceASTCDecodeFeaturesEXT( PhysicalDeviceASTCDecodeFeaturesEXT const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , decodeModeSharedExponent( rhs.decodeModeSharedExponent )
    {}

    PhysicalDeviceASTCDecodeFeaturesEXT & operator=( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceASTCDecodeFeaturesEXT ) - offsetof( PhysicalDeviceASTCDecodeFeaturesEXT, pNext ) );
      return *this;
    }

    PhysicalDeviceASTCDecodeFeaturesEXT( VkPhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    PhysicalDeviceASTCDecodeFeaturesEXT& operator=( VkPhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceASTCDecodeFeaturesEXT const *>(&rhs);
      return *this;
    }

    PhysicalDeviceASTCDecodeFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceASTCDecodeFeaturesEXT & setDecodeModeSharedExponent( VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent_ ) VULKAN_HPP_NOEXCEPT
    {
      decodeModeSharedExponent = decodeModeSharedExponent_;
      return *this;
    }

    operator VkPhysicalDeviceASTCDecodeFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceASTCDecodeFeaturesEXT*>( this );
    }

    operator VkPhysicalDeviceASTCDecodeFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceASTCDecodeFeaturesEXT*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceASTCDecodeFeaturesEXT const& ) const = default;
#else
    bool operator==( PhysicalDeviceASTCDecodeFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( decodeModeSharedExponent == rhs.decodeModeSharedExponent );
    }

    bool operator!=( PhysicalDeviceASTCDecodeFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT;
    void* pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent = {};
  };
  static_assert( sizeof( PhysicalDeviceASTCDecodeFeaturesEXT ) == sizeof( VkPhysicalDeviceASTCDecodeFeaturesEXT ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceASTCDecodeFeaturesEXT>::value, "struct wrapper is not a standard layout!" );

  struct PhysicalDeviceBlendOperationAdvancedFeaturesEXT
  {
    VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations_ = {} ) VULKAN_HPP_NOEXCEPT
      : advancedBlendCoherentOperations( advancedBlendCoherentOperations_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedFeaturesEXT( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , advancedBlendCoherentOperations( rhs.advancedBlendCoherentOperations )
    {}

    PhysicalDeviceBlendOperationAdvancedFeaturesEXT & operator=( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceBlendOperationAdvancedFeaturesEXT ) - offsetof( PhysicalDeviceBlendOperationAdvancedFeaturesEXT, pNext ) );
      return *this;
    }

    PhysicalDeviceBlendOperationAdvancedFeaturesEXT( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    PhysicalDeviceBlendOperationAdvancedFeaturesEXT& operator=( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedFeaturesEXT const *>(&rhs);
      return *this;
    }

    PhysicalDeviceBlendOperationAdvancedFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceBlendOperationAdvancedFeaturesEXT & setAdvancedBlendCoherentOperations( VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations_ ) VULKAN_HPP_NOEXCEPT
    {
      advancedBlendCoherentOperations = advancedBlendCoherentOperations_;
      return *this;
    }

    operator VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT*>( this );
    }

    operator VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const& ) const = default;
#else
    bool operator==( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( advancedBlendCoherentOperations == rhs.advancedBlendCoherentOperations );
    }

    bool operator!=( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT;
    void* pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations = {};
  };
  static_assert( sizeof( PhysicalDeviceBlendOperationAdvancedFeaturesEXT ) == sizeof( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceBlendOperationAdvancedFeaturesEXT>::value, "struct wrapper is not a standard layout!" );

  struct PhysicalDeviceBlendOperationAdvancedPropertiesEXT
  {
    VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedPropertiesEXT( uint32_t advancedBlendMaxColorAttachments_ = {},
            VULKAN_HPP_NAMESPACE::Bool32 advancedBlendIndependentBlend_ = {},
            VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedSrcColor_ = {},
            VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedDstColor_ = {},
            VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCorrelatedOverlap_ = {},
            VULKAN_HPP_NAMESPACE::Bool32 advancedBlendAllOperations_ = {} ) VULKAN_HPP_NOEXCEPT
      : advancedBlendMaxColorAttachments( advancedBlendMaxColorAttachments_ )
      , advancedBlendIndependentBlend( advancedBlendIndependentBlend_ )
      , advancedBlendNonPremultipliedSrcColor( advancedBlendNonPremultipliedSrcColor_ )
      , advancedBlendNonPremultipliedDstColor( advancedBlendNonPremultipliedDstColor_ )
      , advancedBlendCorrelatedOverlap( advancedBlendCorrelatedOverlap_ )
      , advancedBlendAllOperations( advancedBlendAllOperations_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedPropertiesEXT( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , advancedBlendMaxColorAttachments( rhs.advancedBlendMaxColorAttachments )
      , advancedBlendIndependentBlend( rhs.advancedBlendIndependentBlend )
      , advancedBlendNonPremultipliedSrcColor( rhs.advancedBlendNonPremultipliedSrcColor )
      , advancedBlendNonPremultipliedDstColor( rhs.advancedBlendNonPremultipliedDstColor )
      , advancedBlendCorrelatedOverlap( rhs.advancedBlendCorrelatedOverlap )
      , advancedBlendAllOperations( rhs.advancedBlendAllOperations )
    {}

    PhysicalDeviceBlendOperationAdvancedPropertiesEXT & operator=( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceBlendOperationAdvancedPropertiesEXT ) - offsetof( PhysicalDeviceBlendOperationAdvancedPropertiesEXT, pNext ) );
      return *this;
    }

    PhysicalDeviceBlendOperationAdvancedPropertiesEXT( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    PhysicalDeviceBlendOperationAdvancedPropertiesEXT& operator=( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedPropertiesEXT const *>(&rhs);
      return *this;
    }

    operator VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT*>( this );
    }

    operator VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const& ) const = default;
#else
    bool operator==( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( advancedBlendMaxColorAttachments == rhs.advancedBlendMaxColorAttachments )
          && ( advancedBlendIndependentBlend == rhs.advancedBlendIndependentBlend )
          && ( advancedBlendNonPremultipliedSrcColor == rhs.advancedBlendNonPremultipliedSrcColor )
          && ( advancedBlendNonPremultipliedDstColor == rhs.advancedBlendNonPremultipliedDstColor )
          && ( advancedBlendCorrelatedOverlap == rhs.advancedBlendCorrelatedOverlap )
          && ( advancedBlendAllOperations == rhs.advancedBlendAllOperations );
    }

    bool operator!=( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT;
    void* pNext = {};
    uint32_t advancedBlendMaxColorAttachments = {};
    VULKAN_HPP_NAMESPACE::Bool32 advancedBlendIndependentBlend = {};
    VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedSrcColor = {};
    VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedDstColor = {};
    VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCorrelatedOverlap = {};
    VULKAN_HPP_NAMESPACE::Bool32 advancedBlendAllOperations = {};
  };
  static_assert( sizeof( PhysicalDeviceBlendOperationAdvancedPropertiesEXT ) == sizeof( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceBlendOperationAdvancedPropertiesEXT>::value, "struct wrapper is not a standard layout!" );

  struct PhysicalDeviceBufferDeviceAddressFeaturesEXT
  {
    VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {},
            VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {},
            VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {} ) VULKAN_HPP_NOEXCEPT
      : bufferDeviceAddress( bufferDeviceAddress_ )
      , bufferDeviceAddressCaptureReplay( bufferDeviceAddressCaptureReplay_ )
      , bufferDeviceAddressMultiDevice( bufferDeviceAddressMultiDevice_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeaturesEXT( PhysicalDeviceBufferDeviceAddressFeaturesEXT const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , bufferDeviceAddress( rhs.bufferDeviceAddress )
      , bufferDeviceAddressCaptureReplay( rhs.bufferDeviceAddressCaptureReplay )
      , bufferDeviceAddressMultiDevice( rhs.bufferDeviceAddressMultiDevice )
    {}

    PhysicalDeviceBufferDeviceAddressFeaturesEXT & operator=( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceBufferDeviceAddressFeaturesEXT ) - offsetof( PhysicalDeviceBufferDeviceAddressFeaturesEXT, pNext ) );
      return *this;
    }

    PhysicalDeviceBufferDeviceAddressFeaturesEXT( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    PhysicalDeviceBufferDeviceAddressFeaturesEXT& operator=( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeaturesEXT const *>(&rhs);
      return *this;
    }

    PhysicalDeviceBufferDeviceAddressFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceBufferDeviceAddressFeaturesEXT & setBufferDeviceAddress( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ ) VULKAN_HPP_NOEXCEPT
    {
      bufferDeviceAddress = bufferDeviceAddress_;
      return *this;
    }

    PhysicalDeviceBufferDeviceAddressFeaturesEXT & setBufferDeviceAddressCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
    {
      bufferDeviceAddressCaptureReplay = bufferDeviceAddressCaptureReplay_;
      return *this;
    }

    PhysicalDeviceBufferDeviceAddressFeaturesEXT & setBufferDeviceAddressMultiDevice( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ ) VULKAN_HPP_NOEXCEPT
    {
      bufferDeviceAddressMultiDevice = bufferDeviceAddressMultiDevice_;
      return *this;
    }

    operator VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceBufferDeviceAddressFeaturesEXT*>( this );
    }

    operator VkPhysicalDeviceBufferDeviceAddressFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceBufferDeviceAddressFeaturesEXT*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceBufferDeviceAddressFeaturesEXT const& ) const = default;
#else
    bool operator==( PhysicalDeviceBufferDeviceAddressFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( bufferDeviceAddress == rhs.bufferDeviceAddress )
          && ( bufferDeviceAddressCaptureReplay == rhs.bufferDeviceAddressCaptureReplay )
          && ( bufferDeviceAddressMultiDevice == rhs.bufferDeviceAddressMultiDevice );
    }

    bool operator!=( PhysicalDeviceBufferDeviceAddressFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT;
    void* pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress = {};
    VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay = {};
    VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice = {};
  };
  static_assert( sizeof( PhysicalDeviceBufferDeviceAddressFeaturesEXT ) == sizeof( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceBufferDeviceAddressFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
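
  // Usage sketch (illustrative, not generated from the registry): feature structs such as
  // PhysicalDeviceBufferDeviceAddressFeaturesEXT are typically filled in by the driver when
  // linked into the pNext chain of PhysicalDeviceFeatures2 and queried through
  // PhysicalDevice::getFeatures2, which is assumed to come from the handles headers.
  // Assuming the default `vk` namespace:
  //
  //   vk::PhysicalDeviceBufferDeviceAddressFeaturesEXT bdaFeatures;
  //   vk::PhysicalDeviceFeatures2 features2;
  //   features2.pNext = &bdaFeatures;
  //   physicalDevice.getFeatures2( &features2 );   // driver writes the supported flags
  //   bool canUseBda = bdaFeatures.bufferDeviceAddress == VK_TRUE;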

  struct PhysicalDeviceCoherentMemoryFeaturesAMD
  {
    VULKAN_HPP_CONSTEXPR PhysicalDeviceCoherentMemoryFeaturesAMD( VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory_ = {} ) VULKAN_HPP_NOEXCEPT
      : deviceCoherentMemory( deviceCoherentMemory_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceCoherentMemoryFeaturesAMD( PhysicalDeviceCoherentMemoryFeaturesAMD const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , deviceCoherentMemory( rhs.deviceCoherentMemory )
    {}

    PhysicalDeviceCoherentMemoryFeaturesAMD & operator=( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceCoherentMemoryFeaturesAMD ) - offsetof( PhysicalDeviceCoherentMemoryFeaturesAMD, pNext ) );
      return *this;
    }

    PhysicalDeviceCoherentMemoryFeaturesAMD( VkPhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    PhysicalDeviceCoherentMemoryFeaturesAMD& operator=( VkPhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD const *>(&rhs);
      return *this;
    }

    PhysicalDeviceCoherentMemoryFeaturesAMD & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceCoherentMemoryFeaturesAMD & setDeviceCoherentMemory( VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceCoherentMemory = deviceCoherentMemory_;
      return *this;
    }

    operator VkPhysicalDeviceCoherentMemoryFeaturesAMD const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceCoherentMemoryFeaturesAMD*>( this );
    }

    operator VkPhysicalDeviceCoherentMemoryFeaturesAMD &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceCoherentMemoryFeaturesAMD*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceCoherentMemoryFeaturesAMD const& ) const = default;
#else
    bool operator==( PhysicalDeviceCoherentMemoryFeaturesAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( deviceCoherentMemory == rhs.deviceCoherentMemory );
    }

    bool operator!=( PhysicalDeviceCoherentMemoryFeaturesAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD;
    void* pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory = {};
  };
  static_assert( sizeof( PhysicalDeviceCoherentMemoryFeaturesAMD ) == sizeof( VkPhysicalDeviceCoherentMemoryFeaturesAMD ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceCoherentMemoryFeaturesAMD>::value, "struct wrapper is not a standard layout!" );

  struct PhysicalDeviceComputeShaderDerivativesFeaturesNV
  {
    VULKAN_HPP_CONSTEXPR PhysicalDeviceComputeShaderDerivativesFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads_ = {},
            VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear_ = {} ) VULKAN_HPP_NOEXCEPT
      : computeDerivativeGroupQuads( computeDerivativeGroupQuads_ )
      , computeDerivativeGroupLinear( computeDerivativeGroupLinear_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceComputeShaderDerivativesFeaturesNV( PhysicalDeviceComputeShaderDerivativesFeaturesNV const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , computeDerivativeGroupQuads( rhs.computeDerivativeGroupQuads )
      , computeDerivativeGroupLinear( rhs.computeDerivativeGroupLinear )
    {}

    PhysicalDeviceComputeShaderDerivativesFeaturesNV & operator=( PhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceComputeShaderDerivativesFeaturesNV ) - offsetof( PhysicalDeviceComputeShaderDerivativesFeaturesNV, pNext ) );
      return *this;
    }

    PhysicalDeviceComputeShaderDerivativesFeaturesNV( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    PhysicalDeviceComputeShaderDerivativesFeaturesNV& operator=( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesNV const *>(&rhs);
      return *this;
    }

    PhysicalDeviceComputeShaderDerivativesFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceComputeShaderDerivativesFeaturesNV & setComputeDerivativeGroupQuads( VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads_ ) VULKAN_HPP_NOEXCEPT
    {
      computeDerivativeGroupQuads = computeDerivativeGroupQuads_;
      return *this;
    }

    PhysicalDeviceComputeShaderDerivativesFeaturesNV & setComputeDerivativeGroupLinear( VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear_ ) VULKAN_HPP_NOEXCEPT
    {
      computeDerivativeGroupLinear = computeDerivativeGroupLinear_;
      return *this;
    }

    operator VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceComputeShaderDerivativesFeaturesNV*>( this );
    }

    operator VkPhysicalDeviceComputeShaderDerivativesFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceComputeShaderDerivativesFeaturesNV*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceComputeShaderDerivativesFeaturesNV const& ) const = default;
#else
    bool operator==( PhysicalDeviceComputeShaderDerivativesFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( computeDerivativeGroupQuads == rhs.computeDerivativeGroupQuads )
          && ( computeDerivativeGroupLinear == rhs.computeDerivativeGroupLinear );
    }

    bool operator!=( PhysicalDeviceComputeShaderDerivativesFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV;
    void* pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads = {};
    VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear = {};
  };
  static_assert( sizeof( PhysicalDeviceComputeShaderDerivativesFeaturesNV ) == sizeof( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceComputeShaderDerivativesFeaturesNV>::value, "struct wrapper is not a standard layout!" );

  struct PhysicalDeviceConditionalRenderingFeaturesEXT
  {
    VULKAN_HPP_CONSTEXPR PhysicalDeviceConditionalRenderingFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering_ = {},
            VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering_ = {} ) VULKAN_HPP_NOEXCEPT
      : conditionalRendering( conditionalRendering_ )
      , inheritedConditionalRendering( inheritedConditionalRendering_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceConditionalRenderingFeaturesEXT( PhysicalDeviceConditionalRenderingFeaturesEXT const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , conditionalRendering( rhs.conditionalRendering )
      , inheritedConditionalRendering( rhs.inheritedConditionalRendering )
    {}

    PhysicalDeviceConditionalRenderingFeaturesEXT & operator=( PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceConditionalRenderingFeaturesEXT ) - offsetof( PhysicalDeviceConditionalRenderingFeaturesEXT, pNext ) );
      return *this;
    }

    PhysicalDeviceConditionalRenderingFeaturesEXT( VkPhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    PhysicalDeviceConditionalRenderingFeaturesEXT& operator=( VkPhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceConditionalRenderingFeaturesEXT const *>(&rhs);
      return *this;
    }

    PhysicalDeviceConditionalRenderingFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceConditionalRenderingFeaturesEXT & setConditionalRendering( VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering_ ) VULKAN_HPP_NOEXCEPT
    {
      conditionalRendering = conditionalRendering_;
      return *this;
    }

    PhysicalDeviceConditionalRenderingFeaturesEXT & setInheritedConditionalRendering( VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering_ ) VULKAN_HPP_NOEXCEPT
    {
      inheritedConditionalRendering = inheritedConditionalRendering_;
      return *this;
    }

    operator VkPhysicalDeviceConditionalRenderingFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceConditionalRenderingFeaturesEXT*>( this );
    }

    operator VkPhysicalDeviceConditionalRenderingFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceConditionalRenderingFeaturesEXT*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceConditionalRenderingFeaturesEXT const& ) const = default;
#else
    bool operator==( PhysicalDeviceConditionalRenderingFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( conditionalRendering == rhs.conditionalRendering )
          && ( inheritedConditionalRendering == rhs.inheritedConditionalRendering );
    }

    bool operator!=( PhysicalDeviceConditionalRenderingFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT;
    void* pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering = {};
    VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering = {};
  };
  static_assert( sizeof( PhysicalDeviceConditionalRenderingFeaturesEXT ) == sizeof( VkPhysicalDeviceConditionalRenderingFeaturesEXT ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceConditionalRenderingFeaturesEXT>::value, "struct wrapper is not a standard layout!" );

  struct PhysicalDeviceConservativeRasterizationPropertiesEXT
  {
    VULKAN_HPP_CONSTEXPR PhysicalDeviceConservativeRasterizationPropertiesEXT( float primitiveOverestimationSize_ = {},
            float maxExtraPrimitiveOverestimationSize_ = {},
            float extraPrimitiveOverestimationSizeGranularity_ = {},
            VULKAN_HPP_NAMESPACE::Bool32 primitiveUnderestimation_ = {},
            VULKAN_HPP_NAMESPACE::Bool32 conservativePointAndLineRasterization_ = {},
            VULKAN_HPP_NAMESPACE::Bool32 degenerateTrianglesRasterized_ = {},
            VULKAN_HPP_NAMESPACE::Bool32 degenerateLinesRasterized_ = {},
            VULKAN_HPP_NAMESPACE::Bool32 fullyCoveredFragmentShaderInputVariable_ = {},
            VULKAN_HPP_NAMESPACE::Bool32 conservativeRasterizationPostDepthCoverage_ = {} ) VULKAN_HPP_NOEXCEPT
      : primitiveOverestimationSize( primitiveOverestimationSize_ )
      , maxExtraPrimitiveOverestimationSize( maxExtraPrimitiveOverestimationSize_ )
      , extraPrimitiveOverestimationSizeGranularity( extraPrimitiveOverestimationSizeGranularity_ )
      , primitiveUnderestimation( primitiveUnderestimation_ )
      , conservativePointAndLineRasterization( conservativePointAndLineRasterization_ )
      , degenerateTrianglesRasterized( degenerateTrianglesRasterized_ )
      , degenerateLinesRasterized( degenerateLinesRasterized_ )
      , fullyCoveredFragmentShaderInputVariable( fullyCoveredFragmentShaderInputVariable_ )
      , conservativeRasterizationPostDepthCoverage( conservativeRasterizationPostDepthCoverage_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceConservativeRasterizationPropertiesEXT( PhysicalDeviceConservativeRasterizationPropertiesEXT const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , primitiveOverestimationSize( rhs.primitiveOverestimationSize )
      , maxExtraPrimitiveOverestimationSize( rhs.maxExtraPrimitiveOverestimationSize )
      , extraPrimitiveOverestimationSizeGranularity( rhs.extraPrimitiveOverestimationSizeGranularity )
      , primitiveUnderestimation( rhs.primitiveUnderestimation )
      , conservativePointAndLineRasterization( rhs.conservativePointAndLineRasterization )
      , degenerateTrianglesRasterized( rhs.degenerateTrianglesRasterized )
      , degenerateLinesRasterized( rhs.degenerateLinesRasterized )
      , fullyCoveredFragmentShaderInputVariable( rhs.fullyCoveredFragmentShaderInputVariable )
      , conservativeRasterizationPostDepthCoverage( rhs.conservativeRasterizationPostDepthCoverage )
    {}

    PhysicalDeviceConservativeRasterizationPropertiesEXT & operator=( PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceConservativeRasterizationPropertiesEXT ) - offsetof( PhysicalDeviceConservativeRasterizationPropertiesEXT, pNext ) );
      return *this;
    }

    PhysicalDeviceConservativeRasterizationPropertiesEXT( VkPhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    PhysicalDeviceConservativeRasterizationPropertiesEXT& operator=( VkPhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceConservativeRasterizationPropertiesEXT const *>(&rhs);
      return *this;
    }

    operator VkPhysicalDeviceConservativeRasterizationPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceConservativeRasterizationPropertiesEXT*>( this );
    }

    operator VkPhysicalDeviceConservativeRasterizationPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceConservativeRasterizationPropertiesEXT*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceConservativeRasterizationPropertiesEXT const& ) const = default;
#else
    bool operator==( PhysicalDeviceConservativeRasterizationPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( primitiveOverestimationSize == rhs.primitiveOverestimationSize )
          && ( maxExtraPrimitiveOverestimationSize == rhs.maxExtraPrimitiveOverestimationSize )
          && ( extraPrimitiveOverestimationSizeGranularity == rhs.extraPrimitiveOverestimationSizeGranularity )
          && ( primitiveUnderestimation == rhs.primitiveUnderestimation )
          && ( conservativePointAndLineRasterization == rhs.conservativePointAndLineRasterization )
          && ( degenerateTrianglesRasterized == rhs.degenerateTrianglesRasterized )
          && ( degenerateLinesRasterized == rhs.degenerateLinesRasterized )
          && ( fullyCoveredFragmentShaderInputVariable == rhs.fullyCoveredFragmentShaderInputVariable )
          && ( conservativeRasterizationPostDepthCoverage == rhs.conservativeRasterizationPostDepthCoverage );
    }

    bool operator!=( PhysicalDeviceConservativeRasterizationPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT;
    void* pNext = {};
    float primitiveOverestimationSize = {};
    float maxExtraPrimitiveOverestimationSize = {};
    float extraPrimitiveOverestimationSizeGranularity = {};
    VULKAN_HPP_NAMESPACE::Bool32 primitiveUnderestimation = {};
    VULKAN_HPP_NAMESPACE::Bool32 conservativePointAndLineRasterization = {};
    VULKAN_HPP_NAMESPACE::Bool32 degenerateTrianglesRasterized = {};
    VULKAN_HPP_NAMESPACE::Bool32 degenerateLinesRasterized = {};
    VULKAN_HPP_NAMESPACE::Bool32 fullyCoveredFragmentShaderInputVariable = {};
    VULKAN_HPP_NAMESPACE::Bool32 conservativeRasterizationPostDepthCoverage = {};
  };
  static_assert( sizeof( PhysicalDeviceConservativeRasterizationPropertiesEXT ) == sizeof( VkPhysicalDeviceConservativeRasterizationPropertiesEXT ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceConservativeRasterizationPropertiesEXT>::value, "struct wrapper is not a standard layout!" );

  struct PhysicalDeviceCooperativeMatrixFeaturesNV
  {
    VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix_ = {},
            VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess_ = {} ) VULKAN_HPP_NOEXCEPT
      : cooperativeMatrix( cooperativeMatrix_ )
      , cooperativeMatrixRobustBufferAccess( cooperativeMatrixRobustBufferAccess_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixFeaturesNV( PhysicalDeviceCooperativeMatrixFeaturesNV const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , cooperativeMatrix( rhs.cooperativeMatrix )
      , cooperativeMatrixRobustBufferAccess( rhs.cooperativeMatrixRobustBufferAccess )
    {}

    PhysicalDeviceCooperativeMatrixFeaturesNV & operator=( PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceCooperativeMatrixFeaturesNV ) - offsetof( PhysicalDeviceCooperativeMatrixFeaturesNV, pNext ) );
      return *this;
    }

    PhysicalDeviceCooperativeMatrixFeaturesNV( VkPhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    PhysicalDeviceCooperativeMatrixFeaturesNV& operator=( VkPhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesNV const *>(&rhs);
      return *this;
    }

    PhysicalDeviceCooperativeMatrixFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceCooperativeMatrixFeaturesNV & setCooperativeMatrix( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix_ ) VULKAN_HPP_NOEXCEPT
    {
      cooperativeMatrix = cooperativeMatrix_;
      return *this;
    }

    PhysicalDeviceCooperativeMatrixFeaturesNV & setCooperativeMatrixRobustBufferAccess( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess_ ) VULKAN_HPP_NOEXCEPT
    {
      cooperativeMatrixRobustBufferAccess = cooperativeMatrixRobustBufferAccess_;
      return *this;
    }

    operator VkPhysicalDeviceCooperativeMatrixFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceCooperativeMatrixFeaturesNV*>( this );
    }

    operator VkPhysicalDeviceCooperativeMatrixFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceCooperativeMatrixFeaturesNV*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceCooperativeMatrixFeaturesNV const& ) const = default;
#else
    bool operator==( PhysicalDeviceCooperativeMatrixFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( cooperativeMatrix == rhs.cooperativeMatrix )
          && ( cooperativeMatrixRobustBufferAccess == rhs.cooperativeMatrixRobustBufferAccess );
    }

    bool operator!=( PhysicalDeviceCooperativeMatrixFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV;
    void* pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix = {};
    VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess = {};
  };
  static_assert( sizeof( PhysicalDeviceCooperativeMatrixFeaturesNV ) == sizeof( VkPhysicalDeviceCooperativeMatrixFeaturesNV ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceCooperativeMatrixFeaturesNV>::value, "struct wrapper is not a standard layout!" );

  struct PhysicalDeviceCooperativeMatrixPropertiesNV
  {
    VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixPropertiesNV( VULKAN_HPP_NAMESPACE::ShaderStageFlags cooperativeMatrixSupportedStages_ = {} ) VULKAN_HPP_NOEXCEPT
      : cooperativeMatrixSupportedStages( cooperativeMatrixSupportedStages_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixPropertiesNV( PhysicalDeviceCooperativeMatrixPropertiesNV const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , cooperativeMatrixSupportedStages( rhs.cooperativeMatrixSupportedStages )
    {}

    PhysicalDeviceCooperativeMatrixPropertiesNV & operator=( PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceCooperativeMatrixPropertiesNV ) - offsetof( PhysicalDeviceCooperativeMatrixPropertiesNV, pNext ) );
      return *this;
    }

    PhysicalDeviceCooperativeMatrixPropertiesNV( VkPhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    PhysicalDeviceCooperativeMatrixPropertiesNV& operator=( VkPhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesNV const *>(&rhs);
      return *this;
    }

    operator VkPhysicalDeviceCooperativeMatrixPropertiesNV const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceCooperativeMatrixPropertiesNV*>( this );
    }

    operator VkPhysicalDeviceCooperativeMatrixPropertiesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceCooperativeMatrixPropertiesNV*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceCooperativeMatrixPropertiesNV const& ) const = default;
#else
    bool operator==( PhysicalDeviceCooperativeMatrixPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( cooperativeMatrixSupportedStages == rhs.cooperativeMatrixSupportedStages );
    }

    bool operator!=( PhysicalDeviceCooperativeMatrixPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV;
    void* pNext = {};
    VULKAN_HPP_NAMESPACE::ShaderStageFlags cooperativeMatrixSupportedStages = {};
  };
  static_assert( sizeof( PhysicalDeviceCooperativeMatrixPropertiesNV ) == sizeof( VkPhysicalDeviceCooperativeMatrixPropertiesNV ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceCooperativeMatrixPropertiesNV>::value, "struct wrapper is not a standard layout!" );
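
  // Usage sketch (illustrative, not generated from the registry): properties structs have no
  // setters because they are output-only; they are read back through the pNext chain of
  // PhysicalDeviceProperties2 via PhysicalDevice::getProperties2, which is assumed to come
  // from the handles headers. Assuming the default `vk` namespace:
  //
  //   vk::PhysicalDeviceCooperativeMatrixPropertiesNV coopMatrixProps;
  //   vk::PhysicalDeviceProperties2 properties2;
  //   properties2.pNext = &coopMatrixProps;
  //   physicalDevice.getProperties2( &properties2 );
  //   vk::ShaderStageFlags stages = coopMatrixProps.cooperativeMatrixSupportedStages;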

  struct PhysicalDeviceCornerSampledImageFeaturesNV
  {
    VULKAN_HPP_CONSTEXPR PhysicalDeviceCornerSampledImageFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage_ = {} ) VULKAN_HPP_NOEXCEPT
      : cornerSampledImage( cornerSampledImage_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceCornerSampledImageFeaturesNV( PhysicalDeviceCornerSampledImageFeaturesNV const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , cornerSampledImage( rhs.cornerSampledImage )
    {}

    PhysicalDeviceCornerSampledImageFeaturesNV & operator=( PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceCornerSampledImageFeaturesNV ) - offsetof( PhysicalDeviceCornerSampledImageFeaturesNV, pNext ) );
      return *this;
    }

    PhysicalDeviceCornerSampledImageFeaturesNV( VkPhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    PhysicalDeviceCornerSampledImageFeaturesNV& operator=( VkPhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCornerSampledImageFeaturesNV const *>(&rhs);
      return *this;
    }

    PhysicalDeviceCornerSampledImageFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceCornerSampledImageFeaturesNV & setCornerSampledImage( VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage_ ) VULKAN_HPP_NOEXCEPT
    {
      cornerSampledImage = cornerSampledImage_;
      return *this;
    }

    operator VkPhysicalDeviceCornerSampledImageFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceCornerSampledImageFeaturesNV*>( this );
    }

    operator VkPhysicalDeviceCornerSampledImageFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceCornerSampledImageFeaturesNV*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceCornerSampledImageFeaturesNV const& ) const = default;
#else
    bool operator==( PhysicalDeviceCornerSampledImageFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( cornerSampledImage == rhs.cornerSampledImage );
    }

    bool operator!=( PhysicalDeviceCornerSampledImageFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV;
    void* pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage = {};
  };
  static_assert( sizeof( PhysicalDeviceCornerSampledImageFeaturesNV ) == sizeof( VkPhysicalDeviceCornerSampledImageFeaturesNV ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceCornerSampledImageFeaturesNV>::value, "struct wrapper is not a standard layout!" );

  struct PhysicalDeviceCoverageReductionModeFeaturesNV
  {
    VULKAN_HPP_CONSTEXPR PhysicalDeviceCoverageReductionModeFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode_ = {} ) VULKAN_HPP_NOEXCEPT
      : coverageReductionMode( coverageReductionMode_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceCoverageReductionModeFeaturesNV( PhysicalDeviceCoverageReductionModeFeaturesNV const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , coverageReductionMode( rhs.coverageReductionMode )
    {}

    PhysicalDeviceCoverageReductionModeFeaturesNV & operator=( PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceCoverageReductionModeFeaturesNV ) - offsetof( PhysicalDeviceCoverageReductionModeFeaturesNV, pNext ) );
      return *this;
    }

    PhysicalDeviceCoverageReductionModeFeaturesNV( VkPhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    PhysicalDeviceCoverageReductionModeFeaturesNV& operator=( VkPhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCoverageReductionModeFeaturesNV const *>(&rhs);
      return *this;
    }

    PhysicalDeviceCoverageReductionModeFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceCoverageReductionModeFeaturesNV & setCoverageReductionMode( VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode_ ) VULKAN_HPP_NOEXCEPT
    {
      coverageReductionMode = coverageReductionMode_;
      return *this;
    }

    operator VkPhysicalDeviceCoverageReductionModeFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceCoverageReductionModeFeaturesNV*>( this );
    }

    operator VkPhysicalDeviceCoverageReductionModeFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceCoverageReductionModeFeaturesNV*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceCoverageReductionModeFeaturesNV const& ) const = default;
#else
    bool operator==( PhysicalDeviceCoverageReductionModeFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( coverageReductionMode == rhs.coverageReductionMode );
    }

    bool operator!=( PhysicalDeviceCoverageReductionModeFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV;
    void* pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode = {};
  };
  static_assert( sizeof( PhysicalDeviceCoverageReductionModeFeaturesNV ) == sizeof( VkPhysicalDeviceCoverageReductionModeFeaturesNV ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceCoverageReductionModeFeaturesNV>::value, "struct wrapper is not a standard layout!" );

  struct PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV
  {
    VULKAN_HPP_CONSTEXPR PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing_ = {} ) VULKAN_HPP_NOEXCEPT
      : dedicatedAllocationImageAliasing( dedicatedAllocationImageAliasing_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , dedicatedAllocationImageAliasing( rhs.dedicatedAllocationImageAliasing )
    {}

    PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & operator=( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV ) - offsetof( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV, pNext ) );
      return *this;
    }

    PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV& operator=( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const *>(&rhs);
      return *this;
    }

    PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & setDedicatedAllocationImageAliasing( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing_ ) VULKAN_HPP_NOEXCEPT
    {
      dedicatedAllocationImageAliasing = dedicatedAllocationImageAliasing_;
      return *this;
    }

    operator VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV*>( this );
    }

    operator VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const& ) const = default;
#else
    bool operator==( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( dedicatedAllocationImageAliasing == rhs.dedicatedAllocationImageAliasing );
    }

    bool operator!=( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV;
    void* pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing = {};
  };
  static_assert( sizeof( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV ) == sizeof( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV>::value, "struct wrapper is not a standard layout!" );

  struct PhysicalDeviceDepthClipEnableFeaturesEXT
  {
    VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipEnableFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ = {} ) VULKAN_HPP_NOEXCEPT
      : depthClipEnable( depthClipEnable_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipEnableFeaturesEXT( PhysicalDeviceDepthClipEnableFeaturesEXT const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , depthClipEnable( rhs.depthClipEnable )
    {}

    PhysicalDeviceDepthClipEnableFeaturesEXT & operator=( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceDepthClipEnableFeaturesEXT ) - offsetof( PhysicalDeviceDepthClipEnableFeaturesEXT, pNext ) );
      return *this;
    }

    PhysicalDeviceDepthClipEnableFeaturesEXT( VkPhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    PhysicalDeviceDepthClipEnableFeaturesEXT& operator=( VkPhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipEnableFeaturesEXT const *>(&rhs);
      return *this;
    }

    PhysicalDeviceDepthClipEnableFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceDepthClipEnableFeaturesEXT & setDepthClipEnable( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      depthClipEnable = depthClipEnable_;
      return *this;
    }

    operator VkPhysicalDeviceDepthClipEnableFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceDepthClipEnableFeaturesEXT*>( this );
    }

    operator VkPhysicalDeviceDepthClipEnableFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDepthClipEnableFeaturesEXT*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceDepthClipEnableFeaturesEXT const& ) const = default;
#else
    bool operator==( PhysicalDeviceDepthClipEnableFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( depthClipEnable == rhs.depthClipEnable );
    }

    bool operator!=( PhysicalDeviceDepthClipEnableFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT;
    void* pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable = {};
  };
  static_assert( sizeof( PhysicalDeviceDepthClipEnableFeaturesEXT ) == sizeof( VkPhysicalDeviceDepthClipEnableFeaturesEXT ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceDepthClipEnableFeaturesEXT>::value, "struct wrapper is not a standard layout!" );

  struct PhysicalDeviceDepthStencilResolvePropertiesKHR
  {
    VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthStencilResolvePropertiesKHR( VULKAN_HPP_NAMESPACE::ResolveModeFlagsKHR supportedDepthResolveModes_ = {},
                                                                         VULKAN_HPP_NAMESPACE::ResolveModeFlagsKHR supportedStencilResolveModes_ = {},
                                                                         VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone_ = {},
                                                                         VULKAN_HPP_NAMESPACE::Bool32 independentResolve_ = {} ) VULKAN_HPP_NOEXCEPT
      : supportedDepthResolveModes( supportedDepthResolveModes_ )
      , supportedStencilResolveModes( supportedStencilResolveModes_ )
      , independentResolveNone( independentResolveNone_ )
      , independentResolve( independentResolve_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthStencilResolvePropertiesKHR( PhysicalDeviceDepthStencilResolvePropertiesKHR const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , supportedDepthResolveModes( rhs.supportedDepthResolveModes )
      , supportedStencilResolveModes( rhs.supportedStencilResolveModes )
      , independentResolveNone( rhs.independentResolveNone )
      , independentResolve( rhs.independentResolve )
    {}

    PhysicalDeviceDepthStencilResolvePropertiesKHR & operator=( PhysicalDeviceDepthStencilResolvePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceDepthStencilResolvePropertiesKHR ) - offsetof( PhysicalDeviceDepthStencilResolvePropertiesKHR, pNext ) );
      return *this;
    }

    PhysicalDeviceDepthStencilResolvePropertiesKHR( VkPhysicalDeviceDepthStencilResolvePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    PhysicalDeviceDepthStencilResolvePropertiesKHR& operator=( VkPhysicalDeviceDepthStencilResolvePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthStencilResolvePropertiesKHR const *>(&rhs);
      return *this;
    }

    operator VkPhysicalDeviceDepthStencilResolvePropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceDepthStencilResolvePropertiesKHR*>( this );
    }

    operator VkPhysicalDeviceDepthStencilResolvePropertiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDepthStencilResolvePropertiesKHR*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceDepthStencilResolvePropertiesKHR const& ) const = default;
#else
    bool operator==( PhysicalDeviceDepthStencilResolvePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( supportedDepthResolveModes == rhs.supportedDepthResolveModes )
          && ( supportedStencilResolveModes == rhs.supportedStencilResolveModes )
          && ( independentResolveNone == rhs.independentResolveNone )
          && ( independentResolve == rhs.independentResolve );
    }

    bool operator!=( PhysicalDeviceDepthStencilResolvePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthStencilResolvePropertiesKHR;
    void* pNext = {};
    VULKAN_HPP_NAMESPACE::ResolveModeFlagsKHR supportedDepthResolveModes = {};
    VULKAN_HPP_NAMESPACE::ResolveModeFlagsKHR supportedStencilResolveModes = {};
    VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone = {};
    VULKAN_HPP_NAMESPACE::Bool32 independentResolve = {};
  };
  static_assert( sizeof( PhysicalDeviceDepthStencilResolvePropertiesKHR ) == sizeof( VkPhysicalDeviceDepthStencilResolvePropertiesKHR ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceDepthStencilResolvePropertiesKHR>::value, "struct wrapper is not a standard layout!" );

  struct PhysicalDeviceDescriptorIndexingFeaturesEXT
  {
    VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ = {},
                                                                      VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ = {},
                                                                      VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ = {},
                                                                      VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ = {},
                                                                      VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ = {},
                                                                      VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ = {},
                                                                      VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ = {},
                                                                      VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ = {},
                                                                      VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ = {},
                                                                      VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ = {},
                                                                      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ = {},
                                                                      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ = {},
                                                                      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ = {},
                                                                      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ = {},
                                                                      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ = {},
                                                                      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ = {},
                                                                      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ = {},
                                                                      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ = {},
                                                                      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ = {},
                                                                      VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ = {} ) VULKAN_HPP_NOEXCEPT
      : shaderInputAttachmentArrayDynamicIndexing( shaderInputAttachmentArrayDynamicIndexing_ )
      , shaderUniformTexelBufferArrayDynamicIndexing( shaderUniformTexelBufferArrayDynamicIndexing_ )
      , shaderStorageTexelBufferArrayDynamicIndexing( shaderStorageTexelBufferArrayDynamicIndexing_ )
      , shaderUniformBufferArrayNonUniformIndexing( shaderUniformBufferArrayNonUniformIndexing_ )
      , shaderSampledImageArrayNonUniformIndexing( shaderSampledImageArrayNonUniformIndexing_ )
      , shaderStorageBufferArrayNonUniformIndexing( shaderStorageBufferArrayNonUniformIndexing_ )
      , shaderStorageImageArrayNonUniformIndexing( shaderStorageImageArrayNonUniformIndexing_ )
      , shaderInputAttachmentArrayNonUniformIndexing( shaderInputAttachmentArrayNonUniformIndexing_ )
      , shaderUniformTexelBufferArrayNonUniformIndexing( shaderUniformTexelBufferArrayNonUniformIndexing_ )
      , shaderStorageTexelBufferArrayNonUniformIndexing( shaderStorageTexelBufferArrayNonUniformIndexing_ )
      , descriptorBindingUniformBufferUpdateAfterBind( descriptorBindingUniformBufferUpdateAfterBind_ )
      , descriptorBindingSampledImageUpdateAfterBind( descriptorBindingSampledImageUpdateAfterBind_ )
      , descriptorBindingStorageImageUpdateAfterBind( descriptorBindingStorageImageUpdateAfterBind_ )
      , descriptorBindingStorageBufferUpdateAfterBind( descriptorBindingStorageBufferUpdateAfterBind_ )
      , descriptorBindingUniformTexelBufferUpdateAfterBind( descriptorBindingUniformTexelBufferUpdateAfterBind_ )
      , descriptorBindingStorageTexelBufferUpdateAfterBind( descriptorBindingStorageTexelBufferUpdateAfterBind_ )
      , descriptorBindingUpdateUnusedWhilePending( descriptorBindingUpdateUnusedWhilePending_ )
      , descriptorBindingPartiallyBound( descriptorBindingPartiallyBound_ )
      , descriptorBindingVariableDescriptorCount( descriptorBindingVariableDescriptorCount_ )
      , runtimeDescriptorArray( runtimeDescriptorArray_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingFeaturesEXT( PhysicalDeviceDescriptorIndexingFeaturesEXT const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , shaderInputAttachmentArrayDynamicIndexing( rhs.shaderInputAttachmentArrayDynamicIndexing )
      , shaderUniformTexelBufferArrayDynamicIndexing( rhs.shaderUniformTexelBufferArrayDynamicIndexing )
      , shaderStorageTexelBufferArrayDynamicIndexing( rhs.shaderStorageTexelBufferArrayDynamicIndexing )
      , shaderUniformBufferArrayNonUniformIndexing( rhs.shaderUniformBufferArrayNonUniformIndexing )
      , shaderSampledImageArrayNonUniformIndexing( rhs.shaderSampledImageArrayNonUniformIndexing )
      , shaderStorageBufferArrayNonUniformIndexing( rhs.shaderStorageBufferArrayNonUniformIndexing )
      , shaderStorageImageArrayNonUniformIndexing( rhs.shaderStorageImageArrayNonUniformIndexing )
      , shaderInputAttachmentArrayNonUniformIndexing( rhs.shaderInputAttachmentArrayNonUniformIndexing )
      , shaderUniformTexelBufferArrayNonUniformIndexing( rhs.shaderUniformTexelBufferArrayNonUniformIndexing )
      , shaderStorageTexelBufferArrayNonUniformIndexing( rhs.shaderStorageTexelBufferArrayNonUniformIndexing )
      , descriptorBindingUniformBufferUpdateAfterBind( rhs.descriptorBindingUniformBufferUpdateAfterBind )
      , descriptorBindingSampledImageUpdateAfterBind( rhs.descriptorBindingSampledImageUpdateAfterBind )
      , descriptorBindingStorageImageUpdateAfterBind( rhs.descriptorBindingStorageImageUpdateAfterBind )
      , descriptorBindingStorageBufferUpdateAfterBind( rhs.descriptorBindingStorageBufferUpdateAfterBind )
      , descriptorBindingUniformTexelBufferUpdateAfterBind( rhs.descriptorBindingUniformTexelBufferUpdateAfterBind )
      , descriptorBindingStorageTexelBufferUpdateAfterBind( rhs.descriptorBindingStorageTexelBufferUpdateAfterBind )
      , descriptorBindingUpdateUnusedWhilePending( rhs.descriptorBindingUpdateUnusedWhilePending )
      , descriptorBindingPartiallyBound( rhs.descriptorBindingPartiallyBound )
      , descriptorBindingVariableDescriptorCount( rhs.descriptorBindingVariableDescriptorCount )
      , runtimeDescriptorArray( rhs.runtimeDescriptorArray )
    {}

    PhysicalDeviceDescriptorIndexingFeaturesEXT & operator=( PhysicalDeviceDescriptorIndexingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceDescriptorIndexingFeaturesEXT ) - offsetof( PhysicalDeviceDescriptorIndexingFeaturesEXT, pNext ) );
      return *this;
    }

    PhysicalDeviceDescriptorIndexingFeaturesEXT( VkPhysicalDeviceDescriptorIndexingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    PhysicalDeviceDescriptorIndexingFeaturesEXT& operator=( VkPhysicalDeviceDescriptorIndexingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingFeaturesEXT const *>(&rhs);
      return *this;
    }

    PhysicalDeviceDescriptorIndexingFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceDescriptorIndexingFeaturesEXT & setShaderInputAttachmentArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderInputAttachmentArrayDynamicIndexing = shaderInputAttachmentArrayDynamicIndexing_;
      return *this;
    }

    PhysicalDeviceDescriptorIndexingFeaturesEXT & setShaderUniformTexelBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderUniformTexelBufferArrayDynamicIndexing = shaderUniformTexelBufferArrayDynamicIndexing_;
      return *this;
    }

    PhysicalDeviceDescriptorIndexingFeaturesEXT & setShaderStorageTexelBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderStorageTexelBufferArrayDynamicIndexing = shaderStorageTexelBufferArrayDynamicIndexing_;
      return *this;
    }

    PhysicalDeviceDescriptorIndexingFeaturesEXT & setShaderUniformBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderUniformBufferArrayNonUniformIndexing = shaderUniformBufferArrayNonUniformIndexing_;
      return *this;
    }

    PhysicalDeviceDescriptorIndexingFeaturesEXT & setShaderSampledImageArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderSampledImageArrayNonUniformIndexing = shaderSampledImageArrayNonUniformIndexing_;
      return *this;
    }

    PhysicalDeviceDescriptorIndexingFeaturesEXT & setShaderStorageBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderStorageBufferArrayNonUniformIndexing = shaderStorageBufferArrayNonUniformIndexing_;
      return *this;
    }

    PhysicalDeviceDescriptorIndexingFeaturesEXT & setShaderStorageImageArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderStorageImageArrayNonUniformIndexing = shaderStorageImageArrayNonUniformIndexing_;
      return *this;
    }

    PhysicalDeviceDescriptorIndexingFeaturesEXT & setShaderInputAttachmentArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderInputAttachmentArrayNonUniformIndexing = shaderInputAttachmentArrayNonUniformIndexing_;
      return *this;
    }

    PhysicalDeviceDescriptorIndexingFeaturesEXT & setShaderUniformTexelBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderUniformTexelBufferArrayNonUniformIndexing = shaderUniformTexelBufferArrayNonUniformIndexing_;
      return *this;
    }

    PhysicalDeviceDescriptorIndexingFeaturesEXT & setShaderStorageTexelBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderStorageTexelBufferArrayNonUniformIndexing = shaderStorageTexelBufferArrayNonUniformIndexing_;
      return *this;
    }

    PhysicalDeviceDescriptorIndexingFeaturesEXT & setDescriptorBindingUniformBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingUniformBufferUpdateAfterBind = descriptorBindingUniformBufferUpdateAfterBind_;
      return *this;
    }

    PhysicalDeviceDescriptorIndexingFeaturesEXT & setDescriptorBindingSampledImageUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingSampledImageUpdateAfterBind = descriptorBindingSampledImageUpdateAfterBind_;
      return *this;
    }

    PhysicalDeviceDescriptorIndexingFeaturesEXT & setDescriptorBindingStorageImageUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingStorageImageUpdateAfterBind = descriptorBindingStorageImageUpdateAfterBind_;
      return *this;
    }

    PhysicalDeviceDescriptorIndexingFeaturesEXT & setDescriptorBindingStorageBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingStorageBufferUpdateAfterBind = descriptorBindingStorageBufferUpdateAfterBind_;
      return *this;
    }

    PhysicalDeviceDescriptorIndexingFeaturesEXT & setDescriptorBindingUniformTexelBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingUniformTexelBufferUpdateAfterBind = descriptorBindingUniformTexelBufferUpdateAfterBind_;
      return *this;
    }

    PhysicalDeviceDescriptorIndexingFeaturesEXT & setDescriptorBindingStorageTexelBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingStorageTexelBufferUpdateAfterBind = descriptorBindingStorageTexelBufferUpdateAfterBind_;
      return *this;
    }

    PhysicalDeviceDescriptorIndexingFeaturesEXT & setDescriptorBindingUpdateUnusedWhilePending( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingUpdateUnusedWhilePending = descriptorBindingUpdateUnusedWhilePending_;
      return *this;
    }

    PhysicalDeviceDescriptorIndexingFeaturesEXT & setDescriptorBindingPartiallyBound( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingPartiallyBound = descriptorBindingPartiallyBound_;
      return *this;
    }

    PhysicalDeviceDescriptorIndexingFeaturesEXT & setDescriptorBindingVariableDescriptorCount( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingVariableDescriptorCount = descriptorBindingVariableDescriptorCount_;
      return *this;
    }

    PhysicalDeviceDescriptorIndexingFeaturesEXT & setRuntimeDescriptorArray( VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ ) VULKAN_HPP_NOEXCEPT
    {
      runtimeDescriptorArray = runtimeDescriptorArray_;
      return *this;
    }

    operator VkPhysicalDeviceDescriptorIndexingFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceDescriptorIndexingFeaturesEXT*>( this );
    }

    operator VkPhysicalDeviceDescriptorIndexingFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDescriptorIndexingFeaturesEXT*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceDescriptorIndexingFeaturesEXT const& ) const = default;
#else
    bool operator==( PhysicalDeviceDescriptorIndexingFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( shaderInputAttachmentArrayDynamicIndexing == rhs.shaderInputAttachmentArrayDynamicIndexing )
          && ( shaderUniformTexelBufferArrayDynamicIndexing == rhs.shaderUniformTexelBufferArrayDynamicIndexing )
          && ( shaderStorageTexelBufferArrayDynamicIndexing == rhs.shaderStorageTexelBufferArrayDynamicIndexing )
          && ( shaderUniformBufferArrayNonUniformIndexing == rhs.shaderUniformBufferArrayNonUniformIndexing )
          && ( shaderSampledImageArrayNonUniformIndexing == rhs.shaderSampledImageArrayNonUniformIndexing )
          && ( shaderStorageBufferArrayNonUniformIndexing == rhs.shaderStorageBufferArrayNonUniformIndexing )
          && ( shaderStorageImageArrayNonUniformIndexing == rhs.shaderStorageImageArrayNonUniformIndexing )
          && ( shaderInputAttachmentArrayNonUniformIndexing == rhs.shaderInputAttachmentArrayNonUniformIndexing )
          && ( shaderUniformTexelBufferArrayNonUniformIndexing == rhs.shaderUniformTexelBufferArrayNonUniformIndexing )
          && ( shaderStorageTexelBufferArrayNonUniformIndexing == rhs.shaderStorageTexelBufferArrayNonUniformIndexing )
          && ( descriptorBindingUniformBufferUpdateAfterBind == rhs.descriptorBindingUniformBufferUpdateAfterBind )
          && ( descriptorBindingSampledImageUpdateAfterBind == rhs.descriptorBindingSampledImageUpdateAfterBind )
          && ( descriptorBindingStorageImageUpdateAfterBind == rhs.descriptorBindingStorageImageUpdateAfterBind )
          && ( descriptorBindingStorageBufferUpdateAfterBind == rhs.descriptorBindingStorageBufferUpdateAfterBind )
          && ( descriptorBindingUniformTexelBufferUpdateAfterBind == rhs.descriptorBindingUniformTexelBufferUpdateAfterBind )
          && ( descriptorBindingStorageTexelBufferUpdateAfterBind == rhs.descriptorBindingStorageTexelBufferUpdateAfterBind )
          && ( descriptorBindingUpdateUnusedWhilePending == rhs.descriptorBindingUpdateUnusedWhilePending )
          && ( descriptorBindingPartiallyBound == rhs.descriptorBindingPartiallyBound )
          && ( descriptorBindingVariableDescriptorCount == rhs.descriptorBindingVariableDescriptorCount )
          && ( runtimeDescriptorArray == rhs.runtimeDescriptorArray );
    }

    bool operator!=( PhysicalDeviceDescriptorIndexingFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorIndexingFeaturesEXT;
    void* pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing = {};
    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind = {};
    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind = {};
    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind = {};
    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind = {};
    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind = {};
    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind = {};
    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending = {};
    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound = {};
    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount = {};
    VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray = {};
  };
  static_assert( sizeof( PhysicalDeviceDescriptorIndexingFeaturesEXT ) == sizeof( VkPhysicalDeviceDescriptorIndexingFeaturesEXT ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceDescriptorIndexingFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
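
  // Illustrative usage sketch, not part of the generated header: the fluent setters above can be chained to
  // build the feature struct in place, and the result can then be hooked into a DeviceCreateInfo pNext chain
  // to request those features at device creation. The default `vk` namespace alias and prior confirmation
  // that the device actually supports the requested features are assumptions here.
  //
  //   auto indexingFeatures = vk::PhysicalDeviceDescriptorIndexingFeaturesEXT{}
  //                               .setRuntimeDescriptorArray( VK_TRUE )
  //                               .setDescriptorBindingPartiallyBound( VK_TRUE );
  //   vk::DeviceCreateInfo deviceCreateInfo;       // queue/extension setup omitted for brevity
  //   deviceCreateInfo.pNext = &indexingFeatures;  // features in the chain are enabled on the created device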

  struct PhysicalDeviceDescriptorIndexingPropertiesEXT
  {
    VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingPropertiesEXT( uint32_t maxUpdateAfterBindDescriptorsInAllPools_ = {},
                                                                        VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative_ = {},
                                                                        VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative_ = {},
                                                                        VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative_ = {},
                                                                        VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative_ = {},
                                                                        VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative_ = {},
                                                                        VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind_ = {},
                                                                        VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod_ = {},
                                                                        uint32_t maxPerStageDescriptorUpdateAfterBindSamplers_ = {},
                                                                        uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers_ = {},
                                                                        uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers_ = {},
                                                                        uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages_ = {},
                                                                        uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages_ = {},
                                                                        uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments_ = {},
                                                                        uint32_t maxPerStageUpdateAfterBindResources_ = {},
                                                                        uint32_t maxDescriptorSetUpdateAfterBindSamplers_ = {},
                                                                        uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers_ = {},
                                                                        uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ = {},
                                                                        uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers_ = {},
                                                                        uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ = {},
                                                                        uint32_t maxDescriptorSetUpdateAfterBindSampledImages_ = {},
                                                                        uint32_t maxDescriptorSetUpdateAfterBindStorageImages_ = {},
                                                                        uint32_t maxDescriptorSetUpdateAfterBindInputAttachments_ = {} ) VULKAN_HPP_NOEXCEPT
      : maxUpdateAfterBindDescriptorsInAllPools( maxUpdateAfterBindDescriptorsInAllPools_ )
      , shaderUniformBufferArrayNonUniformIndexingNative( shaderUniformBufferArrayNonUniformIndexingNative_ )
      , shaderSampledImageArrayNonUniformIndexingNative( shaderSampledImageArrayNonUniformIndexingNative_ )
      , shaderStorageBufferArrayNonUniformIndexingNative( shaderStorageBufferArrayNonUniformIndexingNative_ )
      , shaderStorageImageArrayNonUniformIndexingNative( shaderStorageImageArrayNonUniformIndexingNative_ )
      , shaderInputAttachmentArrayNonUniformIndexingNative( shaderInputAttachmentArrayNonUniformIndexingNative_ )
      , robustBufferAccessUpdateAfterBind( robustBufferAccessUpdateAfterBind_ )
      , quadDivergentImplicitLod( quadDivergentImplicitLod_ )
      , maxPerStageDescriptorUpdateAfterBindSamplers( maxPerStageDescriptorUpdateAfterBindSamplers_ )
      , maxPerStageDescriptorUpdateAfterBindUniformBuffers( maxPerStageDescriptorUpdateAfterBindUniformBuffers_ )
      , maxPerStageDescriptorUpdateAfterBindStorageBuffers( maxPerStageDescriptorUpdateAfterBindStorageBuffers_ )
      , maxPerStageDescriptorUpdateAfterBindSampledImages( maxPerStageDescriptorUpdateAfterBindSampledImages_ )
      , maxPerStageDescriptorUpdateAfterBindStorageImages( maxPerStageDescriptorUpdateAfterBindStorageImages_ )
      , maxPerStageDescriptorUpdateAfterBindInputAttachments( maxPerStageDescriptorUpdateAfterBindInputAttachments_ )
      , maxPerStageUpdateAfterBindResources( maxPerStageUpdateAfterBindResources_ )
      , maxDescriptorSetUpdateAfterBindSamplers( maxDescriptorSetUpdateAfterBindSamplers_ )
      , maxDescriptorSetUpdateAfterBindUniformBuffers( maxDescriptorSetUpdateAfterBindUniformBuffers_ )
      , maxDescriptorSetUpdateAfterBindUniformBuffersDynamic( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ )
      , maxDescriptorSetUpdateAfterBindStorageBuffers( maxDescriptorSetUpdateAfterBindStorageBuffers_ )
      , maxDescriptorSetUpdateAfterBindStorageBuffersDynamic( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ )
      , maxDescriptorSetUpdateAfterBindSampledImages( maxDescriptorSetUpdateAfterBindSampledImages_ )
      , maxDescriptorSetUpdateAfterBindStorageImages( maxDescriptorSetUpdateAfterBindStorageImages_ )
      , maxDescriptorSetUpdateAfterBindInputAttachments( maxDescriptorSetUpdateAfterBindInputAttachments_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingPropertiesEXT( PhysicalDeviceDescriptorIndexingPropertiesEXT const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , maxUpdateAfterBindDescriptorsInAllPools( rhs.maxUpdateAfterBindDescriptorsInAllPools )
      , shaderUniformBufferArrayNonUniformIndexingNative( rhs.shaderUniformBufferArrayNonUniformIndexingNative )
      , shaderSampledImageArrayNonUniformIndexingNative( rhs.shaderSampledImageArrayNonUniformIndexingNative )
      , shaderStorageBufferArrayNonUniformIndexingNative( rhs.shaderStorageBufferArrayNonUniformIndexingNative )
      , shaderStorageImageArrayNonUniformIndexingNative( rhs.shaderStorageImageArrayNonUniformIndexingNative )
      , shaderInputAttachmentArrayNonUniformIndexingNative( rhs.shaderInputAttachmentArrayNonUniformIndexingNative )
      , robustBufferAccessUpdateAfterBind( rhs.robustBufferAccessUpdateAfterBind )
      , quadDivergentImplicitLod( rhs.quadDivergentImplicitLod )
      , maxPerStageDescriptorUpdateAfterBindSamplers( rhs.maxPerStageDescriptorUpdateAfterBindSamplers )
      , maxPerStageDescriptorUpdateAfterBindUniformBuffers( rhs.maxPerStageDescriptorUpdateAfterBindUniformBuffers )
      , maxPerStageDescriptorUpdateAfterBindStorageBuffers( rhs.maxPerStageDescriptorUpdateAfterBindStorageBuffers )
      , maxPerStageDescriptorUpdateAfterBindSampledImages( rhs.maxPerStageDescriptorUpdateAfterBindSampledImages )
      , maxPerStageDescriptorUpdateAfterBindStorageImages( rhs.maxPerStageDescriptorUpdateAfterBindStorageImages )
      , maxPerStageDescriptorUpdateAfterBindInputAttachments( rhs.maxPerStageDescriptorUpdateAfterBindInputAttachments )
      , maxPerStageUpdateAfterBindResources( rhs.maxPerStageUpdateAfterBindResources )
      , maxDescriptorSetUpdateAfterBindSamplers( rhs.maxDescriptorSetUpdateAfterBindSamplers )
      , maxDescriptorSetUpdateAfterBindUniformBuffers( rhs.maxDescriptorSetUpdateAfterBindUniformBuffers )
      , maxDescriptorSetUpdateAfterBindUniformBuffersDynamic( rhs.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic )
      , maxDescriptorSetUpdateAfterBindStorageBuffers( rhs.maxDescriptorSetUpdateAfterBindStorageBuffers )
      , maxDescriptorSetUpdateAfterBindStorageBuffersDynamic( rhs.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic )
      , maxDescriptorSetUpdateAfterBindSampledImages( rhs.maxDescriptorSetUpdateAfterBindSampledImages )
      , maxDescriptorSetUpdateAfterBindStorageImages( rhs.maxDescriptorSetUpdateAfterBindStorageImages )
      , maxDescriptorSetUpdateAfterBindInputAttachments( rhs.maxDescriptorSetUpdateAfterBindInputAttachments )
    {}

    PhysicalDeviceDescriptorIndexingPropertiesEXT & operator=( PhysicalDeviceDescriptorIndexingPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceDescriptorIndexingPropertiesEXT ) - offsetof( PhysicalDeviceDescriptorIndexingPropertiesEXT, pNext ) );
      return *this;
    }

    PhysicalDeviceDescriptorIndexingPropertiesEXT( VkPhysicalDeviceDescriptorIndexingPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    PhysicalDeviceDescriptorIndexingPropertiesEXT& operator=( VkPhysicalDeviceDescriptorIndexingPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingPropertiesEXT const *>(&rhs);
      return *this;
    }

    operator VkPhysicalDeviceDescriptorIndexingPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceDescriptorIndexingPropertiesEXT*>( this );
    }

    operator VkPhysicalDeviceDescriptorIndexingPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDescriptorIndexingPropertiesEXT*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceDescriptorIndexingPropertiesEXT const& ) const = default;
#else
    bool operator==( PhysicalDeviceDescriptorIndexingPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( maxUpdateAfterBindDescriptorsInAllPools == rhs.maxUpdateAfterBindDescriptorsInAllPools )
          && ( shaderUniformBufferArrayNonUniformIndexingNative == rhs.shaderUniformBufferArrayNonUniformIndexingNative )
          && ( shaderSampledImageArrayNonUniformIndexingNative == rhs.shaderSampledImageArrayNonUniformIndexingNative )
          && ( shaderStorageBufferArrayNonUniformIndexingNative == rhs.shaderStorageBufferArrayNonUniformIndexingNative )
          && ( shaderStorageImageArrayNonUniformIndexingNative == rhs.shaderStorageImageArrayNonUniformIndexingNative )
          && ( shaderInputAttachmentArrayNonUniformIndexingNative == rhs.shaderInputAttachmentArrayNonUniformIndexingNative )
          && ( robustBufferAccessUpdateAfterBind == rhs.robustBufferAccessUpdateAfterBind )
          && ( quadDivergentImplicitLod == rhs.quadDivergentImplicitLod )
          && ( maxPerStageDescriptorUpdateAfterBindSamplers == rhs.maxPerStageDescriptorUpdateAfterBindSamplers )
          && ( maxPerStageDescriptorUpdateAfterBindUniformBuffers == rhs.maxPerStageDescriptorUpdateAfterBindUniformBuffers )
          && ( maxPerStageDescriptorUpdateAfterBindStorageBuffers == rhs.maxPerStageDescriptorUpdateAfterBindStorageBuffers )
          && ( maxPerStageDescriptorUpdateAfterBindSampledImages == rhs.maxPerStageDescriptorUpdateAfterBindSampledImages )
          && ( maxPerStageDescriptorUpdateAfterBindStorageImages == rhs.maxPerStageDescriptorUpdateAfterBindStorageImages )
          && ( maxPerStageDescriptorUpdateAfterBindInputAttachments == rhs.maxPerStageDescriptorUpdateAfterBindInputAttachments )
          && ( maxPerStageUpdateAfterBindResources == rhs.maxPerStageUpdateAfterBindResources )
          && ( maxDescriptorSetUpdateAfterBindSamplers == rhs.maxDescriptorSetUpdateAfterBindSamplers )
          && ( maxDescriptorSetUpdateAfterBindUniformBuffers == rhs.maxDescriptorSetUpdateAfterBindUniformBuffers )
          && ( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic )
          && ( maxDescriptorSetUpdateAfterBindStorageBuffers == rhs.maxDescriptorSetUpdateAfterBindStorageBuffers )
          && ( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic )
          && ( maxDescriptorSetUpdateAfterBindSampledImages == rhs.maxDescriptorSetUpdateAfterBindSampledImages )
          && ( maxDescriptorSetUpdateAfterBindStorageImages == rhs.maxDescriptorSetUpdateAfterBindStorageImages )
          && ( maxDescriptorSetUpdateAfterBindInputAttachments == rhs.maxDescriptorSetUpdateAfterBindInputAttachments );
    }

    bool operator!=( PhysicalDeviceDescriptorIndexingPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorIndexingPropertiesEXT;
    void* pNext = {};
    uint32_t maxUpdateAfterBindDescriptorsInAllPools = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative = {};
    VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind = {};
    VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod = {};
    uint32_t maxPerStageDescriptorUpdateAfterBindSamplers = {};
    uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers = {};
    uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers = {};
    uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages = {};
    uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages = {};
    uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments = {};
    uint32_t maxPerStageUpdateAfterBindResources = {};
    uint32_t maxDescriptorSetUpdateAfterBindSamplers = {};
    uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers = {};
    uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic = {};
    uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers = {};
    uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic = {};
    uint32_t maxDescriptorSetUpdateAfterBindSampledImages = {};
    uint32_t maxDescriptorSetUpdateAfterBindStorageImages = {};
    uint32_t maxDescriptorSetUpdateAfterBindInputAttachments = {};
  };
  static_assert( sizeof( PhysicalDeviceDescriptorIndexingPropertiesEXT ) == sizeof( VkPhysicalDeviceDescriptorIndexingPropertiesEXT ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceDescriptorIndexingPropertiesEXT>::value, "struct wrapper is not a standard layout!" );

  struct PhysicalDeviceDiscardRectanglePropertiesEXT
  {
    VULKAN_HPP_CONSTEXPR PhysicalDeviceDiscardRectanglePropertiesEXT( uint32_t maxDiscardRectangles_ = {} ) VULKAN_HPP_NOEXCEPT
      : maxDiscardRectangles( maxDiscardRectangles_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceDiscardRectanglePropertiesEXT( PhysicalDeviceDiscardRectanglePropertiesEXT const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , maxDiscardRectangles( rhs.maxDiscardRectangles )
    {}

    PhysicalDeviceDiscardRectanglePropertiesEXT & operator=( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceDiscardRectanglePropertiesEXT ) - offsetof( PhysicalDeviceDiscardRectanglePropertiesEXT, pNext ) );
      return *this;
    }

    PhysicalDeviceDiscardRectanglePropertiesEXT( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    PhysicalDeviceDiscardRectanglePropertiesEXT& operator=( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDiscardRectanglePropertiesEXT const *>(&rhs);
      return *this;
    }

    operator VkPhysicalDeviceDiscardRectanglePropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceDiscardRectanglePropertiesEXT*>( this );
    }

    operator VkPhysicalDeviceDiscardRectanglePropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDiscardRectanglePropertiesEXT*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceDiscardRectanglePropertiesEXT const& ) const = default;
#else
    bool operator==( PhysicalDeviceDiscardRectanglePropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( maxDiscardRectangles == rhs.maxDiscardRectangles );
    }

    bool operator!=( PhysicalDeviceDiscardRectanglePropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT;
    void* pNext = {};
    uint32_t maxDiscardRectangles = {};
  };
  static_assert( sizeof( PhysicalDeviceDiscardRectanglePropertiesEXT ) == sizeof( VkPhysicalDeviceDiscardRectanglePropertiesEXT ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceDiscardRectanglePropertiesEXT>::value, "struct wrapper is not a standard layout!" );

  struct PhysicalDeviceDriverPropertiesKHR
  {
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDriverPropertiesKHR( VULKAN_HPP_NAMESPACE::DriverIdKHR driverID_ = VULKAN_HPP_NAMESPACE::DriverIdKHR::eAmdProprietary,
                                                               std::array<char,VK_MAX_DRIVER_NAME_SIZE_KHR> const& driverName_ = {},
                                                               std::array<char,VK_MAX_DRIVER_INFO_SIZE_KHR> const& driverInfo_ = {},
                                                               VULKAN_HPP_NAMESPACE::ConformanceVersionKHR conformanceVersion_ = {} ) VULKAN_HPP_NOEXCEPT
      : driverID( driverID_ )
      , driverName{}
      , driverInfo{}
      , conformanceVersion( conformanceVersion_ )
    {
      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char,VK_MAX_DRIVER_NAME_SIZE_KHR>::copy( driverName, driverName_ );
      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char,VK_MAX_DRIVER_INFO_SIZE_KHR>::copy( driverInfo, driverInfo_ );
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDriverPropertiesKHR( PhysicalDeviceDriverPropertiesKHR const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , driverID( rhs.driverID )
      , driverName{}
      , driverInfo{}
      , conformanceVersion( rhs.conformanceVersion )
    {
      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char,VK_MAX_DRIVER_NAME_SIZE_KHR>::copy( driverName, rhs.driverName );
      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char,VK_MAX_DRIVER_INFO_SIZE_KHR>::copy( driverInfo, rhs.driverInfo );
    }

    PhysicalDeviceDriverPropertiesKHR & operator=( PhysicalDeviceDriverPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceDriverPropertiesKHR ) - offsetof( PhysicalDeviceDriverPropertiesKHR, pNext ) );
      return *this;
    }

    PhysicalDeviceDriverPropertiesKHR( VkPhysicalDeviceDriverPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    PhysicalDeviceDriverPropertiesKHR& operator=( VkPhysicalDeviceDriverPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverPropertiesKHR const *>(&rhs);
      return *this;
    }

    operator VkPhysicalDeviceDriverPropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceDriverPropertiesKHR*>( this );
    }

    operator VkPhysicalDeviceDriverPropertiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDriverPropertiesKHR*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceDriverPropertiesKHR const& ) const = default;
#else
    bool operator==( PhysicalDeviceDriverPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( driverID == rhs.driverID )
          && ( memcmp( driverName, rhs.driverName, VK_MAX_DRIVER_NAME_SIZE_KHR * sizeof( char ) ) == 0 )
          && ( memcmp( driverInfo, rhs.driverInfo, VK_MAX_DRIVER_INFO_SIZE_KHR * sizeof( char ) ) == 0 )
          && ( conformanceVersion == rhs.conformanceVersion );
    }

    bool operator!=( PhysicalDeviceDriverPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDriverPropertiesKHR;
    void* pNext = {};
    VULKAN_HPP_NAMESPACE::DriverIdKHR driverID = VULKAN_HPP_NAMESPACE::DriverIdKHR::eAmdProprietary;
    char driverName[VK_MAX_DRIVER_NAME_SIZE_KHR] = {};
    char driverInfo[VK_MAX_DRIVER_INFO_SIZE_KHR] = {};
    VULKAN_HPP_NAMESPACE::ConformanceVersionKHR conformanceVersion = {};
  };
  static_assert( sizeof( PhysicalDeviceDriverPropertiesKHR ) == sizeof( VkPhysicalDeviceDriverPropertiesKHR ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceDriverPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
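
  // Illustrative usage sketch, not part of the generated header: properties structs such as the one above
  // carry no setters; they are output-only and are filled by chaining them into a PhysicalDeviceProperties2
  // query. The `physicalDevice` handle, the default `vk` namespace alias, the pointer overload of
  // getProperties2, and support for VK_KHR_driver_properties are assumptions here.
  //
  //   vk::PhysicalDeviceDriverPropertiesKHR driverProperties;
  //   vk::PhysicalDeviceProperties2 properties2;
  //   properties2.pNext = &driverProperties;
  //   physicalDevice.getProperties2( &properties2 );
  //   // driverProperties.driverName and driverProperties.driverInfo now hold null-terminated strings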

  struct PhysicalDeviceExclusiveScissorFeaturesNV
  {
    VULKAN_HPP_CONSTEXPR PhysicalDeviceExclusiveScissorFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor_ = {} ) VULKAN_HPP_NOEXCEPT
      : exclusiveScissor( exclusiveScissor_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceExclusiveScissorFeaturesNV( PhysicalDeviceExclusiveScissorFeaturesNV const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , exclusiveScissor( rhs.exclusiveScissor )
    {}

    PhysicalDeviceExclusiveScissorFeaturesNV & operator=( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceExclusiveScissorFeaturesNV ) - offsetof( PhysicalDeviceExclusiveScissorFeaturesNV, pNext ) );
      return *this;
    }

    PhysicalDeviceExclusiveScissorFeaturesNV( VkPhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    PhysicalDeviceExclusiveScissorFeaturesNV& operator=( VkPhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExclusiveScissorFeaturesNV const *>(&rhs);
      return *this;
    }

    PhysicalDeviceExclusiveScissorFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceExclusiveScissorFeaturesNV & setExclusiveScissor( VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor_ ) VULKAN_HPP_NOEXCEPT
    {
      exclusiveScissor = exclusiveScissor_;
      return *this;
    }

    operator VkPhysicalDeviceExclusiveScissorFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceExclusiveScissorFeaturesNV*>( this );
    }

    operator VkPhysicalDeviceExclusiveScissorFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceExclusiveScissorFeaturesNV*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceExclusiveScissorFeaturesNV const& ) const = default;
#else
    bool operator==( PhysicalDeviceExclusiveScissorFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( exclusiveScissor == rhs.exclusiveScissor );
    }

    bool operator!=( PhysicalDeviceExclusiveScissorFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV;
    void* pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor = {};
  };
  static_assert( sizeof( PhysicalDeviceExclusiveScissorFeaturesNV ) == sizeof( VkPhysicalDeviceExclusiveScissorFeaturesNV ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceExclusiveScissorFeaturesNV>::value, "struct wrapper is not a standard layout!" );

  struct PhysicalDeviceExternalBufferInfo
  {
    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalBufferInfo( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ = {},
                                                           VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ = {},
                                                           VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , usage( usage_ )
      , handleType( handleType_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalBufferInfo( PhysicalDeviceExternalBufferInfo const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , flags( rhs.flags )
      , usage( rhs.usage )
      , handleType( rhs.handleType )
    {}

    PhysicalDeviceExternalBufferInfo & operator=( PhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceExternalBufferInfo ) - offsetof( PhysicalDeviceExternalBufferInfo, pNext ) );
      return *this;
    }

    PhysicalDeviceExternalBufferInfo( VkPhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    PhysicalDeviceExternalBufferInfo& operator=( VkPhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo const *>(&rhs);
      return *this;
    }

    PhysicalDeviceExternalBufferInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceExternalBufferInfo & setFlags( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    PhysicalDeviceExternalBufferInfo & setUsage( VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
    {
      usage = usage_;
      return *this;
    }

    PhysicalDeviceExternalBufferInfo & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
    {
      handleType = handleType_;
      return *this;
    }

    operator VkPhysicalDeviceExternalBufferInfo const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo*>( this );
    }

    operator VkPhysicalDeviceExternalBufferInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceExternalBufferInfo*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceExternalBufferInfo const& ) const = default;
#else
    bool operator==( PhysicalDeviceExternalBufferInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( flags == rhs.flags )
          && ( usage == rhs.usage )
          && ( handleType == rhs.handleType );
    }

    bool operator!=( PhysicalDeviceExternalBufferInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalBufferInfo;
    const void* pNext = {};
    VULKAN_HPP_NAMESPACE::BufferCreateFlags flags = {};
    VULKAN_HPP_NAMESPACE::BufferUsageFlags usage = {};
    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
  };
  static_assert( sizeof( PhysicalDeviceExternalBufferInfo ) == sizeof( VkPhysicalDeviceExternalBufferInfo ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceExternalBufferInfo>::value, "struct wrapper is not a standard layout!" );
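
  // Illustrative usage sketch, not part of the generated header: unlike the feature/properties structs,
  // this is an input struct, so its setters describe the buffer whose external-memory capabilities are
  // being queried. The `physicalDevice` handle, the default `vk` namespace alias, and the enhanced-mode
  // getExternalBufferProperties overload returning by value are assumptions of this sketch.
  //
  //   auto externalBufferInfo = vk::PhysicalDeviceExternalBufferInfo{}
  //                                 .setUsage( vk::BufferUsageFlagBits::eTransferSrc )
  //                                 .setHandleType( vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd );
  //   vk::ExternalBufferProperties externalBufferProperties = physicalDevice.getExternalBufferProperties( externalBufferInfo );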
|
|
|
|
  struct PhysicalDeviceExternalFenceInfo
  {
    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalFenceInfo( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
      : handleType( handleType_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalFenceInfo( PhysicalDeviceExternalFenceInfo const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , handleType( rhs.handleType )
    {}

    PhysicalDeviceExternalFenceInfo & operator=( PhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceExternalFenceInfo ) - offsetof( PhysicalDeviceExternalFenceInfo, pNext ) );
      return *this;
    }

    PhysicalDeviceExternalFenceInfo( VkPhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    PhysicalDeviceExternalFenceInfo& operator=( VkPhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo const *>(&rhs);
      return *this;
    }

    PhysicalDeviceExternalFenceInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceExternalFenceInfo & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
    {
      handleType = handleType_;
      return *this;
    }

    operator VkPhysicalDeviceExternalFenceInfo const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo*>( this );
    }

    operator VkPhysicalDeviceExternalFenceInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceExternalFenceInfo*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceExternalFenceInfo const& ) const = default;
#else
    bool operator==( PhysicalDeviceExternalFenceInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( handleType == rhs.handleType );
    }

    bool operator!=( PhysicalDeviceExternalFenceInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalFenceInfo;
    const void* pNext = {};
    VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd;
  };
  static_assert( sizeof( PhysicalDeviceExternalFenceInfo ) == sizeof( VkPhysicalDeviceExternalFenceInfo ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceExternalFenceInfo>::value, "struct wrapper is not a standard layout!" );

  struct PhysicalDeviceExternalImageFormatInfo
  {
    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalImageFormatInfo( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
      : handleType( handleType_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalImageFormatInfo( PhysicalDeviceExternalImageFormatInfo const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , handleType( rhs.handleType )
    {}

    PhysicalDeviceExternalImageFormatInfo & operator=( PhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceExternalImageFormatInfo ) - offsetof( PhysicalDeviceExternalImageFormatInfo, pNext ) );
      return *this;
    }

    PhysicalDeviceExternalImageFormatInfo( VkPhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    PhysicalDeviceExternalImageFormatInfo& operator=( VkPhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalImageFormatInfo const *>(&rhs);
      return *this;
    }

    PhysicalDeviceExternalImageFormatInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceExternalImageFormatInfo & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
    {
      handleType = handleType_;
      return *this;
    }

    operator VkPhysicalDeviceExternalImageFormatInfo const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceExternalImageFormatInfo*>( this );
    }

    operator VkPhysicalDeviceExternalImageFormatInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceExternalImageFormatInfo*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceExternalImageFormatInfo const& ) const = default;
#else
    bool operator==( PhysicalDeviceExternalImageFormatInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( handleType == rhs.handleType );
    }

    bool operator!=( PhysicalDeviceExternalImageFormatInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalImageFormatInfo;
    const void* pNext = {};
    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
  };
  static_assert( sizeof( PhysicalDeviceExternalImageFormatInfo ) == sizeof( VkPhysicalDeviceExternalImageFormatInfo ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceExternalImageFormatInfo>::value, "struct wrapper is not a standard layout!" );

  struct PhysicalDeviceExternalMemoryHostPropertiesEXT
  {
    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryHostPropertiesEXT( VULKAN_HPP_NAMESPACE::DeviceSize minImportedHostPointerAlignment_ = {} ) VULKAN_HPP_NOEXCEPT
      : minImportedHostPointerAlignment( minImportedHostPointerAlignment_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryHostPropertiesEXT( PhysicalDeviceExternalMemoryHostPropertiesEXT const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , minImportedHostPointerAlignment( rhs.minImportedHostPointerAlignment )
    {}

    PhysicalDeviceExternalMemoryHostPropertiesEXT & operator=( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceExternalMemoryHostPropertiesEXT ) - offsetof( PhysicalDeviceExternalMemoryHostPropertiesEXT, pNext ) );
      return *this;
    }

    PhysicalDeviceExternalMemoryHostPropertiesEXT( VkPhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    PhysicalDeviceExternalMemoryHostPropertiesEXT& operator=( VkPhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryHostPropertiesEXT const *>(&rhs);
      return *this;
    }

    operator VkPhysicalDeviceExternalMemoryHostPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceExternalMemoryHostPropertiesEXT*>( this );
    }

    operator VkPhysicalDeviceExternalMemoryHostPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceExternalMemoryHostPropertiesEXT*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceExternalMemoryHostPropertiesEXT const& ) const = default;
#else
    bool operator==( PhysicalDeviceExternalMemoryHostPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( minImportedHostPointerAlignment == rhs.minImportedHostPointerAlignment );
    }

    bool operator!=( PhysicalDeviceExternalMemoryHostPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT;
    void* pNext = {};
    VULKAN_HPP_NAMESPACE::DeviceSize minImportedHostPointerAlignment = {};
  };
  static_assert( sizeof( PhysicalDeviceExternalMemoryHostPropertiesEXT ) == sizeof( VkPhysicalDeviceExternalMemoryHostPropertiesEXT ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceExternalMemoryHostPropertiesEXT>::value, "struct wrapper is not a standard layout!" );

  struct PhysicalDeviceExternalSemaphoreInfo
  {
    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalSemaphoreInfo( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
      : handleType( handleType_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalSemaphoreInfo( PhysicalDeviceExternalSemaphoreInfo const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , handleType( rhs.handleType )
    {}

    PhysicalDeviceExternalSemaphoreInfo & operator=( PhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceExternalSemaphoreInfo ) - offsetof( PhysicalDeviceExternalSemaphoreInfo, pNext ) );
      return *this;
    }

    PhysicalDeviceExternalSemaphoreInfo( VkPhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    PhysicalDeviceExternalSemaphoreInfo& operator=( VkPhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo const *>(&rhs);
      return *this;
    }

    PhysicalDeviceExternalSemaphoreInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceExternalSemaphoreInfo & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
    {
      handleType = handleType_;
      return *this;
    }

    operator VkPhysicalDeviceExternalSemaphoreInfo const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo*>( this );
    }

    operator VkPhysicalDeviceExternalSemaphoreInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceExternalSemaphoreInfo*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceExternalSemaphoreInfo const& ) const = default;
#else
    bool operator==( PhysicalDeviceExternalSemaphoreInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( handleType == rhs.handleType );
    }

    bool operator!=( PhysicalDeviceExternalSemaphoreInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalSemaphoreInfo;
    const void* pNext = {};
    VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
  };
  static_assert( sizeof( PhysicalDeviceExternalSemaphoreInfo ) == sizeof( VkPhysicalDeviceExternalSemaphoreInfo ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceExternalSemaphoreInfo>::value, "struct wrapper is not a standard layout!" );

  struct PhysicalDeviceFeatures2
  {
    VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features_ = {} ) VULKAN_HPP_NOEXCEPT
      : features( features_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures2( PhysicalDeviceFeatures2 const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , features( rhs.features )
    {}

    PhysicalDeviceFeatures2 & operator=( PhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceFeatures2 ) - offsetof( PhysicalDeviceFeatures2, pNext ) );
      return *this;
    }

    PhysicalDeviceFeatures2( VkPhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    PhysicalDeviceFeatures2& operator=( VkPhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 const *>(&rhs);
      return *this;
    }

    PhysicalDeviceFeatures2 & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceFeatures2 & setFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features_ ) VULKAN_HPP_NOEXCEPT
    {
      features = features_;
      return *this;
    }

    operator VkPhysicalDeviceFeatures2 const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceFeatures2*>( this );
    }

    operator VkPhysicalDeviceFeatures2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceFeatures2*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceFeatures2 const& ) const = default;
#else
    bool operator==( PhysicalDeviceFeatures2 const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( features == rhs.features );
    }

    bool operator!=( PhysicalDeviceFeatures2 const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFeatures2;
    void* pNext = {};
    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features = {};
  };
  static_assert( sizeof( PhysicalDeviceFeatures2 ) == sizeof( VkPhysicalDeviceFeatures2 ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceFeatures2>::value, "struct wrapper is not a standard layout!" );

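  // Example (illustrative sketch only, not part of the generated API): PhysicalDeviceFeatures2 is
  // typically chained with an extension feature struct through pNext and then filled in by the
  // driver. Assuming `vk` aliases VULKAN_HPP_NAMESPACE and `physicalDevice` is a valid
  // vk::PhysicalDevice wrapper declared elsewhere in these headers, a query could look like:
  //
  //   vk::PhysicalDeviceHostQueryResetFeaturesEXT hostQueryResetFeatures;
  //   vk::PhysicalDeviceFeatures2 features2;
  //   features2.setPNext( &hostQueryResetFeatures );   // chain the extension struct
  //   physicalDevice.getFeatures2( &features2 );       // fills features2 and the chained struct
  //   bool hasHostQueryReset = hostQueryResetFeatures.hostQueryReset == VK_TRUE;
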
  struct PhysicalDeviceFloatControlsPropertiesKHR
  {
    VULKAN_HPP_CONSTEXPR PhysicalDeviceFloatControlsPropertiesKHR( VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependenceKHR denormBehaviorIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependenceKHR::e32BitOnly,
                                                                   VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependenceKHR roundingModeIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependenceKHR::e32BitOnly,
                                                                   VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16_ = {},
                                                                   VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32_ = {},
                                                                   VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64_ = {},
                                                                   VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16_ = {},
                                                                   VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32_ = {},
                                                                   VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64_ = {},
                                                                   VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16_ = {},
                                                                   VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32_ = {},
                                                                   VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64_ = {},
                                                                   VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16_ = {},
                                                                   VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32_ = {},
                                                                   VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64_ = {},
                                                                   VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16_ = {},
                                                                   VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32_ = {},
                                                                   VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64_ = {} ) VULKAN_HPP_NOEXCEPT
      : denormBehaviorIndependence( denormBehaviorIndependence_ )
      , roundingModeIndependence( roundingModeIndependence_ )
      , shaderSignedZeroInfNanPreserveFloat16( shaderSignedZeroInfNanPreserveFloat16_ )
      , shaderSignedZeroInfNanPreserveFloat32( shaderSignedZeroInfNanPreserveFloat32_ )
      , shaderSignedZeroInfNanPreserveFloat64( shaderSignedZeroInfNanPreserveFloat64_ )
      , shaderDenormPreserveFloat16( shaderDenormPreserveFloat16_ )
      , shaderDenormPreserveFloat32( shaderDenormPreserveFloat32_ )
      , shaderDenormPreserveFloat64( shaderDenormPreserveFloat64_ )
      , shaderDenormFlushToZeroFloat16( shaderDenormFlushToZeroFloat16_ )
      , shaderDenormFlushToZeroFloat32( shaderDenormFlushToZeroFloat32_ )
      , shaderDenormFlushToZeroFloat64( shaderDenormFlushToZeroFloat64_ )
      , shaderRoundingModeRTEFloat16( shaderRoundingModeRTEFloat16_ )
      , shaderRoundingModeRTEFloat32( shaderRoundingModeRTEFloat32_ )
      , shaderRoundingModeRTEFloat64( shaderRoundingModeRTEFloat64_ )
      , shaderRoundingModeRTZFloat16( shaderRoundingModeRTZFloat16_ )
      , shaderRoundingModeRTZFloat32( shaderRoundingModeRTZFloat32_ )
      , shaderRoundingModeRTZFloat64( shaderRoundingModeRTZFloat64_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceFloatControlsPropertiesKHR( PhysicalDeviceFloatControlsPropertiesKHR const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , denormBehaviorIndependence( rhs.denormBehaviorIndependence )
      , roundingModeIndependence( rhs.roundingModeIndependence )
      , shaderSignedZeroInfNanPreserveFloat16( rhs.shaderSignedZeroInfNanPreserveFloat16 )
      , shaderSignedZeroInfNanPreserveFloat32( rhs.shaderSignedZeroInfNanPreserveFloat32 )
      , shaderSignedZeroInfNanPreserveFloat64( rhs.shaderSignedZeroInfNanPreserveFloat64 )
      , shaderDenormPreserveFloat16( rhs.shaderDenormPreserveFloat16 )
      , shaderDenormPreserveFloat32( rhs.shaderDenormPreserveFloat32 )
      , shaderDenormPreserveFloat64( rhs.shaderDenormPreserveFloat64 )
      , shaderDenormFlushToZeroFloat16( rhs.shaderDenormFlushToZeroFloat16 )
      , shaderDenormFlushToZeroFloat32( rhs.shaderDenormFlushToZeroFloat32 )
      , shaderDenormFlushToZeroFloat64( rhs.shaderDenormFlushToZeroFloat64 )
      , shaderRoundingModeRTEFloat16( rhs.shaderRoundingModeRTEFloat16 )
      , shaderRoundingModeRTEFloat32( rhs.shaderRoundingModeRTEFloat32 )
      , shaderRoundingModeRTEFloat64( rhs.shaderRoundingModeRTEFloat64 )
      , shaderRoundingModeRTZFloat16( rhs.shaderRoundingModeRTZFloat16 )
      , shaderRoundingModeRTZFloat32( rhs.shaderRoundingModeRTZFloat32 )
      , shaderRoundingModeRTZFloat64( rhs.shaderRoundingModeRTZFloat64 )
    {}

    PhysicalDeviceFloatControlsPropertiesKHR & operator=( PhysicalDeviceFloatControlsPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceFloatControlsPropertiesKHR ) - offsetof( PhysicalDeviceFloatControlsPropertiesKHR, pNext ) );
      return *this;
    }

    PhysicalDeviceFloatControlsPropertiesKHR( VkPhysicalDeviceFloatControlsPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    PhysicalDeviceFloatControlsPropertiesKHR& operator=( VkPhysicalDeviceFloatControlsPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFloatControlsPropertiesKHR const *>(&rhs);
      return *this;
    }

    operator VkPhysicalDeviceFloatControlsPropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceFloatControlsPropertiesKHR*>( this );
    }

    operator VkPhysicalDeviceFloatControlsPropertiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceFloatControlsPropertiesKHR*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceFloatControlsPropertiesKHR const& ) const = default;
#else
    bool operator==( PhysicalDeviceFloatControlsPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( denormBehaviorIndependence == rhs.denormBehaviorIndependence )
          && ( roundingModeIndependence == rhs.roundingModeIndependence )
          && ( shaderSignedZeroInfNanPreserveFloat16 == rhs.shaderSignedZeroInfNanPreserveFloat16 )
          && ( shaderSignedZeroInfNanPreserveFloat32 == rhs.shaderSignedZeroInfNanPreserveFloat32 )
          && ( shaderSignedZeroInfNanPreserveFloat64 == rhs.shaderSignedZeroInfNanPreserveFloat64 )
          && ( shaderDenormPreserveFloat16 == rhs.shaderDenormPreserveFloat16 )
          && ( shaderDenormPreserveFloat32 == rhs.shaderDenormPreserveFloat32 )
          && ( shaderDenormPreserveFloat64 == rhs.shaderDenormPreserveFloat64 )
          && ( shaderDenormFlushToZeroFloat16 == rhs.shaderDenormFlushToZeroFloat16 )
          && ( shaderDenormFlushToZeroFloat32 == rhs.shaderDenormFlushToZeroFloat32 )
          && ( shaderDenormFlushToZeroFloat64 == rhs.shaderDenormFlushToZeroFloat64 )
          && ( shaderRoundingModeRTEFloat16 == rhs.shaderRoundingModeRTEFloat16 )
          && ( shaderRoundingModeRTEFloat32 == rhs.shaderRoundingModeRTEFloat32 )
          && ( shaderRoundingModeRTEFloat64 == rhs.shaderRoundingModeRTEFloat64 )
          && ( shaderRoundingModeRTZFloat16 == rhs.shaderRoundingModeRTZFloat16 )
          && ( shaderRoundingModeRTZFloat32 == rhs.shaderRoundingModeRTZFloat32 )
          && ( shaderRoundingModeRTZFloat64 == rhs.shaderRoundingModeRTZFloat64 );
    }

    bool operator!=( PhysicalDeviceFloatControlsPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFloatControlsPropertiesKHR;
    void* pNext = {};
    VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependenceKHR denormBehaviorIndependence = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependenceKHR::e32BitOnly;
    VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependenceKHR roundingModeIndependence = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependenceKHR::e32BitOnly;
    VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16 = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32 = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64 = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16 = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32 = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64 = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16 = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32 = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64 = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16 = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32 = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64 = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16 = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32 = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64 = {};
  };
  static_assert( sizeof( PhysicalDeviceFloatControlsPropertiesKHR ) == sizeof( VkPhysicalDeviceFloatControlsPropertiesKHR ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceFloatControlsPropertiesKHR>::value, "struct wrapper is not a standard layout!" );

  struct PhysicalDeviceFragmentDensityMapFeaturesEXT
  {
    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMap_ = {},
                                                                      VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDynamic_ = {},
                                                                      VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapNonSubsampledImages_ = {} ) VULKAN_HPP_NOEXCEPT
      : fragmentDensityMap( fragmentDensityMap_ )
      , fragmentDensityMapDynamic( fragmentDensityMapDynamic_ )
      , fragmentDensityMapNonSubsampledImages( fragmentDensityMapNonSubsampledImages_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapFeaturesEXT( PhysicalDeviceFragmentDensityMapFeaturesEXT const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , fragmentDensityMap( rhs.fragmentDensityMap )
      , fragmentDensityMapDynamic( rhs.fragmentDensityMapDynamic )
      , fragmentDensityMapNonSubsampledImages( rhs.fragmentDensityMapNonSubsampledImages )
    {}

    PhysicalDeviceFragmentDensityMapFeaturesEXT & operator=( PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceFragmentDensityMapFeaturesEXT ) - offsetof( PhysicalDeviceFragmentDensityMapFeaturesEXT, pNext ) );
      return *this;
    }

    PhysicalDeviceFragmentDensityMapFeaturesEXT( VkPhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    PhysicalDeviceFragmentDensityMapFeaturesEXT& operator=( VkPhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapFeaturesEXT const *>(&rhs);
      return *this;
    }

    operator VkPhysicalDeviceFragmentDensityMapFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceFragmentDensityMapFeaturesEXT*>( this );
    }

    operator VkPhysicalDeviceFragmentDensityMapFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceFragmentDensityMapFeaturesEXT*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceFragmentDensityMapFeaturesEXT const& ) const = default;
#else
    bool operator==( PhysicalDeviceFragmentDensityMapFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( fragmentDensityMap == rhs.fragmentDensityMap )
          && ( fragmentDensityMapDynamic == rhs.fragmentDensityMapDynamic )
          && ( fragmentDensityMapNonSubsampledImages == rhs.fragmentDensityMapNonSubsampledImages );
    }

    bool operator!=( PhysicalDeviceFragmentDensityMapFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT;
    void* pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMap = {};
    VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDynamic = {};
    VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapNonSubsampledImages = {};
  };
  static_assert( sizeof( PhysicalDeviceFragmentDensityMapFeaturesEXT ) == sizeof( VkPhysicalDeviceFragmentDensityMapFeaturesEXT ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceFragmentDensityMapFeaturesEXT>::value, "struct wrapper is not a standard layout!" );

  struct PhysicalDeviceFragmentDensityMapPropertiesEXT
  {
    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapPropertiesEXT( VULKAN_HPP_NAMESPACE::Extent2D minFragmentDensityTexelSize_ = {},
                                                                        VULKAN_HPP_NAMESPACE::Extent2D maxFragmentDensityTexelSize_ = {},
                                                                        VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityInvocations_ = {} ) VULKAN_HPP_NOEXCEPT
      : minFragmentDensityTexelSize( minFragmentDensityTexelSize_ )
      , maxFragmentDensityTexelSize( maxFragmentDensityTexelSize_ )
      , fragmentDensityInvocations( fragmentDensityInvocations_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapPropertiesEXT( PhysicalDeviceFragmentDensityMapPropertiesEXT const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , minFragmentDensityTexelSize( rhs.minFragmentDensityTexelSize )
      , maxFragmentDensityTexelSize( rhs.maxFragmentDensityTexelSize )
      , fragmentDensityInvocations( rhs.fragmentDensityInvocations )
    {}

    PhysicalDeviceFragmentDensityMapPropertiesEXT & operator=( PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceFragmentDensityMapPropertiesEXT ) - offsetof( PhysicalDeviceFragmentDensityMapPropertiesEXT, pNext ) );
      return *this;
    }

    PhysicalDeviceFragmentDensityMapPropertiesEXT( VkPhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    PhysicalDeviceFragmentDensityMapPropertiesEXT& operator=( VkPhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapPropertiesEXT const *>(&rhs);
      return *this;
    }

    operator VkPhysicalDeviceFragmentDensityMapPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceFragmentDensityMapPropertiesEXT*>( this );
    }

    operator VkPhysicalDeviceFragmentDensityMapPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceFragmentDensityMapPropertiesEXT*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceFragmentDensityMapPropertiesEXT const& ) const = default;
#else
    bool operator==( PhysicalDeviceFragmentDensityMapPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( minFragmentDensityTexelSize == rhs.minFragmentDensityTexelSize )
          && ( maxFragmentDensityTexelSize == rhs.maxFragmentDensityTexelSize )
          && ( fragmentDensityInvocations == rhs.fragmentDensityInvocations );
    }

    bool operator!=( PhysicalDeviceFragmentDensityMapPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT;
    void* pNext = {};
    VULKAN_HPP_NAMESPACE::Extent2D minFragmentDensityTexelSize = {};
    VULKAN_HPP_NAMESPACE::Extent2D maxFragmentDensityTexelSize = {};
    VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityInvocations = {};
  };
  static_assert( sizeof( PhysicalDeviceFragmentDensityMapPropertiesEXT ) == sizeof( VkPhysicalDeviceFragmentDensityMapPropertiesEXT ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceFragmentDensityMapPropertiesEXT>::value, "struct wrapper is not a standard layout!" );

  struct PhysicalDeviceFragmentShaderBarycentricFeaturesNV
  {
    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderBarycentricFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric_ = {} ) VULKAN_HPP_NOEXCEPT
      : fragmentShaderBarycentric( fragmentShaderBarycentric_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderBarycentricFeaturesNV( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , fragmentShaderBarycentric( rhs.fragmentShaderBarycentric )
    {}

    PhysicalDeviceFragmentShaderBarycentricFeaturesNV & operator=( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceFragmentShaderBarycentricFeaturesNV ) - offsetof( PhysicalDeviceFragmentShaderBarycentricFeaturesNV, pNext ) );
      return *this;
    }

    PhysicalDeviceFragmentShaderBarycentricFeaturesNV( VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    PhysicalDeviceFragmentShaderBarycentricFeaturesNV& operator=( VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricFeaturesNV const *>(&rhs);
      return *this;
    }

    PhysicalDeviceFragmentShaderBarycentricFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceFragmentShaderBarycentricFeaturesNV & setFragmentShaderBarycentric( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric_ ) VULKAN_HPP_NOEXCEPT
    {
      fragmentShaderBarycentric = fragmentShaderBarycentric_;
      return *this;
    }

    operator VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV*>( this );
    }

    operator VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const& ) const = default;
#else
    bool operator==( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( fragmentShaderBarycentric == rhs.fragmentShaderBarycentric );
    }

    bool operator!=( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesNV;
    void* pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric = {};
  };
  static_assert( sizeof( PhysicalDeviceFragmentShaderBarycentricFeaturesNV ) == sizeof( VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceFragmentShaderBarycentricFeaturesNV>::value, "struct wrapper is not a standard layout!" );

  struct PhysicalDeviceFragmentShaderInterlockFeaturesEXT
  {
    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderInterlockFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock_ = {},
                                                                           VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock_ = {},
                                                                           VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock_ = {} ) VULKAN_HPP_NOEXCEPT
      : fragmentShaderSampleInterlock( fragmentShaderSampleInterlock_ )
      , fragmentShaderPixelInterlock( fragmentShaderPixelInterlock_ )
      , fragmentShaderShadingRateInterlock( fragmentShaderShadingRateInterlock_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderInterlockFeaturesEXT( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , fragmentShaderSampleInterlock( rhs.fragmentShaderSampleInterlock )
      , fragmentShaderPixelInterlock( rhs.fragmentShaderPixelInterlock )
      , fragmentShaderShadingRateInterlock( rhs.fragmentShaderShadingRateInterlock )
    {}

    PhysicalDeviceFragmentShaderInterlockFeaturesEXT & operator=( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceFragmentShaderInterlockFeaturesEXT ) - offsetof( PhysicalDeviceFragmentShaderInterlockFeaturesEXT, pNext ) );
      return *this;
    }

    PhysicalDeviceFragmentShaderInterlockFeaturesEXT( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    PhysicalDeviceFragmentShaderInterlockFeaturesEXT& operator=( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderInterlockFeaturesEXT const *>(&rhs);
      return *this;
    }

    PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setFragmentShaderSampleInterlock( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock_ ) VULKAN_HPP_NOEXCEPT
    {
      fragmentShaderSampleInterlock = fragmentShaderSampleInterlock_;
      return *this;
    }

    PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setFragmentShaderPixelInterlock( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock_ ) VULKAN_HPP_NOEXCEPT
    {
      fragmentShaderPixelInterlock = fragmentShaderPixelInterlock_;
      return *this;
    }

    PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setFragmentShaderShadingRateInterlock( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock_ ) VULKAN_HPP_NOEXCEPT
    {
      fragmentShaderShadingRateInterlock = fragmentShaderShadingRateInterlock_;
      return *this;
    }

    operator VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT*>( this );
    }

    operator VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const& ) const = default;
#else
    bool operator==( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( fragmentShaderSampleInterlock == rhs.fragmentShaderSampleInterlock )
          && ( fragmentShaderPixelInterlock == rhs.fragmentShaderPixelInterlock )
          && ( fragmentShaderShadingRateInterlock == rhs.fragmentShaderShadingRateInterlock );
    }

    bool operator!=( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT;
    void* pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock = {};
    VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock = {};
    VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock = {};
  };
  static_assert( sizeof( PhysicalDeviceFragmentShaderInterlockFeaturesEXT ) == sizeof( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceFragmentShaderInterlockFeaturesEXT>::value, "struct wrapper is not a standard layout!" );

  struct PhysicalDeviceGroupProperties
  {
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGroupProperties( uint32_t physicalDeviceCount_ = {},
                                                           std::array<VULKAN_HPP_NAMESPACE::PhysicalDevice,VK_MAX_DEVICE_GROUP_SIZE> const& physicalDevices_ = {},
                                                           VULKAN_HPP_NAMESPACE::Bool32 subsetAllocation_ = {} ) VULKAN_HPP_NOEXCEPT
      : physicalDeviceCount( physicalDeviceCount_ )
      , physicalDevices{}
      , subsetAllocation( subsetAllocation_ )
    {
      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<VULKAN_HPP_NAMESPACE::PhysicalDevice,VK_MAX_DEVICE_GROUP_SIZE>::copy( physicalDevices, physicalDevices_ );
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGroupProperties( PhysicalDeviceGroupProperties const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , physicalDeviceCount( rhs.physicalDeviceCount )
      , physicalDevices{}
      , subsetAllocation( rhs.subsetAllocation )
    {
      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<VULKAN_HPP_NAMESPACE::PhysicalDevice,VK_MAX_DEVICE_GROUP_SIZE>::copy( physicalDevices, rhs.physicalDevices );
    }

    PhysicalDeviceGroupProperties & operator=( PhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceGroupProperties ) - offsetof( PhysicalDeviceGroupProperties, pNext ) );
      return *this;
    }

    PhysicalDeviceGroupProperties( VkPhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    PhysicalDeviceGroupProperties& operator=( VkPhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties const *>(&rhs);
      return *this;
    }

    operator VkPhysicalDeviceGroupProperties const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceGroupProperties*>( this );
    }

    operator VkPhysicalDeviceGroupProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceGroupProperties*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceGroupProperties const& ) const = default;
#else
    bool operator==( PhysicalDeviceGroupProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( physicalDeviceCount == rhs.physicalDeviceCount )
          && ( memcmp( physicalDevices, rhs.physicalDevices, std::min<uint32_t>( VK_MAX_DEVICE_GROUP_SIZE, physicalDeviceCount ) * sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevice ) ) == 0 )
          && ( subsetAllocation == rhs.subsetAllocation );
    }

    bool operator!=( PhysicalDeviceGroupProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceGroupProperties;
    void* pNext = {};
    uint32_t physicalDeviceCount = {};
    VULKAN_HPP_NAMESPACE::PhysicalDevice physicalDevices[VK_MAX_DEVICE_GROUP_SIZE] = {};
    VULKAN_HPP_NAMESPACE::Bool32 subsetAllocation = {};
  };
  static_assert( sizeof( PhysicalDeviceGroupProperties ) == sizeof( VkPhysicalDeviceGroupProperties ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceGroupProperties>::value, "struct wrapper is not a standard layout!" );

  struct PhysicalDeviceHostQueryResetFeaturesEXT
  {
    VULKAN_HPP_CONSTEXPR PhysicalDeviceHostQueryResetFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ = {} ) VULKAN_HPP_NOEXCEPT
      : hostQueryReset( hostQueryReset_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceHostQueryResetFeaturesEXT( PhysicalDeviceHostQueryResetFeaturesEXT const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , hostQueryReset( rhs.hostQueryReset )
    {}

    PhysicalDeviceHostQueryResetFeaturesEXT & operator=( PhysicalDeviceHostQueryResetFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceHostQueryResetFeaturesEXT ) - offsetof( PhysicalDeviceHostQueryResetFeaturesEXT, pNext ) );
      return *this;
    }

    PhysicalDeviceHostQueryResetFeaturesEXT( VkPhysicalDeviceHostQueryResetFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    PhysicalDeviceHostQueryResetFeaturesEXT& operator=( VkPhysicalDeviceHostQueryResetFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceHostQueryResetFeaturesEXT const *>(&rhs);
      return *this;
    }

    PhysicalDeviceHostQueryResetFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceHostQueryResetFeaturesEXT & setHostQueryReset( VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ ) VULKAN_HPP_NOEXCEPT
    {
      hostQueryReset = hostQueryReset_;
      return *this;
    }

    operator VkPhysicalDeviceHostQueryResetFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceHostQueryResetFeaturesEXT*>( this );
    }

    operator VkPhysicalDeviceHostQueryResetFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceHostQueryResetFeaturesEXT*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceHostQueryResetFeaturesEXT const& ) const = default;
#else
    bool operator==( PhysicalDeviceHostQueryResetFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( hostQueryReset == rhs.hostQueryReset );
    }

    bool operator!=( PhysicalDeviceHostQueryResetFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceHostQueryResetFeaturesEXT;
    void* pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset = {};
  };
  static_assert( sizeof( PhysicalDeviceHostQueryResetFeaturesEXT ) == sizeof( VkPhysicalDeviceHostQueryResetFeaturesEXT ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceHostQueryResetFeaturesEXT>::value, "struct wrapper is not a standard layout!" );

  struct PhysicalDeviceIDProperties
  {
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIDProperties( std::array<uint8_t,VK_UUID_SIZE> const& deviceUUID_ = {},
                                                        std::array<uint8_t,VK_UUID_SIZE> const& driverUUID_ = {},
                                                        std::array<uint8_t,VK_LUID_SIZE> const& deviceLUID_ = {},
                                                        uint32_t deviceNodeMask_ = {},
                                                        VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid_ = {} ) VULKAN_HPP_NOEXCEPT
      : deviceUUID{}
      , driverUUID{}
      , deviceLUID{}
      , deviceNodeMask( deviceNodeMask_ )
      , deviceLUIDValid( deviceLUIDValid_ )
    {
      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint8_t,VK_UUID_SIZE>::copy( deviceUUID, deviceUUID_ );
      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint8_t,VK_UUID_SIZE>::copy( driverUUID, driverUUID_ );
      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint8_t,VK_LUID_SIZE>::copy( deviceLUID, deviceLUID_ );
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIDProperties( PhysicalDeviceIDProperties const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , deviceUUID{}
      , driverUUID{}
      , deviceLUID{}
      , deviceNodeMask( rhs.deviceNodeMask )
      , deviceLUIDValid( rhs.deviceLUIDValid )
    {
      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint8_t,VK_UUID_SIZE>::copy( deviceUUID, rhs.deviceUUID );
      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint8_t,VK_UUID_SIZE>::copy( driverUUID, rhs.driverUUID );
      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint8_t,VK_LUID_SIZE>::copy( deviceLUID, rhs.deviceLUID );
    }

    PhysicalDeviceIDProperties & operator=( PhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceIDProperties ) - offsetof( PhysicalDeviceIDProperties, pNext ) );
      return *this;
    }

    PhysicalDeviceIDProperties( VkPhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    PhysicalDeviceIDProperties& operator=( VkPhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceIDProperties const *>(&rhs);
      return *this;
    }

    operator VkPhysicalDeviceIDProperties const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceIDProperties*>( this );
    }

    operator VkPhysicalDeviceIDProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceIDProperties*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceIDProperties const& ) const = default;
#else
    bool operator==( PhysicalDeviceIDProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( memcmp( deviceUUID, rhs.deviceUUID, VK_UUID_SIZE * sizeof( uint8_t ) ) == 0 )
          && ( memcmp( driverUUID, rhs.driverUUID, VK_UUID_SIZE * sizeof( uint8_t ) ) == 0 )
          && ( memcmp( deviceLUID, rhs.deviceLUID, VK_LUID_SIZE * sizeof( uint8_t ) ) == 0 )
          && ( deviceNodeMask == rhs.deviceNodeMask )
          && ( deviceLUIDValid == rhs.deviceLUIDValid );
    }

    bool operator!=( PhysicalDeviceIDProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceIdProperties;
    void* pNext = {};
    uint8_t deviceUUID[VK_UUID_SIZE] = {};
    uint8_t driverUUID[VK_UUID_SIZE] = {};
    uint8_t deviceLUID[VK_LUID_SIZE] = {};
    uint32_t deviceNodeMask = {};
    VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid = {};
  };
  static_assert( sizeof( PhysicalDeviceIDProperties ) == sizeof( VkPhysicalDeviceIDProperties ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceIDProperties>::value, "struct wrapper is not a standard layout!" );

  struct PhysicalDeviceImageDrmFormatModifierInfoEXT
  {
    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageDrmFormatModifierInfoEXT( uint64_t drmFormatModifier_ = {},
                                                                      VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive,
                                                                      uint32_t queueFamilyIndexCount_ = {},
                                                                      const uint32_t* pQueueFamilyIndices_ = {} ) VULKAN_HPP_NOEXCEPT
      : drmFormatModifier( drmFormatModifier_ )
      , sharingMode( sharingMode_ )
      , queueFamilyIndexCount( queueFamilyIndexCount_ )
      , pQueueFamilyIndices( pQueueFamilyIndices_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageDrmFormatModifierInfoEXT( PhysicalDeviceImageDrmFormatModifierInfoEXT const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , drmFormatModifier( rhs.drmFormatModifier )
      , sharingMode( rhs.sharingMode )
      , queueFamilyIndexCount( rhs.queueFamilyIndexCount )
      , pQueueFamilyIndices( rhs.pQueueFamilyIndices )
    {}

    PhysicalDeviceImageDrmFormatModifierInfoEXT & operator=( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceImageDrmFormatModifierInfoEXT ) - offsetof( PhysicalDeviceImageDrmFormatModifierInfoEXT, pNext ) );
      return *this;
    }

    PhysicalDeviceImageDrmFormatModifierInfoEXT( VkPhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    PhysicalDeviceImageDrmFormatModifierInfoEXT& operator=( VkPhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageDrmFormatModifierInfoEXT const *>(&rhs);
      return *this;
    }

    PhysicalDeviceImageDrmFormatModifierInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceImageDrmFormatModifierInfoEXT & setDrmFormatModifier( uint64_t drmFormatModifier_ ) VULKAN_HPP_NOEXCEPT
    {
      drmFormatModifier = drmFormatModifier_;
      return *this;
    }

    PhysicalDeviceImageDrmFormatModifierInfoEXT & setSharingMode( VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ ) VULKAN_HPP_NOEXCEPT
    {
      sharingMode = sharingMode_;
      return *this;
    }

    PhysicalDeviceImageDrmFormatModifierInfoEXT & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT
    {
      queueFamilyIndexCount = queueFamilyIndexCount_;
      return *this;
    }

    PhysicalDeviceImageDrmFormatModifierInfoEXT & setPQueueFamilyIndices( const uint32_t* pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
    {
      pQueueFamilyIndices = pQueueFamilyIndices_;
      return *this;
    }

    operator VkPhysicalDeviceImageDrmFormatModifierInfoEXT const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceImageDrmFormatModifierInfoEXT*>( this );
    }

    operator VkPhysicalDeviceImageDrmFormatModifierInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceImageDrmFormatModifierInfoEXT*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceImageDrmFormatModifierInfoEXT const& ) const = default;
#else
    bool operator==( PhysicalDeviceImageDrmFormatModifierInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( drmFormatModifier == rhs.drmFormatModifier )
          && ( sharingMode == rhs.sharingMode )
          && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount )
          && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices );
    }

    bool operator!=( PhysicalDeviceImageDrmFormatModifierInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT;
    const void* pNext = {};
    uint64_t drmFormatModifier = {};
    VULKAN_HPP_NAMESPACE::SharingMode sharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive;
    uint32_t queueFamilyIndexCount = {};
    const uint32_t* pQueueFamilyIndices = {};
  };
  static_assert( sizeof( PhysicalDeviceImageDrmFormatModifierInfoEXT ) == sizeof( VkPhysicalDeviceImageDrmFormatModifierInfoEXT ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceImageDrmFormatModifierInfoEXT>::value, "struct wrapper is not a standard layout!" );

  struct PhysicalDeviceImageFormatInfo2
  {
    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageFormatInfo2( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
                                                         VULKAN_HPP_NAMESPACE::ImageType type_ = VULKAN_HPP_NAMESPACE::ImageType::e1D,
                                                         VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal,
                                                         VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {},
                                                         VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT
      : format( format_ )
      , type( type_ )
      , tiling( tiling_ )
      , usage( usage_ )
      , flags( flags_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageFormatInfo2( PhysicalDeviceImageFormatInfo2 const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , format( rhs.format )
      , type( rhs.type )
      , tiling( rhs.tiling )
      , usage( rhs.usage )
      , flags( rhs.flags )
    {}

    PhysicalDeviceImageFormatInfo2 & operator=( PhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceImageFormatInfo2 ) - offsetof( PhysicalDeviceImageFormatInfo2, pNext ) );
      return *this;
    }

    PhysicalDeviceImageFormatInfo2( VkPhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    PhysicalDeviceImageFormatInfo2& operator=( VkPhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 const *>(&rhs);
      return *this;
    }

    PhysicalDeviceImageFormatInfo2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceImageFormatInfo2 & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
    {
      format = format_;
      return *this;
    }

    PhysicalDeviceImageFormatInfo2 & setType( VULKAN_HPP_NAMESPACE::ImageType type_ ) VULKAN_HPP_NOEXCEPT
    {
      type = type_;
      return *this;
    }

    PhysicalDeviceImageFormatInfo2 & setTiling( VULKAN_HPP_NAMESPACE::ImageTiling tiling_ ) VULKAN_HPP_NOEXCEPT
    {
      tiling = tiling_;
      return *this;
    }

    PhysicalDeviceImageFormatInfo2 & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
    {
      usage = usage_;
      return *this;
    }

    PhysicalDeviceImageFormatInfo2 & setFlags( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    operator VkPhysicalDeviceImageFormatInfo2 const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2*>( this );
    }

    operator VkPhysicalDeviceImageFormatInfo2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceImageFormatInfo2*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceImageFormatInfo2 const& ) const = default;
#else
    bool operator==( PhysicalDeviceImageFormatInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( format == rhs.format )
          && ( type == rhs.type )
          && ( tiling == rhs.tiling )
          && ( usage == rhs.usage )
          && ( flags == rhs.flags );
    }

    bool operator!=( PhysicalDeviceImageFormatInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageFormatInfo2;
    const void* pNext = {};
    VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
    VULKAN_HPP_NAMESPACE::ImageType type = VULKAN_HPP_NAMESPACE::ImageType::e1D;
    VULKAN_HPP_NAMESPACE::ImageTiling tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal;
    VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {};
    VULKAN_HPP_NAMESPACE::ImageCreateFlags flags = {};
  };
  static_assert( sizeof( PhysicalDeviceImageFormatInfo2 ) == sizeof( VkPhysicalDeviceImageFormatInfo2 ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceImageFormatInfo2>::value, "struct wrapper is not a standard layout!" );

struct PhysicalDeviceImageViewImageFormatInfoEXT
|
|
{
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceImageViewImageFormatInfoEXT( VULKAN_HPP_NAMESPACE::ImageViewType imageViewType_ = VULKAN_HPP_NAMESPACE::ImageViewType::e1D ) VULKAN_HPP_NOEXCEPT
|
|
: imageViewType( imageViewType_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceImageViewImageFormatInfoEXT( PhysicalDeviceImageViewImageFormatInfoEXT const& rhs ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( rhs.pNext )
|
|
, imageViewType( rhs.imageViewType )
|
|
{}
|
|
|
|
PhysicalDeviceImageViewImageFormatInfoEXT & operator=( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceImageViewImageFormatInfoEXT ) - offsetof( PhysicalDeviceImageViewImageFormatInfoEXT, pNext ) );
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceImageViewImageFormatInfoEXT( VkPhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = rhs;
|
|
}
|
|
|
|
PhysicalDeviceImageViewImageFormatInfoEXT& operator=( VkPhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewImageFormatInfoEXT const *>(&rhs);
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceImageViewImageFormatInfoEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceImageViewImageFormatInfoEXT & setImageViewType( VULKAN_HPP_NAMESPACE::ImageViewType imageViewType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
imageViewType = imageViewType_;
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDeviceImageViewImageFormatInfoEXT const&() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceImageViewImageFormatInfoEXT*>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceImageViewImageFormatInfoEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceImageViewImageFormatInfoEXT*>( this );
|
|
}
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceImageViewImageFormatInfoEXT const& ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceImageViewImageFormatInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( imageViewType == rhs.imageViewType );
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceImageViewImageFormatInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT;
|
|
void* pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ImageViewType imageViewType = VULKAN_HPP_NAMESPACE::ImageViewType::e1D;
|
|
};
|
|
static_assert( sizeof( PhysicalDeviceImageViewImageFormatInfoEXT ) == sizeof( VkPhysicalDeviceImageViewImageFormatInfoEXT ), "struct and wrapper have different size!" );
|
|
static_assert( std::is_standard_layout<PhysicalDeviceImageViewImageFormatInfoEXT>::value, "struct wrapper is not a standard layout!" );
|
|
|
|
struct PhysicalDeviceImagelessFramebufferFeaturesKHR
|
|
{
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceImagelessFramebufferFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: imagelessFramebuffer( imagelessFramebuffer_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceImagelessFramebufferFeaturesKHR( PhysicalDeviceImagelessFramebufferFeaturesKHR const& rhs ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( rhs.pNext )
|
|
, imagelessFramebuffer( rhs.imagelessFramebuffer )
|
|
{}
|
|
|
|
PhysicalDeviceImagelessFramebufferFeaturesKHR & operator=( PhysicalDeviceImagelessFramebufferFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceImagelessFramebufferFeaturesKHR ) - offsetof( PhysicalDeviceImagelessFramebufferFeaturesKHR, pNext ) );
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceImagelessFramebufferFeaturesKHR( VkPhysicalDeviceImagelessFramebufferFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = rhs;
|
|
}
|
|
|
|
PhysicalDeviceImagelessFramebufferFeaturesKHR& operator=( VkPhysicalDeviceImagelessFramebufferFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImagelessFramebufferFeaturesKHR const *>(&rhs);
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceImagelessFramebufferFeaturesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceImagelessFramebufferFeaturesKHR & setImagelessFramebuffer( VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
imagelessFramebuffer = imagelessFramebuffer_;
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDeviceImagelessFramebufferFeaturesKHR const&() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceImagelessFramebufferFeaturesKHR*>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceImagelessFramebufferFeaturesKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceImagelessFramebufferFeaturesKHR*>( this );
|
|
}
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceImagelessFramebufferFeaturesKHR const& ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceImagelessFramebufferFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( imagelessFramebuffer == rhs.imagelessFramebuffer );
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceImagelessFramebufferFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImagelessFramebufferFeaturesKHR;
|
|
void* pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer = {};
|
|
};
|
|
static_assert( sizeof( PhysicalDeviceImagelessFramebufferFeaturesKHR ) == sizeof( VkPhysicalDeviceImagelessFramebufferFeaturesKHR ), "struct and wrapper have different size!" );
|
|
static_assert( std::is_standard_layout<PhysicalDeviceImagelessFramebufferFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
|
|
|
|
struct PhysicalDeviceIndexTypeUint8FeaturesEXT
|
|
{
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceIndexTypeUint8FeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: indexTypeUint8( indexTypeUint8_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceIndexTypeUint8FeaturesEXT( PhysicalDeviceIndexTypeUint8FeaturesEXT const& rhs ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( rhs.pNext )
|
|
, indexTypeUint8( rhs.indexTypeUint8 )
|
|
{}
|
|
|
|
PhysicalDeviceIndexTypeUint8FeaturesEXT & operator=( PhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceIndexTypeUint8FeaturesEXT ) - offsetof( PhysicalDeviceIndexTypeUint8FeaturesEXT, pNext ) );
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceIndexTypeUint8FeaturesEXT( VkPhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = rhs;
|
|
}
|
|
|
|
PhysicalDeviceIndexTypeUint8FeaturesEXT& operator=( VkPhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesEXT const *>(&rhs);
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceIndexTypeUint8FeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceIndexTypeUint8FeaturesEXT & setIndexTypeUint8( VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
indexTypeUint8 = indexTypeUint8_;
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDeviceIndexTypeUint8FeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceIndexTypeUint8FeaturesEXT*>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceIndexTypeUint8FeaturesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceIndexTypeUint8FeaturesEXT*>( this );
|
|
}
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceIndexTypeUint8FeaturesEXT const& ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceIndexTypeUint8FeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( indexTypeUint8 == rhs.indexTypeUint8 );
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceIndexTypeUint8FeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceIndexTypeUint8FeaturesEXT;
|
|
void* pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8 = {};
|
|
};
|
|
static_assert( sizeof( PhysicalDeviceIndexTypeUint8FeaturesEXT ) == sizeof( VkPhysicalDeviceIndexTypeUint8FeaturesEXT ), "struct and wrapper have different size!" );
|
|
static_assert( std::is_standard_layout<PhysicalDeviceIndexTypeUint8FeaturesEXT>::value, "struct wrapper is not a standard layout!" );
|
|
|
|
struct PhysicalDeviceInlineUniformBlockFeaturesEXT
|
|
{
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: inlineUniformBlock( inlineUniformBlock_ )
|
|
, descriptorBindingInlineUniformBlockUpdateAfterBind( descriptorBindingInlineUniformBlockUpdateAfterBind_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockFeaturesEXT( PhysicalDeviceInlineUniformBlockFeaturesEXT const& rhs ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( rhs.pNext )
|
|
, inlineUniformBlock( rhs.inlineUniformBlock )
|
|
, descriptorBindingInlineUniformBlockUpdateAfterBind( rhs.descriptorBindingInlineUniformBlockUpdateAfterBind )
|
|
{}
|
|
|
|
PhysicalDeviceInlineUniformBlockFeaturesEXT & operator=( PhysicalDeviceInlineUniformBlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceInlineUniformBlockFeaturesEXT ) - offsetof( PhysicalDeviceInlineUniformBlockFeaturesEXT, pNext ) );
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceInlineUniformBlockFeaturesEXT( VkPhysicalDeviceInlineUniformBlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = rhs;
|
|
}
|
|
|
|
PhysicalDeviceInlineUniformBlockFeaturesEXT& operator=( VkPhysicalDeviceInlineUniformBlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockFeaturesEXT const *>(&rhs);
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceInlineUniformBlockFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceInlineUniformBlockFeaturesEXT & setInlineUniformBlock( VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
inlineUniformBlock = inlineUniformBlock_;
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceInlineUniformBlockFeaturesEXT & setDescriptorBindingInlineUniformBlockUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
descriptorBindingInlineUniformBlockUpdateAfterBind = descriptorBindingInlineUniformBlockUpdateAfterBind_;
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDeviceInlineUniformBlockFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceInlineUniformBlockFeaturesEXT*>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceInlineUniformBlockFeaturesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceInlineUniformBlockFeaturesEXT*>( this );
|
|
}
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceInlineUniformBlockFeaturesEXT const& ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceInlineUniformBlockFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( inlineUniformBlock == rhs.inlineUniformBlock )
|
|
&& ( descriptorBindingInlineUniformBlockUpdateAfterBind == rhs.descriptorBindingInlineUniformBlockUpdateAfterBind );
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceInlineUniformBlockFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInlineUniformBlockFeaturesEXT;
|
|
void* pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind = {};
|
|
};
|
|
static_assert( sizeof( PhysicalDeviceInlineUniformBlockFeaturesEXT ) == sizeof( VkPhysicalDeviceInlineUniformBlockFeaturesEXT ), "struct and wrapper have different size!" );
|
|
static_assert( std::is_standard_layout<PhysicalDeviceInlineUniformBlockFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
|
|
|
|
struct PhysicalDeviceInlineUniformBlockPropertiesEXT
|
|
{
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockPropertiesEXT( uint32_t maxInlineUniformBlockSize_ = {},
|
|
uint32_t maxPerStageDescriptorInlineUniformBlocks_ = {},
|
|
uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ = {},
|
|
uint32_t maxDescriptorSetInlineUniformBlocks_ = {},
|
|
uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: maxInlineUniformBlockSize( maxInlineUniformBlockSize_ )
|
|
, maxPerStageDescriptorInlineUniformBlocks( maxPerStageDescriptorInlineUniformBlocks_ )
|
|
, maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks( maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ )
|
|
, maxDescriptorSetInlineUniformBlocks( maxDescriptorSetInlineUniformBlocks_ )
|
|
, maxDescriptorSetUpdateAfterBindInlineUniformBlocks( maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockPropertiesEXT( PhysicalDeviceInlineUniformBlockPropertiesEXT const& rhs ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( rhs.pNext )
|
|
, maxInlineUniformBlockSize( rhs.maxInlineUniformBlockSize )
|
|
, maxPerStageDescriptorInlineUniformBlocks( rhs.maxPerStageDescriptorInlineUniformBlocks )
|
|
, maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks( rhs.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks )
|
|
, maxDescriptorSetInlineUniformBlocks( rhs.maxDescriptorSetInlineUniformBlocks )
|
|
, maxDescriptorSetUpdateAfterBindInlineUniformBlocks( rhs.maxDescriptorSetUpdateAfterBindInlineUniformBlocks )
|
|
{}
|
|
|
|
PhysicalDeviceInlineUniformBlockPropertiesEXT & operator=( PhysicalDeviceInlineUniformBlockPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceInlineUniformBlockPropertiesEXT ) - offsetof( PhysicalDeviceInlineUniformBlockPropertiesEXT, pNext ) );
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceInlineUniformBlockPropertiesEXT( VkPhysicalDeviceInlineUniformBlockPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = rhs;
|
|
}
|
|
|
|
PhysicalDeviceInlineUniformBlockPropertiesEXT& operator=( VkPhysicalDeviceInlineUniformBlockPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockPropertiesEXT const *>(&rhs);
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDeviceInlineUniformBlockPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceInlineUniformBlockPropertiesEXT*>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceInlineUniformBlockPropertiesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceInlineUniformBlockPropertiesEXT*>( this );
|
|
}
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceInlineUniformBlockPropertiesEXT const& ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceInlineUniformBlockPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( maxInlineUniformBlockSize == rhs.maxInlineUniformBlockSize )
|
|
&& ( maxPerStageDescriptorInlineUniformBlocks == rhs.maxPerStageDescriptorInlineUniformBlocks )
|
|
&& ( maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks == rhs.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks )
|
|
&& ( maxDescriptorSetInlineUniformBlocks == rhs.maxDescriptorSetInlineUniformBlocks )
|
|
&& ( maxDescriptorSetUpdateAfterBindInlineUniformBlocks == rhs.maxDescriptorSetUpdateAfterBindInlineUniformBlocks );
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceInlineUniformBlockPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInlineUniformBlockPropertiesEXT;
|
|
void* pNext = {};
|
|
uint32_t maxInlineUniformBlockSize = {};
|
|
uint32_t maxPerStageDescriptorInlineUniformBlocks = {};
|
|
uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks = {};
|
|
uint32_t maxDescriptorSetInlineUniformBlocks = {};
|
|
uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks = {};
|
|
};
|
|
static_assert( sizeof( PhysicalDeviceInlineUniformBlockPropertiesEXT ) == sizeof( VkPhysicalDeviceInlineUniformBlockPropertiesEXT ), "struct and wrapper have different size!" );
|
|
static_assert( std::is_standard_layout<PhysicalDeviceInlineUniformBlockPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
|
|
|
|
struct PhysicalDeviceLimits
|
|
{
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLimits( uint32_t maxImageDimension1D_ = {},
|
|
uint32_t maxImageDimension2D_ = {},
|
|
uint32_t maxImageDimension3D_ = {},
|
|
uint32_t maxImageDimensionCube_ = {},
|
|
uint32_t maxImageArrayLayers_ = {},
|
|
uint32_t maxTexelBufferElements_ = {},
|
|
uint32_t maxUniformBufferRange_ = {},
|
|
uint32_t maxStorageBufferRange_ = {},
|
|
uint32_t maxPushConstantsSize_ = {},
|
|
uint32_t maxMemoryAllocationCount_ = {},
|
|
uint32_t maxSamplerAllocationCount_ = {},
|
|
VULKAN_HPP_NAMESPACE::DeviceSize bufferImageGranularity_ = {},
|
|
VULKAN_HPP_NAMESPACE::DeviceSize sparseAddressSpaceSize_ = {},
|
|
uint32_t maxBoundDescriptorSets_ = {},
|
|
uint32_t maxPerStageDescriptorSamplers_ = {},
|
|
uint32_t maxPerStageDescriptorUniformBuffers_ = {},
|
|
uint32_t maxPerStageDescriptorStorageBuffers_ = {},
|
|
uint32_t maxPerStageDescriptorSampledImages_ = {},
|
|
uint32_t maxPerStageDescriptorStorageImages_ = {},
|
|
uint32_t maxPerStageDescriptorInputAttachments_ = {},
|
|
uint32_t maxPerStageResources_ = {},
|
|
uint32_t maxDescriptorSetSamplers_ = {},
|
|
uint32_t maxDescriptorSetUniformBuffers_ = {},
|
|
uint32_t maxDescriptorSetUniformBuffersDynamic_ = {},
|
|
uint32_t maxDescriptorSetStorageBuffers_ = {},
|
|
uint32_t maxDescriptorSetStorageBuffersDynamic_ = {},
|
|
uint32_t maxDescriptorSetSampledImages_ = {},
|
|
uint32_t maxDescriptorSetStorageImages_ = {},
|
|
uint32_t maxDescriptorSetInputAttachments_ = {},
|
|
uint32_t maxVertexInputAttributes_ = {},
|
|
uint32_t maxVertexInputBindings_ = {},
|
|
uint32_t maxVertexInputAttributeOffset_ = {},
|
|
uint32_t maxVertexInputBindingStride_ = {},
|
|
uint32_t maxVertexOutputComponents_ = {},
|
|
uint32_t maxTessellationGenerationLevel_ = {},
|
|
uint32_t maxTessellationPatchSize_ = {},
|
|
uint32_t maxTessellationControlPerVertexInputComponents_ = {},
|
|
uint32_t maxTessellationControlPerVertexOutputComponents_ = {},
|
|
uint32_t maxTessellationControlPerPatchOutputComponents_ = {},
|
|
uint32_t maxTessellationControlTotalOutputComponents_ = {},
|
|
uint32_t maxTessellationEvaluationInputComponents_ = {},
|
|
uint32_t maxTessellationEvaluationOutputComponents_ = {},
|
|
uint32_t maxGeometryShaderInvocations_ = {},
|
|
uint32_t maxGeometryInputComponents_ = {},
|
|
uint32_t maxGeometryOutputComponents_ = {},
|
|
uint32_t maxGeometryOutputVertices_ = {},
|
|
uint32_t maxGeometryTotalOutputComponents_ = {},
|
|
uint32_t maxFragmentInputComponents_ = {},
|
|
uint32_t maxFragmentOutputAttachments_ = {},
|
|
uint32_t maxFragmentDualSrcAttachments_ = {},
|
|
uint32_t maxFragmentCombinedOutputResources_ = {},
|
|
uint32_t maxComputeSharedMemorySize_ = {},
|
|
std::array<uint32_t,3> const& maxComputeWorkGroupCount_ = {},
|
|
uint32_t maxComputeWorkGroupInvocations_ = {},
|
|
std::array<uint32_t,3> const& maxComputeWorkGroupSize_ = {},
|
|
uint32_t subPixelPrecisionBits_ = {},
|
|
uint32_t subTexelPrecisionBits_ = {},
|
|
uint32_t mipmapPrecisionBits_ = {},
|
|
uint32_t maxDrawIndexedIndexValue_ = {},
|
|
uint32_t maxDrawIndirectCount_ = {},
|
|
float maxSamplerLodBias_ = {},
|
|
float maxSamplerAnisotropy_ = {},
|
|
uint32_t maxViewports_ = {},
|
|
std::array<uint32_t,2> const& maxViewportDimensions_ = {},
|
|
std::array<float,2> const& viewportBoundsRange_ = {},
|
|
uint32_t viewportSubPixelBits_ = {},
|
|
size_t minMemoryMapAlignment_ = {},
|
|
VULKAN_HPP_NAMESPACE::DeviceSize minTexelBufferOffsetAlignment_ = {},
|
|
VULKAN_HPP_NAMESPACE::DeviceSize minUniformBufferOffsetAlignment_ = {},
|
|
VULKAN_HPP_NAMESPACE::DeviceSize minStorageBufferOffsetAlignment_ = {},
|
|
int32_t minTexelOffset_ = {},
|
|
uint32_t maxTexelOffset_ = {},
|
|
int32_t minTexelGatherOffset_ = {},
|
|
uint32_t maxTexelGatherOffset_ = {},
|
|
float minInterpolationOffset_ = {},
|
|
float maxInterpolationOffset_ = {},
|
|
uint32_t subPixelInterpolationOffsetBits_ = {},
|
|
uint32_t maxFramebufferWidth_ = {},
|
|
uint32_t maxFramebufferHeight_ = {},
|
|
uint32_t maxFramebufferLayers_ = {},
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferColorSampleCounts_ = {},
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferDepthSampleCounts_ = {},
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferStencilSampleCounts_ = {},
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferNoAttachmentsSampleCounts_ = {},
|
|
uint32_t maxColorAttachments_ = {},
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageColorSampleCounts_ = {},
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageIntegerSampleCounts_ = {},
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageDepthSampleCounts_ = {},
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageStencilSampleCounts_ = {},
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlags storageImageSampleCounts_ = {},
|
|
uint32_t maxSampleMaskWords_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 timestampComputeAndGraphics_ = {},
|
|
float timestampPeriod_ = {},
|
|
uint32_t maxClipDistances_ = {},
|
|
uint32_t maxCullDistances_ = {},
|
|
uint32_t maxCombinedClipAndCullDistances_ = {},
|
|
uint32_t discreteQueuePriorities_ = {},
|
|
std::array<float,2> const& pointSizeRange_ = {},
|
|
std::array<float,2> const& lineWidthRange_ = {},
|
|
float pointSizeGranularity_ = {},
|
|
float lineWidthGranularity_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 strictLines_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 standardSampleLocations_ = {},
|
|
VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyOffsetAlignment_ = {},
|
|
VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyRowPitchAlignment_ = {},
|
|
VULKAN_HPP_NAMESPACE::DeviceSize nonCoherentAtomSize_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: maxImageDimension1D( maxImageDimension1D_ )
|
|
, maxImageDimension2D( maxImageDimension2D_ )
|
|
, maxImageDimension3D( maxImageDimension3D_ )
|
|
, maxImageDimensionCube( maxImageDimensionCube_ )
|
|
, maxImageArrayLayers( maxImageArrayLayers_ )
|
|
, maxTexelBufferElements( maxTexelBufferElements_ )
|
|
, maxUniformBufferRange( maxUniformBufferRange_ )
|
|
, maxStorageBufferRange( maxStorageBufferRange_ )
|
|
, maxPushConstantsSize( maxPushConstantsSize_ )
|
|
, maxMemoryAllocationCount( maxMemoryAllocationCount_ )
|
|
, maxSamplerAllocationCount( maxSamplerAllocationCount_ )
|
|
, bufferImageGranularity( bufferImageGranularity_ )
|
|
, sparseAddressSpaceSize( sparseAddressSpaceSize_ )
|
|
, maxBoundDescriptorSets( maxBoundDescriptorSets_ )
|
|
, maxPerStageDescriptorSamplers( maxPerStageDescriptorSamplers_ )
|
|
, maxPerStageDescriptorUniformBuffers( maxPerStageDescriptorUniformBuffers_ )
|
|
, maxPerStageDescriptorStorageBuffers( maxPerStageDescriptorStorageBuffers_ )
|
|
, maxPerStageDescriptorSampledImages( maxPerStageDescriptorSampledImages_ )
|
|
, maxPerStageDescriptorStorageImages( maxPerStageDescriptorStorageImages_ )
|
|
, maxPerStageDescriptorInputAttachments( maxPerStageDescriptorInputAttachments_ )
|
|
, maxPerStageResources( maxPerStageResources_ )
|
|
, maxDescriptorSetSamplers( maxDescriptorSetSamplers_ )
|
|
, maxDescriptorSetUniformBuffers( maxDescriptorSetUniformBuffers_ )
|
|
, maxDescriptorSetUniformBuffersDynamic( maxDescriptorSetUniformBuffersDynamic_ )
|
|
, maxDescriptorSetStorageBuffers( maxDescriptorSetStorageBuffers_ )
|
|
, maxDescriptorSetStorageBuffersDynamic( maxDescriptorSetStorageBuffersDynamic_ )
|
|
, maxDescriptorSetSampledImages( maxDescriptorSetSampledImages_ )
|
|
, maxDescriptorSetStorageImages( maxDescriptorSetStorageImages_ )
|
|
, maxDescriptorSetInputAttachments( maxDescriptorSetInputAttachments_ )
|
|
, maxVertexInputAttributes( maxVertexInputAttributes_ )
|
|
, maxVertexInputBindings( maxVertexInputBindings_ )
|
|
, maxVertexInputAttributeOffset( maxVertexInputAttributeOffset_ )
|
|
, maxVertexInputBindingStride( maxVertexInputBindingStride_ )
|
|
, maxVertexOutputComponents( maxVertexOutputComponents_ )
|
|
, maxTessellationGenerationLevel( maxTessellationGenerationLevel_ )
|
|
, maxTessellationPatchSize( maxTessellationPatchSize_ )
|
|
, maxTessellationControlPerVertexInputComponents( maxTessellationControlPerVertexInputComponents_ )
|
|
, maxTessellationControlPerVertexOutputComponents( maxTessellationControlPerVertexOutputComponents_ )
|
|
, maxTessellationControlPerPatchOutputComponents( maxTessellationControlPerPatchOutputComponents_ )
|
|
, maxTessellationControlTotalOutputComponents( maxTessellationControlTotalOutputComponents_ )
|
|
, maxTessellationEvaluationInputComponents( maxTessellationEvaluationInputComponents_ )
|
|
, maxTessellationEvaluationOutputComponents( maxTessellationEvaluationOutputComponents_ )
|
|
, maxGeometryShaderInvocations( maxGeometryShaderInvocations_ )
|
|
, maxGeometryInputComponents( maxGeometryInputComponents_ )
|
|
, maxGeometryOutputComponents( maxGeometryOutputComponents_ )
|
|
, maxGeometryOutputVertices( maxGeometryOutputVertices_ )
|
|
, maxGeometryTotalOutputComponents( maxGeometryTotalOutputComponents_ )
|
|
, maxFragmentInputComponents( maxFragmentInputComponents_ )
|
|
, maxFragmentOutputAttachments( maxFragmentOutputAttachments_ )
|
|
, maxFragmentDualSrcAttachments( maxFragmentDualSrcAttachments_ )
|
|
, maxFragmentCombinedOutputResources( maxFragmentCombinedOutputResources_ )
|
|
, maxComputeSharedMemorySize( maxComputeSharedMemorySize_ )
|
|
, maxComputeWorkGroupCount{}
|
|
, maxComputeWorkGroupInvocations( maxComputeWorkGroupInvocations_ )
|
|
, maxComputeWorkGroupSize{}
|
|
, subPixelPrecisionBits( subPixelPrecisionBits_ )
|
|
, subTexelPrecisionBits( subTexelPrecisionBits_ )
|
|
, mipmapPrecisionBits( mipmapPrecisionBits_ )
|
|
, maxDrawIndexedIndexValue( maxDrawIndexedIndexValue_ )
|
|
, maxDrawIndirectCount( maxDrawIndirectCount_ )
|
|
, maxSamplerLodBias( maxSamplerLodBias_ )
|
|
, maxSamplerAnisotropy( maxSamplerAnisotropy_ )
|
|
, maxViewports( maxViewports_ )
|
|
, maxViewportDimensions{}
|
|
, viewportBoundsRange{}
|
|
, viewportSubPixelBits( viewportSubPixelBits_ )
|
|
, minMemoryMapAlignment( minMemoryMapAlignment_ )
|
|
, minTexelBufferOffsetAlignment( minTexelBufferOffsetAlignment_ )
|
|
, minUniformBufferOffsetAlignment( minUniformBufferOffsetAlignment_ )
|
|
, minStorageBufferOffsetAlignment( minStorageBufferOffsetAlignment_ )
|
|
, minTexelOffset( minTexelOffset_ )
|
|
, maxTexelOffset( maxTexelOffset_ )
|
|
, minTexelGatherOffset( minTexelGatherOffset_ )
|
|
, maxTexelGatherOffset( maxTexelGatherOffset_ )
|
|
, minInterpolationOffset( minInterpolationOffset_ )
|
|
, maxInterpolationOffset( maxInterpolationOffset_ )
|
|
, subPixelInterpolationOffsetBits( subPixelInterpolationOffsetBits_ )
|
|
, maxFramebufferWidth( maxFramebufferWidth_ )
|
|
, maxFramebufferHeight( maxFramebufferHeight_ )
|
|
, maxFramebufferLayers( maxFramebufferLayers_ )
|
|
, framebufferColorSampleCounts( framebufferColorSampleCounts_ )
|
|
, framebufferDepthSampleCounts( framebufferDepthSampleCounts_ )
|
|
, framebufferStencilSampleCounts( framebufferStencilSampleCounts_ )
|
|
, framebufferNoAttachmentsSampleCounts( framebufferNoAttachmentsSampleCounts_ )
|
|
, maxColorAttachments( maxColorAttachments_ )
|
|
, sampledImageColorSampleCounts( sampledImageColorSampleCounts_ )
|
|
, sampledImageIntegerSampleCounts( sampledImageIntegerSampleCounts_ )
|
|
, sampledImageDepthSampleCounts( sampledImageDepthSampleCounts_ )
|
|
, sampledImageStencilSampleCounts( sampledImageStencilSampleCounts_ )
|
|
, storageImageSampleCounts( storageImageSampleCounts_ )
|
|
, maxSampleMaskWords( maxSampleMaskWords_ )
|
|
, timestampComputeAndGraphics( timestampComputeAndGraphics_ )
|
|
, timestampPeriod( timestampPeriod_ )
|
|
, maxClipDistances( maxClipDistances_ )
|
|
, maxCullDistances( maxCullDistances_ )
|
|
, maxCombinedClipAndCullDistances( maxCombinedClipAndCullDistances_ )
|
|
, discreteQueuePriorities( discreteQueuePriorities_ )
|
|
, pointSizeRange{}
|
|
, lineWidthRange{}
|
|
, pointSizeGranularity( pointSizeGranularity_ )
|
|
, lineWidthGranularity( lineWidthGranularity_ )
|
|
, strictLines( strictLines_ )
|
|
, standardSampleLocations( standardSampleLocations_ )
|
|
, optimalBufferCopyOffsetAlignment( optimalBufferCopyOffsetAlignment_ )
|
|
, optimalBufferCopyRowPitchAlignment( optimalBufferCopyRowPitchAlignment_ )
|
|
, nonCoherentAtomSize( nonCoherentAtomSize_ )
|
|
{
|
|
VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint32_t,3>::copy( maxComputeWorkGroupCount, maxComputeWorkGroupCount_ );
|
|
VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint32_t,3>::copy( maxComputeWorkGroupSize, maxComputeWorkGroupSize_ );
|
|
VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint32_t,2>::copy( maxViewportDimensions, maxViewportDimensions_ );
|
|
VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<float,2>::copy( viewportBoundsRange, viewportBoundsRange_ );
|
|
VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<float,2>::copy( pointSizeRange, pointSizeRange_ );
|
|
VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<float,2>::copy( lineWidthRange, lineWidthRange_ );
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLimits( PhysicalDeviceLimits const& rhs ) VULKAN_HPP_NOEXCEPT
|
|
: maxImageDimension1D( rhs.maxImageDimension1D )
|
|
, maxImageDimension2D( rhs.maxImageDimension2D )
|
|
, maxImageDimension3D( rhs.maxImageDimension3D )
|
|
, maxImageDimensionCube( rhs.maxImageDimensionCube )
|
|
, maxImageArrayLayers( rhs.maxImageArrayLayers )
|
|
, maxTexelBufferElements( rhs.maxTexelBufferElements )
|
|
, maxUniformBufferRange( rhs.maxUniformBufferRange )
|
|
, maxStorageBufferRange( rhs.maxStorageBufferRange )
|
|
, maxPushConstantsSize( rhs.maxPushConstantsSize )
|
|
, maxMemoryAllocationCount( rhs.maxMemoryAllocationCount )
|
|
, maxSamplerAllocationCount( rhs.maxSamplerAllocationCount )
|
|
, bufferImageGranularity( rhs.bufferImageGranularity )
|
|
, sparseAddressSpaceSize( rhs.sparseAddressSpaceSize )
|
|
, maxBoundDescriptorSets( rhs.maxBoundDescriptorSets )
|
|
, maxPerStageDescriptorSamplers( rhs.maxPerStageDescriptorSamplers )
|
|
, maxPerStageDescriptorUniformBuffers( rhs.maxPerStageDescriptorUniformBuffers )
|
|
, maxPerStageDescriptorStorageBuffers( rhs.maxPerStageDescriptorStorageBuffers )
|
|
, maxPerStageDescriptorSampledImages( rhs.maxPerStageDescriptorSampledImages )
|
|
, maxPerStageDescriptorStorageImages( rhs.maxPerStageDescriptorStorageImages )
|
|
, maxPerStageDescriptorInputAttachments( rhs.maxPerStageDescriptorInputAttachments )
|
|
, maxPerStageResources( rhs.maxPerStageResources )
|
|
, maxDescriptorSetSamplers( rhs.maxDescriptorSetSamplers )
|
|
, maxDescriptorSetUniformBuffers( rhs.maxDescriptorSetUniformBuffers )
|
|
, maxDescriptorSetUniformBuffersDynamic( rhs.maxDescriptorSetUniformBuffersDynamic )
|
|
, maxDescriptorSetStorageBuffers( rhs.maxDescriptorSetStorageBuffers )
|
|
, maxDescriptorSetStorageBuffersDynamic( rhs.maxDescriptorSetStorageBuffersDynamic )
|
|
, maxDescriptorSetSampledImages( rhs.maxDescriptorSetSampledImages )
|
|
, maxDescriptorSetStorageImages( rhs.maxDescriptorSetStorageImages )
|
|
, maxDescriptorSetInputAttachments( rhs.maxDescriptorSetInputAttachments )
|
|
, maxVertexInputAttributes( rhs.maxVertexInputAttributes )
|
|
, maxVertexInputBindings( rhs.maxVertexInputBindings )
|
|
, maxVertexInputAttributeOffset( rhs.maxVertexInputAttributeOffset )
|
|
, maxVertexInputBindingStride( rhs.maxVertexInputBindingStride )
|
|
, maxVertexOutputComponents( rhs.maxVertexOutputComponents )
|
|
, maxTessellationGenerationLevel( rhs.maxTessellationGenerationLevel )
|
|
, maxTessellationPatchSize( rhs.maxTessellationPatchSize )
|
|
, maxTessellationControlPerVertexInputComponents( rhs.maxTessellationControlPerVertexInputComponents )
|
|
, maxTessellationControlPerVertexOutputComponents( rhs.maxTessellationControlPerVertexOutputComponents )
|
|
, maxTessellationControlPerPatchOutputComponents( rhs.maxTessellationControlPerPatchOutputComponents )
|
|
, maxTessellationControlTotalOutputComponents( rhs.maxTessellationControlTotalOutputComponents )
|
|
, maxTessellationEvaluationInputComponents( rhs.maxTessellationEvaluationInputComponents )
|
|
, maxTessellationEvaluationOutputComponents( rhs.maxTessellationEvaluationOutputComponents )
|
|
, maxGeometryShaderInvocations( rhs.maxGeometryShaderInvocations )
|
|
, maxGeometryInputComponents( rhs.maxGeometryInputComponents )
|
|
, maxGeometryOutputComponents( rhs.maxGeometryOutputComponents )
|
|
, maxGeometryOutputVertices( rhs.maxGeometryOutputVertices )
|
|
, maxGeometryTotalOutputComponents( rhs.maxGeometryTotalOutputComponents )
|
|
, maxFragmentInputComponents( rhs.maxFragmentInputComponents )
|
|
, maxFragmentOutputAttachments( rhs.maxFragmentOutputAttachments )
|
|
, maxFragmentDualSrcAttachments( rhs.maxFragmentDualSrcAttachments )
|
|
, maxFragmentCombinedOutputResources( rhs.maxFragmentCombinedOutputResources )
|
|
, maxComputeSharedMemorySize( rhs.maxComputeSharedMemorySize )
|
|
, maxComputeWorkGroupCount{}
|
|
, maxComputeWorkGroupInvocations( rhs.maxComputeWorkGroupInvocations )
|
|
, maxComputeWorkGroupSize{}
|
|
, subPixelPrecisionBits( rhs.subPixelPrecisionBits )
|
|
, subTexelPrecisionBits( rhs.subTexelPrecisionBits )
|
|
, mipmapPrecisionBits( rhs.mipmapPrecisionBits )
|
|
, maxDrawIndexedIndexValue( rhs.maxDrawIndexedIndexValue )
|
|
, maxDrawIndirectCount( rhs.maxDrawIndirectCount )
|
|
, maxSamplerLodBias( rhs.maxSamplerLodBias )
|
|
, maxSamplerAnisotropy( rhs.maxSamplerAnisotropy )
|
|
, maxViewports( rhs.maxViewports )
|
|
, maxViewportDimensions{}
|
|
, viewportBoundsRange{}
|
|
, viewportSubPixelBits( rhs.viewportSubPixelBits )
|
|
, minMemoryMapAlignment( rhs.minMemoryMapAlignment )
|
|
, minTexelBufferOffsetAlignment( rhs.minTexelBufferOffsetAlignment )
|
|
, minUniformBufferOffsetAlignment( rhs.minUniformBufferOffsetAlignment )
|
|
, minStorageBufferOffsetAlignment( rhs.minStorageBufferOffsetAlignment )
|
|
, minTexelOffset( rhs.minTexelOffset )
|
|
, maxTexelOffset( rhs.maxTexelOffset )
|
|
, minTexelGatherOffset( rhs.minTexelGatherOffset )
|
|
, maxTexelGatherOffset( rhs.maxTexelGatherOffset )
|
|
, minInterpolationOffset( rhs.minInterpolationOffset )
|
|
, maxInterpolationOffset( rhs.maxInterpolationOffset )
|
|
, subPixelInterpolationOffsetBits( rhs.subPixelInterpolationOffsetBits )
|
|
, maxFramebufferWidth( rhs.maxFramebufferWidth )
|
|
, maxFramebufferHeight( rhs.maxFramebufferHeight )
|
|
, maxFramebufferLayers( rhs.maxFramebufferLayers )
|
|
, framebufferColorSampleCounts( rhs.framebufferColorSampleCounts )
|
|
, framebufferDepthSampleCounts( rhs.framebufferDepthSampleCounts )
|
|
, framebufferStencilSampleCounts( rhs.framebufferStencilSampleCounts )
|
|
, framebufferNoAttachmentsSampleCounts( rhs.framebufferNoAttachmentsSampleCounts )
|
|
, maxColorAttachments( rhs.maxColorAttachments )
|
|
, sampledImageColorSampleCounts( rhs.sampledImageColorSampleCounts )
|
|
, sampledImageIntegerSampleCounts( rhs.sampledImageIntegerSampleCounts )
|
|
, sampledImageDepthSampleCounts( rhs.sampledImageDepthSampleCounts )
|
|
, sampledImageStencilSampleCounts( rhs.sampledImageStencilSampleCounts )
|
|
, storageImageSampleCounts( rhs.storageImageSampleCounts )
|
|
, maxSampleMaskWords( rhs.maxSampleMaskWords )
|
|
, timestampComputeAndGraphics( rhs.timestampComputeAndGraphics )
|
|
, timestampPeriod( rhs.timestampPeriod )
|
|
, maxClipDistances( rhs.maxClipDistances )
|
|
, maxCullDistances( rhs.maxCullDistances )
|
|
, maxCombinedClipAndCullDistances( rhs.maxCombinedClipAndCullDistances )
|
|
, discreteQueuePriorities( rhs.discreteQueuePriorities )
|
|
, pointSizeRange{}
|
|
, lineWidthRange{}
|
|
, pointSizeGranularity( rhs.pointSizeGranularity )
|
|
, lineWidthGranularity( rhs.lineWidthGranularity )
|
|
, strictLines( rhs.strictLines )
|
|
, standardSampleLocations( rhs.standardSampleLocations )
|
|
, optimalBufferCopyOffsetAlignment( rhs.optimalBufferCopyOffsetAlignment )
|
|
, optimalBufferCopyRowPitchAlignment( rhs.optimalBufferCopyRowPitchAlignment )
|
|
, nonCoherentAtomSize( rhs.nonCoherentAtomSize )
|
|
{
|
|
VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint32_t,3>::copy( maxComputeWorkGroupCount, rhs.maxComputeWorkGroupCount );
|
|
VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint32_t,3>::copy( maxComputeWorkGroupSize, rhs.maxComputeWorkGroupSize );
|
|
VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint32_t,2>::copy( maxViewportDimensions, rhs.maxViewportDimensions );
|
|
VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<float,2>::copy( viewportBoundsRange, rhs.viewportBoundsRange );
|
|
VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<float,2>::copy( pointSizeRange, rhs.pointSizeRange );
|
|
VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<float,2>::copy( lineWidthRange, rhs.lineWidthRange );
|
|
}
|
|
|
|
PhysicalDeviceLimits & operator=( PhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memcpy( static_cast<void*>(this), &rhs, sizeof( PhysicalDeviceLimits ) );
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceLimits( VkPhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = rhs;
|
|
}
|
|
|
|
PhysicalDeviceLimits& operator=( VkPhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits const *>(&rhs);
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDeviceLimits const&() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceLimits*>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceLimits &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceLimits*>( this );
|
|
}
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceLimits const& ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceLimits const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( maxImageDimension1D == rhs.maxImageDimension1D )
|
|
&& ( maxImageDimension2D == rhs.maxImageDimension2D )
|
|
&& ( maxImageDimension3D == rhs.maxImageDimension3D )
|
|
&& ( maxImageDimensionCube == rhs.maxImageDimensionCube )
|
|
&& ( maxImageArrayLayers == rhs.maxImageArrayLayers )
|
|
&& ( maxTexelBufferElements == rhs.maxTexelBufferElements )
|
|
&& ( maxUniformBufferRange == rhs.maxUniformBufferRange )
|
|
&& ( maxStorageBufferRange == rhs.maxStorageBufferRange )
|
|
&& ( maxPushConstantsSize == rhs.maxPushConstantsSize )
|
|
&& ( maxMemoryAllocationCount == rhs.maxMemoryAllocationCount )
|
|
&& ( maxSamplerAllocationCount == rhs.maxSamplerAllocationCount )
|
|
&& ( bufferImageGranularity == rhs.bufferImageGranularity )
|
|
&& ( sparseAddressSpaceSize == rhs.sparseAddressSpaceSize )
|
|
&& ( maxBoundDescriptorSets == rhs.maxBoundDescriptorSets )
|
|
&& ( maxPerStageDescriptorSamplers == rhs.maxPerStageDescriptorSamplers )
|
|
&& ( maxPerStageDescriptorUniformBuffers == rhs.maxPerStageDescriptorUniformBuffers )
|
|
&& ( maxPerStageDescriptorStorageBuffers == rhs.maxPerStageDescriptorStorageBuffers )
|
|
&& ( maxPerStageDescriptorSampledImages == rhs.maxPerStageDescriptorSampledImages )
|
|
&& ( maxPerStageDescriptorStorageImages == rhs.maxPerStageDescriptorStorageImages )
|
|
&& ( maxPerStageDescriptorInputAttachments == rhs.maxPerStageDescriptorInputAttachments )
|
|
&& ( maxPerStageResources == rhs.maxPerStageResources )
|
|
&& ( maxDescriptorSetSamplers == rhs.maxDescriptorSetSamplers )
|
|
&& ( maxDescriptorSetUniformBuffers == rhs.maxDescriptorSetUniformBuffers )
|
|
&& ( maxDescriptorSetUniformBuffersDynamic == rhs.maxDescriptorSetUniformBuffersDynamic )
|
|
&& ( maxDescriptorSetStorageBuffers == rhs.maxDescriptorSetStorageBuffers )
|
|
&& ( maxDescriptorSetStorageBuffersDynamic == rhs.maxDescriptorSetStorageBuffersDynamic )
|
|
&& ( maxDescriptorSetSampledImages == rhs.maxDescriptorSetSampledImages )
|
|
&& ( maxDescriptorSetStorageImages == rhs.maxDescriptorSetStorageImages )
|
|
&& ( maxDescriptorSetInputAttachments == rhs.maxDescriptorSetInputAttachments )
|
|
&& ( maxVertexInputAttributes == rhs.maxVertexInputAttributes )
|
|
&& ( maxVertexInputBindings == rhs.maxVertexInputBindings )
|
|
&& ( maxVertexInputAttributeOffset == rhs.maxVertexInputAttributeOffset )
|
|
&& ( maxVertexInputBindingStride == rhs.maxVertexInputBindingStride )
|
|
&& ( maxVertexOutputComponents == rhs.maxVertexOutputComponents )
|
|
&& ( maxTessellationGenerationLevel == rhs.maxTessellationGenerationLevel )
|
|
&& ( maxTessellationPatchSize == rhs.maxTessellationPatchSize )
|
|
&& ( maxTessellationControlPerVertexInputComponents == rhs.maxTessellationControlPerVertexInputComponents )
|
|
&& ( maxTessellationControlPerVertexOutputComponents == rhs.maxTessellationControlPerVertexOutputComponents )
|
|
&& ( maxTessellationControlPerPatchOutputComponents == rhs.maxTessellationControlPerPatchOutputComponents )
|
|
&& ( maxTessellationControlTotalOutputComponents == rhs.maxTessellationControlTotalOutputComponents )
|
|
&& ( maxTessellationEvaluationInputComponents == rhs.maxTessellationEvaluationInputComponents )
|
|
&& ( maxTessellationEvaluationOutputComponents == rhs.maxTessellationEvaluationOutputComponents )
|
|
&& ( maxGeometryShaderInvocations == rhs.maxGeometryShaderInvocations )
|
|
&& ( maxGeometryInputComponents == rhs.maxGeometryInputComponents )
|
|
&& ( maxGeometryOutputComponents == rhs.maxGeometryOutputComponents )
|
|
&& ( maxGeometryOutputVertices == rhs.maxGeometryOutputVertices )
|
|
&& ( maxGeometryTotalOutputComponents == rhs.maxGeometryTotalOutputComponents )
|
|
&& ( maxFragmentInputComponents == rhs.maxFragmentInputComponents )
|
|
&& ( maxFragmentOutputAttachments == rhs.maxFragmentOutputAttachments )
|
|
&& ( maxFragmentDualSrcAttachments == rhs.maxFragmentDualSrcAttachments )
|
|
&& ( maxFragmentCombinedOutputResources == rhs.maxFragmentCombinedOutputResources )
|
|
&& ( maxComputeSharedMemorySize == rhs.maxComputeSharedMemorySize )
|
|
&& ( memcmp( maxComputeWorkGroupCount, rhs.maxComputeWorkGroupCount, 3 * sizeof( uint32_t ) ) == 0 )
|
|
&& ( maxComputeWorkGroupInvocations == rhs.maxComputeWorkGroupInvocations )
|
|
&& ( memcmp( maxComputeWorkGroupSize, rhs.maxComputeWorkGroupSize, 3 * sizeof( uint32_t ) ) == 0 )
|
|
&& ( subPixelPrecisionBits == rhs.subPixelPrecisionBits )
|
|
&& ( subTexelPrecisionBits == rhs.subTexelPrecisionBits )
|
|
&& ( mipmapPrecisionBits == rhs.mipmapPrecisionBits )
|
|
&& ( maxDrawIndexedIndexValue == rhs.maxDrawIndexedIndexValue )
|
|
&& ( maxDrawIndirectCount == rhs.maxDrawIndirectCount )
|
|
&& ( maxSamplerLodBias == rhs.maxSamplerLodBias )
|
|
&& ( maxSamplerAnisotropy == rhs.maxSamplerAnisotropy )
|
|
&& ( maxViewports == rhs.maxViewports )
|
|
&& ( memcmp( maxViewportDimensions, rhs.maxViewportDimensions, 2 * sizeof( uint32_t ) ) == 0 )
|
|
&& ( memcmp( viewportBoundsRange, rhs.viewportBoundsRange, 2 * sizeof( float ) ) == 0 )
|
|
&& ( viewportSubPixelBits == rhs.viewportSubPixelBits )
|
|
&& ( minMemoryMapAlignment == rhs.minMemoryMapAlignment )
|
|
&& ( minTexelBufferOffsetAlignment == rhs.minTexelBufferOffsetAlignment )
|
|
&& ( minUniformBufferOffsetAlignment == rhs.minUniformBufferOffsetAlignment )
|
|
&& ( minStorageBufferOffsetAlignment == rhs.minStorageBufferOffsetAlignment )
|
|
&& ( minTexelOffset == rhs.minTexelOffset )
|
|
&& ( maxTexelOffset == rhs.maxTexelOffset )
|
|
&& ( minTexelGatherOffset == rhs.minTexelGatherOffset )
|
|
&& ( maxTexelGatherOffset == rhs.maxTexelGatherOffset )
|
|
&& ( minInterpolationOffset == rhs.minInterpolationOffset )
|
|
&& ( maxInterpolationOffset == rhs.maxInterpolationOffset )
|
|
&& ( subPixelInterpolationOffsetBits == rhs.subPixelInterpolationOffsetBits )
|
|
&& ( maxFramebufferWidth == rhs.maxFramebufferWidth )
|
|
&& ( maxFramebufferHeight == rhs.maxFramebufferHeight )
|
|
&& ( maxFramebufferLayers == rhs.maxFramebufferLayers )
|
|
&& ( framebufferColorSampleCounts == rhs.framebufferColorSampleCounts )
|
|
&& ( framebufferDepthSampleCounts == rhs.framebufferDepthSampleCounts )
|
|
&& ( framebufferStencilSampleCounts == rhs.framebufferStencilSampleCounts )
|
|
&& ( framebufferNoAttachmentsSampleCounts == rhs.framebufferNoAttachmentsSampleCounts )
|
|
&& ( maxColorAttachments == rhs.maxColorAttachments )
|
|
&& ( sampledImageColorSampleCounts == rhs.sampledImageColorSampleCounts )
|
|
&& ( sampledImageIntegerSampleCounts == rhs.sampledImageIntegerSampleCounts )
|
|
&& ( sampledImageDepthSampleCounts == rhs.sampledImageDepthSampleCounts )
|
|
&& ( sampledImageStencilSampleCounts == rhs.sampledImageStencilSampleCounts )
|
|
&& ( storageImageSampleCounts == rhs.storageImageSampleCounts )
|
|
&& ( maxSampleMaskWords == rhs.maxSampleMaskWords )
|
|
&& ( timestampComputeAndGraphics == rhs.timestampComputeAndGraphics )
|
|
&& ( timestampPeriod == rhs.timestampPeriod )
|
|
&& ( maxClipDistances == rhs.maxClipDistances )
|
|
&& ( maxCullDistances == rhs.maxCullDistances )
|
|
&& ( maxCombinedClipAndCullDistances == rhs.maxCombinedClipAndCullDistances )
|
|
&& ( discreteQueuePriorities == rhs.discreteQueuePriorities )
|
|
&& ( memcmp( pointSizeRange, rhs.pointSizeRange, 2 * sizeof( float ) ) == 0 )
|
|
&& ( memcmp( lineWidthRange, rhs.lineWidthRange, 2 * sizeof( float ) ) == 0 )
|
|
&& ( pointSizeGranularity == rhs.pointSizeGranularity )
|
|
&& ( lineWidthGranularity == rhs.lineWidthGranularity )
|
|
&& ( strictLines == rhs.strictLines )
|
|
&& ( standardSampleLocations == rhs.standardSampleLocations )
|
|
&& ( optimalBufferCopyOffsetAlignment == rhs.optimalBufferCopyOffsetAlignment )
|
|
&& ( optimalBufferCopyRowPitchAlignment == rhs.optimalBufferCopyRowPitchAlignment )
|
|
&& ( nonCoherentAtomSize == rhs.nonCoherentAtomSize );
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceLimits const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
uint32_t maxImageDimension1D = {};
|
|
uint32_t maxImageDimension2D = {};
|
|
uint32_t maxImageDimension3D = {};
|
|
uint32_t maxImageDimensionCube = {};
|
|
uint32_t maxImageArrayLayers = {};
|
|
uint32_t maxTexelBufferElements = {};
|
|
uint32_t maxUniformBufferRange = {};
|
|
uint32_t maxStorageBufferRange = {};
|
|
uint32_t maxPushConstantsSize = {};
|
|
uint32_t maxMemoryAllocationCount = {};
|
|
uint32_t maxSamplerAllocationCount = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize bufferImageGranularity = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize sparseAddressSpaceSize = {};
|
|
uint32_t maxBoundDescriptorSets = {};
|
|
uint32_t maxPerStageDescriptorSamplers = {};
|
|
uint32_t maxPerStageDescriptorUniformBuffers = {};
|
|
uint32_t maxPerStageDescriptorStorageBuffers = {};
|
|
uint32_t maxPerStageDescriptorSampledImages = {};
|
|
uint32_t maxPerStageDescriptorStorageImages = {};
|
|
uint32_t maxPerStageDescriptorInputAttachments = {};
|
|
uint32_t maxPerStageResources = {};
|
|
uint32_t maxDescriptorSetSamplers = {};
|
|
uint32_t maxDescriptorSetUniformBuffers = {};
|
|
uint32_t maxDescriptorSetUniformBuffersDynamic = {};
|
|
uint32_t maxDescriptorSetStorageBuffers = {};
|
|
uint32_t maxDescriptorSetStorageBuffersDynamic = {};
|
|
uint32_t maxDescriptorSetSampledImages = {};
|
|
uint32_t maxDescriptorSetStorageImages = {};
|
|
uint32_t maxDescriptorSetInputAttachments = {};
|
|
uint32_t maxVertexInputAttributes = {};
|
|
uint32_t maxVertexInputBindings = {};
|
|
uint32_t maxVertexInputAttributeOffset = {};
|
|
uint32_t maxVertexInputBindingStride = {};
|
|
uint32_t maxVertexOutputComponents = {};
|
|
uint32_t maxTessellationGenerationLevel = {};
|
|
uint32_t maxTessellationPatchSize = {};
|
|
uint32_t maxTessellationControlPerVertexInputComponents = {};
|
|
uint32_t maxTessellationControlPerVertexOutputComponents = {};
|
|
uint32_t maxTessellationControlPerPatchOutputComponents = {};
|
|
uint32_t maxTessellationControlTotalOutputComponents = {};
|
|
uint32_t maxTessellationEvaluationInputComponents = {};
|
|
uint32_t maxTessellationEvaluationOutputComponents = {};
|
|
uint32_t maxGeometryShaderInvocations = {};
|
|
uint32_t maxGeometryInputComponents = {};
|
|
uint32_t maxGeometryOutputComponents = {};
|
|
uint32_t maxGeometryOutputVertices = {};
|
|
uint32_t maxGeometryTotalOutputComponents = {};
|
|
uint32_t maxFragmentInputComponents = {};
|
|
uint32_t maxFragmentOutputAttachments = {};
|
|
uint32_t maxFragmentDualSrcAttachments = {};
|
|
uint32_t maxFragmentCombinedOutputResources = {};
|
|
uint32_t maxComputeSharedMemorySize = {};
|
|
uint32_t maxComputeWorkGroupCount[3] = {};
|
|
uint32_t maxComputeWorkGroupInvocations = {};
|
|
uint32_t maxComputeWorkGroupSize[3] = {};
|
|
uint32_t subPixelPrecisionBits = {};
|
|
uint32_t subTexelPrecisionBits = {};
|
|
uint32_t mipmapPrecisionBits = {};
|
|
uint32_t maxDrawIndexedIndexValue = {};
|
|
uint32_t maxDrawIndirectCount = {};
|
|
float maxSamplerLodBias = {};
|
|
float maxSamplerAnisotropy = {};
|
|
uint32_t maxViewports = {};
|
|
uint32_t maxViewportDimensions[2] = {};
|
|
float viewportBoundsRange[2] = {};
|
|
uint32_t viewportSubPixelBits = {};
|
|
size_t minMemoryMapAlignment = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize minTexelBufferOffsetAlignment = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize minUniformBufferOffsetAlignment = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize minStorageBufferOffsetAlignment = {};
|
|
int32_t minTexelOffset = {};
|
|
uint32_t maxTexelOffset = {};
|
|
int32_t minTexelGatherOffset = {};
|
|
uint32_t maxTexelGatherOffset = {};
|
|
float minInterpolationOffset = {};
|
|
float maxInterpolationOffset = {};
|
|
uint32_t subPixelInterpolationOffsetBits = {};
|
|
uint32_t maxFramebufferWidth = {};
|
|
uint32_t maxFramebufferHeight = {};
|
|
uint32_t maxFramebufferLayers = {};
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferColorSampleCounts = {};
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferDepthSampleCounts = {};
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferStencilSampleCounts = {};
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferNoAttachmentsSampleCounts = {};
|
|
uint32_t maxColorAttachments = {};
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageColorSampleCounts = {};
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageIntegerSampleCounts = {};
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageDepthSampleCounts = {};
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageStencilSampleCounts = {};
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlags storageImageSampleCounts = {};
|
|
uint32_t maxSampleMaskWords = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 timestampComputeAndGraphics = {};
|
|
float timestampPeriod = {};
|
|
uint32_t maxClipDistances = {};
|
|
uint32_t maxCullDistances = {};
|
|
uint32_t maxCombinedClipAndCullDistances = {};
|
|
uint32_t discreteQueuePriorities = {};
|
|
float pointSizeRange[2] = {};
|
|
float lineWidthRange[2] = {};
|
|
float pointSizeGranularity = {};
|
|
float lineWidthGranularity = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 strictLines = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 standardSampleLocations = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyOffsetAlignment = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyRowPitchAlignment = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize nonCoherentAtomSize = {};
|
|
};
|
|
static_assert( sizeof( PhysicalDeviceLimits ) == sizeof( VkPhysicalDeviceLimits ), "struct and wrapper have different size!" );
|
|
static_assert( std::is_standard_layout<PhysicalDeviceLimits>::value, "struct wrapper is not a standard layout!" );
|
|
|
|
struct PhysicalDeviceLineRasterizationFeaturesEXT
|
|
{
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 rectangularLines_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 smoothLines_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: rectangularLines( rectangularLines_ )
|
|
, bresenhamLines( bresenhamLines_ )
|
|
, smoothLines( smoothLines_ )
|
|
, stippledRectangularLines( stippledRectangularLines_ )
|
|
, stippledBresenhamLines( stippledBresenhamLines_ )
|
|
, stippledSmoothLines( stippledSmoothLines_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationFeaturesEXT( PhysicalDeviceLineRasterizationFeaturesEXT const& rhs ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( rhs.pNext )
|
|
, rectangularLines( rhs.rectangularLines )
|
|
, bresenhamLines( rhs.bresenhamLines )
|
|
, smoothLines( rhs.smoothLines )
|
|
, stippledRectangularLines( rhs.stippledRectangularLines )
|
|
, stippledBresenhamLines( rhs.stippledBresenhamLines )
|
|
, stippledSmoothLines( rhs.stippledSmoothLines )
|
|
{}
|
|
|
|
PhysicalDeviceLineRasterizationFeaturesEXT & operator=( PhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceLineRasterizationFeaturesEXT ) - offsetof( PhysicalDeviceLineRasterizationFeaturesEXT, pNext ) );
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceLineRasterizationFeaturesEXT( VkPhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = rhs;
|
|
}
|
|
|
|
PhysicalDeviceLineRasterizationFeaturesEXT& operator=( VkPhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeaturesEXT const *>(&rhs);
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceLineRasterizationFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceLineRasterizationFeaturesEXT & setRectangularLines( VULKAN_HPP_NAMESPACE::Bool32 rectangularLines_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
rectangularLines = rectangularLines_;
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceLineRasterizationFeaturesEXT & setBresenhamLines( VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
bresenhamLines = bresenhamLines_;
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceLineRasterizationFeaturesEXT & setSmoothLines( VULKAN_HPP_NAMESPACE::Bool32 smoothLines_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
smoothLines = smoothLines_;
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceLineRasterizationFeaturesEXT & setStippledRectangularLines( VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
stippledRectangularLines = stippledRectangularLines_;
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceLineRasterizationFeaturesEXT & setStippledBresenhamLines( VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
stippledBresenhamLines = stippledBresenhamLines_;
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceLineRasterizationFeaturesEXT & setStippledSmoothLines( VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
stippledSmoothLines = stippledSmoothLines_;
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDeviceLineRasterizationFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceLineRasterizationFeaturesEXT*>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceLineRasterizationFeaturesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceLineRasterizationFeaturesEXT*>( this );
|
|
}
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceLineRasterizationFeaturesEXT const& ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceLineRasterizationFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( rectangularLines == rhs.rectangularLines )
|
|
&& ( bresenhamLines == rhs.bresenhamLines )
|
|
&& ( smoothLines == rhs.smoothLines )
|
|
&& ( stippledRectangularLines == rhs.stippledRectangularLines )
|
|
&& ( stippledBresenhamLines == rhs.stippledBresenhamLines )
|
|
&& ( stippledSmoothLines == rhs.stippledSmoothLines );
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceLineRasterizationFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLineRasterizationFeaturesEXT;
|
|
void* pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 rectangularLines = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 smoothLines = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines = {};
|
|
};
|
|
static_assert( sizeof( PhysicalDeviceLineRasterizationFeaturesEXT ) == sizeof( VkPhysicalDeviceLineRasterizationFeaturesEXT ), "struct and wrapper have different size!" );
|
|
static_assert( std::is_standard_layout<PhysicalDeviceLineRasterizationFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
|
|
|
|
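// Properties for VK_EXT_line_rasterization; lineSubPixelPrecisionBits is the number of bits of sub-pixel precision used when rasterizing lines.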
struct PhysicalDeviceLineRasterizationPropertiesEXT
|
|
{
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationPropertiesEXT( uint32_t lineSubPixelPrecisionBits_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: lineSubPixelPrecisionBits( lineSubPixelPrecisionBits_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationPropertiesEXT( PhysicalDeviceLineRasterizationPropertiesEXT const& rhs ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( rhs.pNext )
|
|
, lineSubPixelPrecisionBits( rhs.lineSubPixelPrecisionBits )
|
|
{}
|
|
|
|
PhysicalDeviceLineRasterizationPropertiesEXT & operator=( PhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceLineRasterizationPropertiesEXT ) - offsetof( PhysicalDeviceLineRasterizationPropertiesEXT, pNext ) );
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceLineRasterizationPropertiesEXT( VkPhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = rhs;
|
|
}
|
|
|
|
PhysicalDeviceLineRasterizationPropertiesEXT& operator=( VkPhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationPropertiesEXT const *>(&rhs);
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDeviceLineRasterizationPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceLineRasterizationPropertiesEXT*>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceLineRasterizationPropertiesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceLineRasterizationPropertiesEXT*>( this );
|
|
}
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceLineRasterizationPropertiesEXT const& ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceLineRasterizationPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( lineSubPixelPrecisionBits == rhs.lineSubPixelPrecisionBits );
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceLineRasterizationPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLineRasterizationPropertiesEXT;
|
|
void* pNext = {};
|
|
uint32_t lineSubPixelPrecisionBits = {};
|
|
};
|
|
static_assert( sizeof( PhysicalDeviceLineRasterizationPropertiesEXT ) == sizeof( VkPhysicalDeviceLineRasterizationPropertiesEXT ), "struct and wrapper have different size!" );
|
|
static_assert( std::is_standard_layout<PhysicalDeviceLineRasterizationPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
|
|
|
|
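// Limits introduced with VK_KHR_maintenance3 / Vulkan 1.1: maxPerSetDescriptors and maxMemoryAllocationSize.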
struct PhysicalDeviceMaintenance3Properties
|
|
{
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance3Properties( uint32_t maxPerSetDescriptors_ = {},
|
|
VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: maxPerSetDescriptors( maxPerSetDescriptors_ )
|
|
, maxMemoryAllocationSize( maxMemoryAllocationSize_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance3Properties( PhysicalDeviceMaintenance3Properties const& rhs ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( rhs.pNext )
|
|
, maxPerSetDescriptors( rhs.maxPerSetDescriptors )
|
|
, maxMemoryAllocationSize( rhs.maxMemoryAllocationSize )
|
|
{}
|
|
|
|
PhysicalDeviceMaintenance3Properties & operator=( PhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceMaintenance3Properties ) - offsetof( PhysicalDeviceMaintenance3Properties, pNext ) );
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceMaintenance3Properties( VkPhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = rhs;
|
|
}
|
|
|
|
PhysicalDeviceMaintenance3Properties& operator=( VkPhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3Properties const *>(&rhs);
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDeviceMaintenance3Properties const&() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceMaintenance3Properties*>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceMaintenance3Properties &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceMaintenance3Properties*>( this );
|
|
}
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceMaintenance3Properties const& ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceMaintenance3Properties const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( maxPerSetDescriptors == rhs.maxPerSetDescriptors )
|
|
&& ( maxMemoryAllocationSize == rhs.maxMemoryAllocationSize );
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceMaintenance3Properties const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance3Properties;
|
|
void* pNext = {};
|
|
uint32_t maxPerSetDescriptors = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize = {};
|
|
};
|
|
static_assert( sizeof( PhysicalDeviceMaintenance3Properties ) == sizeof( VkPhysicalDeviceMaintenance3Properties ), "struct and wrapper have different size!" );
|
|
static_assert( std::is_standard_layout<PhysicalDeviceMaintenance3Properties>::value, "struct wrapper is not a standard layout!" );
|
|
|
|
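// Per-heap budget and current usage reported by VK_EXT_memory_budget; filled in when chained into PhysicalDeviceMemoryProperties2 (see the sketch after PhysicalDeviceMemoryProperties2 below).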
struct PhysicalDeviceMemoryBudgetPropertiesEXT
|
|
{
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryBudgetPropertiesEXT( std::array<VULKAN_HPP_NAMESPACE::DeviceSize,VK_MAX_MEMORY_HEAPS> const& heapBudget_ = {},
|
|
std::array<VULKAN_HPP_NAMESPACE::DeviceSize,VK_MAX_MEMORY_HEAPS> const& heapUsage_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: heapBudget{}
|
|
, heapUsage{}
|
|
{
|
|
VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<VULKAN_HPP_NAMESPACE::DeviceSize,VK_MAX_MEMORY_HEAPS>::copy( heapBudget, heapBudget_ );
|
|
VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<VULKAN_HPP_NAMESPACE::DeviceSize,VK_MAX_MEMORY_HEAPS>::copy( heapUsage, heapUsage_ );
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryBudgetPropertiesEXT( PhysicalDeviceMemoryBudgetPropertiesEXT const& rhs ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( rhs.pNext )
|
|
, heapBudget{}
|
|
, heapUsage{}
|
|
{
|
|
VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<VULKAN_HPP_NAMESPACE::DeviceSize,VK_MAX_MEMORY_HEAPS>::copy( heapBudget, rhs.heapBudget );
|
|
VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<VULKAN_HPP_NAMESPACE::DeviceSize,VK_MAX_MEMORY_HEAPS>::copy( heapUsage, rhs.heapUsage );
|
|
}
|
|
|
|
PhysicalDeviceMemoryBudgetPropertiesEXT & operator=( PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceMemoryBudgetPropertiesEXT ) - offsetof( PhysicalDeviceMemoryBudgetPropertiesEXT, pNext ) );
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceMemoryBudgetPropertiesEXT( VkPhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = rhs;
|
|
}
|
|
|
|
PhysicalDeviceMemoryBudgetPropertiesEXT& operator=( VkPhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT const *>(&rhs);
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDeviceMemoryBudgetPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceMemoryBudgetPropertiesEXT*>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceMemoryBudgetPropertiesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceMemoryBudgetPropertiesEXT*>( this );
|
|
}
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceMemoryBudgetPropertiesEXT const& ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceMemoryBudgetPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( memcmp( heapBudget, rhs.heapBudget, VK_MAX_MEMORY_HEAPS * sizeof( VULKAN_HPP_NAMESPACE::DeviceSize ) ) == 0 )
|
|
&& ( memcmp( heapUsage, rhs.heapUsage, VK_MAX_MEMORY_HEAPS * sizeof( VULKAN_HPP_NAMESPACE::DeviceSize ) ) == 0 );
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceMemoryBudgetPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT;
|
|
void* pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize heapBudget[VK_MAX_MEMORY_HEAPS] = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize heapUsage[VK_MAX_MEMORY_HEAPS] = {};
|
|
};
|
|
static_assert( sizeof( PhysicalDeviceMemoryBudgetPropertiesEXT ) == sizeof( VkPhysicalDeviceMemoryBudgetPropertiesEXT ), "struct and wrapper have different size!" );
|
|
static_assert( std::is_standard_layout<PhysicalDeviceMemoryBudgetPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
|
|
|
|
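// Feature bit for VK_EXT_memory_priority.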
struct PhysicalDeviceMemoryPriorityFeaturesEXT
|
|
{
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryPriorityFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 memoryPriority_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: memoryPriority( memoryPriority_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryPriorityFeaturesEXT( PhysicalDeviceMemoryPriorityFeaturesEXT const& rhs ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( rhs.pNext )
|
|
, memoryPriority( rhs.memoryPriority )
|
|
{}
|
|
|
|
PhysicalDeviceMemoryPriorityFeaturesEXT & operator=( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceMemoryPriorityFeaturesEXT ) - offsetof( PhysicalDeviceMemoryPriorityFeaturesEXT, pNext ) );
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceMemoryPriorityFeaturesEXT( VkPhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = rhs;
|
|
}
|
|
|
|
PhysicalDeviceMemoryPriorityFeaturesEXT& operator=( VkPhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryPriorityFeaturesEXT const *>(&rhs);
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceMemoryPriorityFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceMemoryPriorityFeaturesEXT & setMemoryPriority( VULKAN_HPP_NAMESPACE::Bool32 memoryPriority_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memoryPriority = memoryPriority_;
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDeviceMemoryPriorityFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceMemoryPriorityFeaturesEXT*>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceMemoryPriorityFeaturesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceMemoryPriorityFeaturesEXT*>( this );
|
|
}
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceMemoryPriorityFeaturesEXT const& ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceMemoryPriorityFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( memoryPriority == rhs.memoryPriority );
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceMemoryPriorityFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT;
|
|
void* pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 memoryPriority = {};
|
|
};
|
|
static_assert( sizeof( PhysicalDeviceMemoryPriorityFeaturesEXT ) == sizeof( VkPhysicalDeviceMemoryPriorityFeaturesEXT ), "struct and wrapper have different size!" );
|
|
static_assert( std::is_standard_layout<PhysicalDeviceMemoryPriorityFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
|
|
|
|
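// Memory types and heaps of the physical device, as returned by vkGetPhysicalDeviceMemoryProperties; only the first memoryTypeCount / memoryHeapCount array entries are valid.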
struct PhysicalDeviceMemoryProperties
|
|
{
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties( uint32_t memoryTypeCount_ = {},
|
|
std::array<VULKAN_HPP_NAMESPACE::MemoryType,VK_MAX_MEMORY_TYPES> const& memoryTypes_ = {},
|
|
uint32_t memoryHeapCount_ = {},
|
|
std::array<VULKAN_HPP_NAMESPACE::MemoryHeap,VK_MAX_MEMORY_HEAPS> const& memoryHeaps_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: memoryTypeCount( memoryTypeCount_ )
|
|
, memoryTypes{}
|
|
, memoryHeapCount( memoryHeapCount_ )
|
|
, memoryHeaps{}
|
|
{
|
|
VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<VULKAN_HPP_NAMESPACE::MemoryType,VK_MAX_MEMORY_TYPES>::copy( memoryTypes, memoryTypes_ );
|
|
VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<VULKAN_HPP_NAMESPACE::MemoryHeap,VK_MAX_MEMORY_HEAPS>::copy( memoryHeaps, memoryHeaps_ );
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties( PhysicalDeviceMemoryProperties const& rhs ) VULKAN_HPP_NOEXCEPT
|
|
: memoryTypeCount( rhs.memoryTypeCount )
|
|
, memoryTypes{}
|
|
, memoryHeapCount( rhs.memoryHeapCount )
|
|
, memoryHeaps{}
|
|
{
|
|
VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<VULKAN_HPP_NAMESPACE::MemoryType,VK_MAX_MEMORY_TYPES>::copy( memoryTypes, rhs.memoryTypes );
|
|
VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<VULKAN_HPP_NAMESPACE::MemoryHeap,VK_MAX_MEMORY_HEAPS>::copy( memoryHeaps, rhs.memoryHeaps );
|
|
}
|
|
|
|
PhysicalDeviceMemoryProperties & operator=( PhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memcpy( static_cast<void*>(this), &rhs, sizeof( PhysicalDeviceMemoryProperties ) );
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceMemoryProperties( VkPhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = rhs;
|
|
}
|
|
|
|
PhysicalDeviceMemoryProperties& operator=( VkPhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties const *>(&rhs);
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDeviceMemoryProperties const&() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceMemoryProperties*>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceMemoryProperties &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceMemoryProperties*>( this );
|
|
}
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceMemoryProperties const& ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceMemoryProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( memoryTypeCount == rhs.memoryTypeCount )
|
|
&& ( memcmp( memoryTypes, rhs.memoryTypes, std::min<uint32_t>( VK_MAX_MEMORY_TYPES, memoryTypeCount ) * sizeof( VULKAN_HPP_NAMESPACE::MemoryType ) ) == 0 )
|
|
&& ( memoryHeapCount == rhs.memoryHeapCount )
|
|
&& ( memcmp( memoryHeaps, rhs.memoryHeaps, std::min<uint32_t>( VK_MAX_MEMORY_HEAPS, memoryHeapCount ) * sizeof( VULKAN_HPP_NAMESPACE::MemoryHeap ) ) == 0 );
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceMemoryProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
uint32_t memoryTypeCount = {};
|
|
VULKAN_HPP_NAMESPACE::MemoryType memoryTypes[VK_MAX_MEMORY_TYPES] = {};
|
|
uint32_t memoryHeapCount = {};
|
|
VULKAN_HPP_NAMESPACE::MemoryHeap memoryHeaps[VK_MAX_MEMORY_HEAPS] = {};
|
|
};
|
|
static_assert( sizeof( PhysicalDeviceMemoryProperties ) == sizeof( VkPhysicalDeviceMemoryProperties ), "struct and wrapper have different size!" );
|
|
static_assert( std::is_standard_layout<PhysicalDeviceMemoryProperties>::value, "struct wrapper is not a standard layout!" );
|
|
|
|
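// Extensible variant of PhysicalDeviceMemoryProperties used with vkGetPhysicalDeviceMemoryProperties2; extension structs are chained through pNext.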
struct PhysicalDeviceMemoryProperties2
|
|
{
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: memoryProperties( memoryProperties_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties2( PhysicalDeviceMemoryProperties2 const& rhs ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( rhs.pNext )
|
|
, memoryProperties( rhs.memoryProperties )
|
|
{}
|
|
|
|
PhysicalDeviceMemoryProperties2 & operator=( PhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceMemoryProperties2 ) - offsetof( PhysicalDeviceMemoryProperties2, pNext ) );
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceMemoryProperties2( VkPhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = rhs;
|
|
}
|
|
|
|
PhysicalDeviceMemoryProperties2& operator=( VkPhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 const *>(&rhs);
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDeviceMemoryProperties2 const&() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceMemoryProperties2*>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceMemoryProperties2 &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceMemoryProperties2*>( this );
|
|
}
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceMemoryProperties2 const& ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceMemoryProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( memoryProperties == rhs.memoryProperties );
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceMemoryProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryProperties2;
|
|
void* pNext = {};
|
|
VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties = {};
|
|
};
|
|
static_assert( sizeof( PhysicalDeviceMemoryProperties2 ) == sizeof( VkPhysicalDeviceMemoryProperties2 ), "struct and wrapper have different size!" );
|
|
static_assert( std::is_standard_layout<PhysicalDeviceMemoryProperties2>::value, "struct wrapper is not a standard layout!" );
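// Illustrative sketch (not part of the generated header): querying heap budgets by chaining
// PhysicalDeviceMemoryBudgetPropertiesEXT into PhysicalDeviceMemoryProperties2, assuming the
// device supports VK_EXT_memory_budget and `physicalDevice` is a valid VkPhysicalDevice:
//
//   vk::PhysicalDeviceMemoryBudgetPropertiesEXT budget;
//   vk::PhysicalDeviceMemoryProperties2 props2;
//   props2.pNext = &budget;
//   VkPhysicalDeviceMemoryProperties2 & native = props2;   // uses the conversion operator above
//   vkGetPhysicalDeviceMemoryProperties2( physicalDevice, &native );
//   // budget.heapBudget[i] / budget.heapUsage[i] are now valid for each reported memory heap.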
|
|
|
|
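// Feature bits for VK_NV_mesh_shader: taskShader and meshShader.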
struct PhysicalDeviceMeshShaderFeaturesNV
|
|
{
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceMeshShaderFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 taskShader_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 meshShader_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: taskShader( taskShader_ )
|
|
, meshShader( meshShader_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceMeshShaderFeaturesNV( PhysicalDeviceMeshShaderFeaturesNV const& rhs ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( rhs.pNext )
|
|
, taskShader( rhs.taskShader )
|
|
, meshShader( rhs.meshShader )
|
|
{}
|
|
|
|
PhysicalDeviceMeshShaderFeaturesNV & operator=( PhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceMeshShaderFeaturesNV ) - offsetof( PhysicalDeviceMeshShaderFeaturesNV, pNext ) );
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceMeshShaderFeaturesNV( VkPhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = rhs;
|
|
}
|
|
|
|
PhysicalDeviceMeshShaderFeaturesNV& operator=( VkPhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV const *>(&rhs);
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceMeshShaderFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceMeshShaderFeaturesNV & setTaskShader( VULKAN_HPP_NAMESPACE::Bool32 taskShader_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
taskShader = taskShader_;
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceMeshShaderFeaturesNV & setMeshShader( VULKAN_HPP_NAMESPACE::Bool32 meshShader_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
meshShader = meshShader_;
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDeviceMeshShaderFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceMeshShaderFeaturesNV*>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceMeshShaderFeaturesNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceMeshShaderFeaturesNV*>( this );
|
|
}
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceMeshShaderFeaturesNV const& ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceMeshShaderFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( taskShader == rhs.taskShader )
|
|
&& ( meshShader == rhs.meshShader );
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceMeshShaderFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMeshShaderFeaturesNV;
|
|
void* pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 taskShader = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 meshShader = {};
|
|
};
|
|
static_assert( sizeof( PhysicalDeviceMeshShaderFeaturesNV ) == sizeof( VkPhysicalDeviceMeshShaderFeaturesNV ), "struct and wrapper have different size!" );
|
|
static_assert( std::is_standard_layout<PhysicalDeviceMeshShaderFeaturesNV>::value, "struct wrapper is not a standard layout!" );
|
|
|
|
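// Implementation limits for VK_NV_mesh_shader (task/mesh work group sizes, output counts and granularities).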
struct PhysicalDeviceMeshShaderPropertiesNV
|
|
{
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderPropertiesNV( uint32_t maxDrawMeshTasksCount_ = {},
|
|
uint32_t maxTaskWorkGroupInvocations_ = {},
|
|
std::array<uint32_t,3> const& maxTaskWorkGroupSize_ = {},
|
|
uint32_t maxTaskTotalMemorySize_ = {},
|
|
uint32_t maxTaskOutputCount_ = {},
|
|
uint32_t maxMeshWorkGroupInvocations_ = {},
|
|
std::array<uint32_t,3> const& maxMeshWorkGroupSize_ = {},
|
|
uint32_t maxMeshTotalMemorySize_ = {},
|
|
uint32_t maxMeshOutputVertices_ = {},
|
|
uint32_t maxMeshOutputPrimitives_ = {},
|
|
uint32_t maxMeshMultiviewViewCount_ = {},
|
|
uint32_t meshOutputPerVertexGranularity_ = {},
|
|
uint32_t meshOutputPerPrimitiveGranularity_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: maxDrawMeshTasksCount( maxDrawMeshTasksCount_ )
|
|
, maxTaskWorkGroupInvocations( maxTaskWorkGroupInvocations_ )
|
|
, maxTaskWorkGroupSize{}
|
|
, maxTaskTotalMemorySize( maxTaskTotalMemorySize_ )
|
|
, maxTaskOutputCount( maxTaskOutputCount_ )
|
|
, maxMeshWorkGroupInvocations( maxMeshWorkGroupInvocations_ )
|
|
, maxMeshWorkGroupSize{}
|
|
, maxMeshTotalMemorySize( maxMeshTotalMemorySize_ )
|
|
, maxMeshOutputVertices( maxMeshOutputVertices_ )
|
|
, maxMeshOutputPrimitives( maxMeshOutputPrimitives_ )
|
|
, maxMeshMultiviewViewCount( maxMeshMultiviewViewCount_ )
|
|
, meshOutputPerVertexGranularity( meshOutputPerVertexGranularity_ )
|
|
, meshOutputPerPrimitiveGranularity( meshOutputPerPrimitiveGranularity_ )
|
|
{
|
|
VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint32_t,3>::copy( maxTaskWorkGroupSize, maxTaskWorkGroupSize_ );
|
|
VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint32_t,3>::copy( maxMeshWorkGroupSize, maxMeshWorkGroupSize_ );
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderPropertiesNV( PhysicalDeviceMeshShaderPropertiesNV const& rhs ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( rhs.pNext )
|
|
, maxDrawMeshTasksCount( rhs.maxDrawMeshTasksCount )
|
|
, maxTaskWorkGroupInvocations( rhs.maxTaskWorkGroupInvocations )
|
|
, maxTaskWorkGroupSize{}
|
|
, maxTaskTotalMemorySize( rhs.maxTaskTotalMemorySize )
|
|
, maxTaskOutputCount( rhs.maxTaskOutputCount )
|
|
, maxMeshWorkGroupInvocations( rhs.maxMeshWorkGroupInvocations )
|
|
, maxMeshWorkGroupSize{}
|
|
, maxMeshTotalMemorySize( rhs.maxMeshTotalMemorySize )
|
|
, maxMeshOutputVertices( rhs.maxMeshOutputVertices )
|
|
, maxMeshOutputPrimitives( rhs.maxMeshOutputPrimitives )
|
|
, maxMeshMultiviewViewCount( rhs.maxMeshMultiviewViewCount )
|
|
, meshOutputPerVertexGranularity( rhs.meshOutputPerVertexGranularity )
|
|
, meshOutputPerPrimitiveGranularity( rhs.meshOutputPerPrimitiveGranularity )
|
|
{
|
|
VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint32_t,3>::copy( maxTaskWorkGroupSize, rhs.maxTaskWorkGroupSize );
|
|
VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint32_t,3>::copy( maxMeshWorkGroupSize, rhs.maxMeshWorkGroupSize );
|
|
}
|
|
|
|
PhysicalDeviceMeshShaderPropertiesNV & operator=( PhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceMeshShaderPropertiesNV ) - offsetof( PhysicalDeviceMeshShaderPropertiesNV, pNext ) );
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceMeshShaderPropertiesNV( VkPhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = rhs;
|
|
}
|
|
|
|
PhysicalDeviceMeshShaderPropertiesNV& operator=( VkPhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesNV const *>(&rhs);
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDeviceMeshShaderPropertiesNV const&() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceMeshShaderPropertiesNV*>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceMeshShaderPropertiesNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceMeshShaderPropertiesNV*>( this );
|
|
}
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceMeshShaderPropertiesNV const& ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceMeshShaderPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( maxDrawMeshTasksCount == rhs.maxDrawMeshTasksCount )
|
|
&& ( maxTaskWorkGroupInvocations == rhs.maxTaskWorkGroupInvocations )
|
|
&& ( memcmp( maxTaskWorkGroupSize, rhs.maxTaskWorkGroupSize, 3 * sizeof( uint32_t ) ) == 0 )
|
|
&& ( maxTaskTotalMemorySize == rhs.maxTaskTotalMemorySize )
|
|
&& ( maxTaskOutputCount == rhs.maxTaskOutputCount )
|
|
&& ( maxMeshWorkGroupInvocations == rhs.maxMeshWorkGroupInvocations )
|
|
&& ( memcmp( maxMeshWorkGroupSize, rhs.maxMeshWorkGroupSize, 3 * sizeof( uint32_t ) ) == 0 )
|
|
&& ( maxMeshTotalMemorySize == rhs.maxMeshTotalMemorySize )
|
|
&& ( maxMeshOutputVertices == rhs.maxMeshOutputVertices )
|
|
&& ( maxMeshOutputPrimitives == rhs.maxMeshOutputPrimitives )
|
|
&& ( maxMeshMultiviewViewCount == rhs.maxMeshMultiviewViewCount )
|
|
&& ( meshOutputPerVertexGranularity == rhs.meshOutputPerVertexGranularity )
|
|
&& ( meshOutputPerPrimitiveGranularity == rhs.meshOutputPerPrimitiveGranularity );
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceMeshShaderPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMeshShaderPropertiesNV;
|
|
void* pNext = {};
|
|
uint32_t maxDrawMeshTasksCount = {};
|
|
uint32_t maxTaskWorkGroupInvocations = {};
|
|
uint32_t maxTaskWorkGroupSize[3] = {};
|
|
uint32_t maxTaskTotalMemorySize = {};
|
|
uint32_t maxTaskOutputCount = {};
|
|
uint32_t maxMeshWorkGroupInvocations = {};
|
|
uint32_t maxMeshWorkGroupSize[3] = {};
|
|
uint32_t maxMeshTotalMemorySize = {};
|
|
uint32_t maxMeshOutputVertices = {};
|
|
uint32_t maxMeshOutputPrimitives = {};
|
|
uint32_t maxMeshMultiviewViewCount = {};
|
|
uint32_t meshOutputPerVertexGranularity = {};
|
|
uint32_t meshOutputPerPrimitiveGranularity = {};
|
|
};
|
|
static_assert( sizeof( PhysicalDeviceMeshShaderPropertiesNV ) == sizeof( VkPhysicalDeviceMeshShaderPropertiesNV ), "struct and wrapper have different size!" );
|
|
static_assert( std::is_standard_layout<PhysicalDeviceMeshShaderPropertiesNV>::value, "struct wrapper is not a standard layout!" );
|
|
|
|
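// Multiview feature bits from Vulkan 1.1 / VK_KHR_multiview.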
struct PhysicalDeviceMultiviewFeatures
|
|
{
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewFeatures( VULKAN_HPP_NAMESPACE::Bool32 multiview_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: multiview( multiview_ )
|
|
, multiviewGeometryShader( multiviewGeometryShader_ )
|
|
, multiviewTessellationShader( multiviewTessellationShader_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewFeatures( PhysicalDeviceMultiviewFeatures const& rhs ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( rhs.pNext )
|
|
, multiview( rhs.multiview )
|
|
, multiviewGeometryShader( rhs.multiviewGeometryShader )
|
|
, multiviewTessellationShader( rhs.multiviewTessellationShader )
|
|
{}
|
|
|
|
PhysicalDeviceMultiviewFeatures & operator=( PhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceMultiviewFeatures ) - offsetof( PhysicalDeviceMultiviewFeatures, pNext ) );
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceMultiviewFeatures( VkPhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = rhs;
|
|
}
|
|
|
|
PhysicalDeviceMultiviewFeatures& operator=( VkPhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewFeatures const *>(&rhs);
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceMultiviewFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceMultiviewFeatures & setMultiview( VULKAN_HPP_NAMESPACE::Bool32 multiview_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
multiview = multiview_;
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceMultiviewFeatures & setMultiviewGeometryShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
multiviewGeometryShader = multiviewGeometryShader_;
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceMultiviewFeatures & setMultiviewTessellationShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
multiviewTessellationShader = multiviewTessellationShader_;
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDeviceMultiviewFeatures const&() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceMultiviewFeatures*>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceMultiviewFeatures &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceMultiviewFeatures*>( this );
|
|
}
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceMultiviewFeatures const& ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceMultiviewFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( multiview == rhs.multiview )
|
|
&& ( multiviewGeometryShader == rhs.multiviewGeometryShader )
|
|
&& ( multiviewTessellationShader == rhs.multiviewTessellationShader );
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceMultiviewFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewFeatures;
|
|
void* pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 multiview = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader = {};
|
|
};
|
|
static_assert( sizeof( PhysicalDeviceMultiviewFeatures ) == sizeof( VkPhysicalDeviceMultiviewFeatures ), "struct and wrapper have different size!" );
|
|
static_assert( std::is_standard_layout<PhysicalDeviceMultiviewFeatures>::value, "struct wrapper is not a standard layout!" );
|
|
|
|
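// Property reported by VK_NVX_multiview_per_view_attributes (perViewPositionAllComponents).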
struct PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX
|
|
{
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( VULKAN_HPP_NAMESPACE::Bool32 perViewPositionAllComponents_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: perViewPositionAllComponents( perViewPositionAllComponents_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const& rhs ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( rhs.pNext )
|
|
, perViewPositionAllComponents( rhs.perViewPositionAllComponents )
|
|
{}
|
|
|
|
PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX & operator=( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ) - offsetof( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX, pNext ) );
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = rhs;
|
|
}
|
|
|
|
PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX& operator=( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const *>(&rhs);
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const&() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX*>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX*>( this );
|
|
}
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const& ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( perViewPositionAllComponents == rhs.perViewPositionAllComponents );
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX;
|
|
void* pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 perViewPositionAllComponents = {};
|
|
};
|
|
static_assert( sizeof( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ) == sizeof( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ), "struct and wrapper have different size!" );
|
|
static_assert( std::is_standard_layout<PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX>::value, "struct wrapper is not a standard layout!" );
|
|
|
|
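// Multiview limits from Vulkan 1.1 / VK_KHR_multiview: maxMultiviewViewCount and maxMultiviewInstanceIndex.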
struct PhysicalDeviceMultiviewProperties
|
|
{
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewProperties( uint32_t maxMultiviewViewCount_ = {},
|
|
uint32_t maxMultiviewInstanceIndex_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: maxMultiviewViewCount( maxMultiviewViewCount_ )
|
|
, maxMultiviewInstanceIndex( maxMultiviewInstanceIndex_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewProperties( PhysicalDeviceMultiviewProperties const& rhs ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( rhs.pNext )
|
|
, maxMultiviewViewCount( rhs.maxMultiviewViewCount )
|
|
, maxMultiviewInstanceIndex( rhs.maxMultiviewInstanceIndex )
|
|
{}
|
|
|
|
PhysicalDeviceMultiviewProperties & operator=( PhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceMultiviewProperties ) - offsetof( PhysicalDeviceMultiviewProperties, pNext ) );
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceMultiviewProperties( VkPhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = rhs;
|
|
}
|
|
|
|
PhysicalDeviceMultiviewProperties& operator=( VkPhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewProperties const *>(&rhs);
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDeviceMultiviewProperties const&() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceMultiviewProperties*>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceMultiviewProperties &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceMultiviewProperties*>( this );
|
|
}
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceMultiviewProperties const& ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceMultiviewProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( maxMultiviewViewCount == rhs.maxMultiviewViewCount )
|
|
&& ( maxMultiviewInstanceIndex == rhs.maxMultiviewInstanceIndex );
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceMultiviewProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewProperties;
|
|
void* pNext = {};
|
|
uint32_t maxMultiviewViewCount = {};
|
|
uint32_t maxMultiviewInstanceIndex = {};
|
|
};
|
|
static_assert( sizeof( PhysicalDeviceMultiviewProperties ) == sizeof( VkPhysicalDeviceMultiviewProperties ), "struct and wrapper have different size!" );
|
|
static_assert( std::is_standard_layout<PhysicalDeviceMultiviewProperties>::value, "struct wrapper is not a standard layout!" );
|
|
|
|
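// PCI domain/bus/device/function of the physical device, reported by VK_EXT_pci_bus_info.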
struct PhysicalDevicePCIBusInfoPropertiesEXT
|
|
{
|
|
VULKAN_HPP_CONSTEXPR PhysicalDevicePCIBusInfoPropertiesEXT( uint32_t pciDomain_ = {},
|
|
uint32_t pciBus_ = {},
|
|
uint32_t pciDevice_ = {},
|
|
uint32_t pciFunction_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: pciDomain( pciDomain_ )
|
|
, pciBus( pciBus_ )
|
|
, pciDevice( pciDevice_ )
|
|
, pciFunction( pciFunction_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDevicePCIBusInfoPropertiesEXT( PhysicalDevicePCIBusInfoPropertiesEXT const& rhs ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( rhs.pNext )
|
|
, pciDomain( rhs.pciDomain )
|
|
, pciBus( rhs.pciBus )
|
|
, pciDevice( rhs.pciDevice )
|
|
, pciFunction( rhs.pciFunction )
|
|
{}
|
|
|
|
PhysicalDevicePCIBusInfoPropertiesEXT & operator=( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDevicePCIBusInfoPropertiesEXT ) - offsetof( PhysicalDevicePCIBusInfoPropertiesEXT, pNext ) );
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDevicePCIBusInfoPropertiesEXT( VkPhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = rhs;
|
|
}
|
|
|
|
PhysicalDevicePCIBusInfoPropertiesEXT& operator=( VkPhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePCIBusInfoPropertiesEXT const *>(&rhs);
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDevicePCIBusInfoPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDevicePCIBusInfoPropertiesEXT*>( this );
|
|
}
|
|
|
|
operator VkPhysicalDevicePCIBusInfoPropertiesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDevicePCIBusInfoPropertiesEXT*>( this );
|
|
}
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDevicePCIBusInfoPropertiesEXT const& ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDevicePCIBusInfoPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( pciDomain == rhs.pciDomain )
|
|
&& ( pciBus == rhs.pciBus )
|
|
&& ( pciDevice == rhs.pciDevice )
|
|
&& ( pciFunction == rhs.pciFunction );
|
|
}
|
|
|
|
bool operator!=( PhysicalDevicePCIBusInfoPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePciBusInfoPropertiesEXT;
|
|
void* pNext = {};
|
|
uint32_t pciDomain = {};
|
|
uint32_t pciBus = {};
|
|
uint32_t pciDevice = {};
|
|
uint32_t pciFunction = {};
|
|
};
|
|
static_assert( sizeof( PhysicalDevicePCIBusInfoPropertiesEXT ) == sizeof( VkPhysicalDevicePCIBusInfoPropertiesEXT ), "struct and wrapper have different size!" );
|
|
static_assert( std::is_standard_layout<PhysicalDevicePCIBusInfoPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
|
|
|
|
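// Feature bit for VK_KHR_pipeline_executable_properties.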
struct PhysicalDevicePipelineExecutablePropertiesFeaturesKHR
|
|
{
|
|
VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: pipelineExecutableInfo( pipelineExecutableInfo_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const& rhs ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( rhs.pNext )
|
|
, pipelineExecutableInfo( rhs.pipelineExecutableInfo )
|
|
{}
|
|
|
|
PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & operator=( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR ) - offsetof( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR, pNext ) );
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = rhs;
|
|
}
|
|
|
|
PhysicalDevicePipelineExecutablePropertiesFeaturesKHR& operator=( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const *>(&rhs);
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & setPipelineExecutableInfo( VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pipelineExecutableInfo = pipelineExecutableInfo_;
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const&() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR*>( this );
|
|
}
|
|
|
|
operator VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR*>( this );
|
|
}
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const& ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( pipelineExecutableInfo == rhs.pipelineExecutableInfo );
|
|
}
|
|
|
|
bool operator!=( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR;
|
|
void* pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo = {};
|
|
};
|
|
static_assert( sizeof( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR ) == sizeof( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR ), "struct and wrapper have different size!" );
|
|
static_assert( std::is_standard_layout<PhysicalDevicePipelineExecutablePropertiesFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
|
|
|
|
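// Point clipping behaviour (all clip planes vs. user clip planes only) from Vulkan 1.1 / VK_KHR_maintenance2.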
struct PhysicalDevicePointClippingProperties
|
|
{
|
|
VULKAN_HPP_CONSTEXPR PhysicalDevicePointClippingProperties( VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior_ = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes ) VULKAN_HPP_NOEXCEPT
|
|
: pointClippingBehavior( pointClippingBehavior_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDevicePointClippingProperties( PhysicalDevicePointClippingProperties const& rhs ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( rhs.pNext )
|
|
, pointClippingBehavior( rhs.pointClippingBehavior )
|
|
{}
|
|
|
|
PhysicalDevicePointClippingProperties & operator=( PhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDevicePointClippingProperties ) - offsetof( PhysicalDevicePointClippingProperties, pNext ) );
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDevicePointClippingProperties( VkPhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = rhs;
|
|
}
|
|
|
|
PhysicalDevicePointClippingProperties& operator=( VkPhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePointClippingProperties const *>(&rhs);
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDevicePointClippingProperties const&() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDevicePointClippingProperties*>( this );
|
|
}
|
|
|
|
operator VkPhysicalDevicePointClippingProperties &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDevicePointClippingProperties*>( this );
|
|
}
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDevicePointClippingProperties const& ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDevicePointClippingProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( pointClippingBehavior == rhs.pointClippingBehavior );
|
|
}
|
|
|
|
bool operator!=( PhysicalDevicePointClippingProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePointClippingProperties;
|
|
void* pNext = {};
|
|
VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes;
|
|
};
|
|
static_assert( sizeof( PhysicalDevicePointClippingProperties ) == sizeof( VkPhysicalDevicePointClippingProperties ), "struct and wrapper have different size!" );
|
|
static_assert( std::is_standard_layout<PhysicalDevicePointClippingProperties>::value, "struct wrapper is not a standard layout!" );
|
|
|
|
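// Sparse-resource properties, returned as part of PhysicalDeviceProperties.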
struct PhysicalDeviceSparseProperties
|
|
{
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseProperties( VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DBlockShape_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DMultisampleBlockShape_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 residencyStandard3DBlockShape_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 residencyAlignedMipSize_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 residencyNonResidentStrict_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: residencyStandard2DBlockShape( residencyStandard2DBlockShape_ )
|
|
, residencyStandard2DMultisampleBlockShape( residencyStandard2DMultisampleBlockShape_ )
|
|
, residencyStandard3DBlockShape( residencyStandard3DBlockShape_ )
|
|
, residencyAlignedMipSize( residencyAlignedMipSize_ )
|
|
, residencyNonResidentStrict( residencyNonResidentStrict_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseProperties( PhysicalDeviceSparseProperties const& rhs ) VULKAN_HPP_NOEXCEPT
|
|
: residencyStandard2DBlockShape( rhs.residencyStandard2DBlockShape )
|
|
, residencyStandard2DMultisampleBlockShape( rhs.residencyStandard2DMultisampleBlockShape )
|
|
, residencyStandard3DBlockShape( rhs.residencyStandard3DBlockShape )
|
|
, residencyAlignedMipSize( rhs.residencyAlignedMipSize )
|
|
, residencyNonResidentStrict( rhs.residencyNonResidentStrict )
|
|
{}
|
|
|
|
PhysicalDeviceSparseProperties & operator=( PhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memcpy( static_cast<void*>(this), &rhs, sizeof( PhysicalDeviceSparseProperties ) );
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceSparseProperties( VkPhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = rhs;
|
|
}
|
|
|
|
PhysicalDeviceSparseProperties& operator=( VkPhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties const *>(&rhs);
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDeviceSparseProperties const&() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceSparseProperties*>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceSparseProperties &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceSparseProperties*>( this );
|
|
}
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceSparseProperties const& ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceSparseProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( residencyStandard2DBlockShape == rhs.residencyStandard2DBlockShape )
|
|
&& ( residencyStandard2DMultisampleBlockShape == rhs.residencyStandard2DMultisampleBlockShape )
|
|
&& ( residencyStandard3DBlockShape == rhs.residencyStandard3DBlockShape )
|
|
&& ( residencyAlignedMipSize == rhs.residencyAlignedMipSize )
|
|
&& ( residencyNonResidentStrict == rhs.residencyNonResidentStrict );
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceSparseProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DBlockShape = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DMultisampleBlockShape = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 residencyStandard3DBlockShape = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 residencyAlignedMipSize = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 residencyNonResidentStrict = {};
|
|
};
|
|
static_assert( sizeof( PhysicalDeviceSparseProperties ) == sizeof( VkPhysicalDeviceSparseProperties ), "struct and wrapper have different size!" );
|
|
static_assert( std::is_standard_layout<PhysicalDeviceSparseProperties>::value, "struct wrapper is not a standard layout!" );
|
|
|
|
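// Core identification, limits and sparse properties of the physical device, as returned by vkGetPhysicalDeviceProperties.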
  struct PhysicalDeviceProperties
  {
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties( uint32_t apiVersion_ = {},
                                                      uint32_t driverVersion_ = {},
                                                      uint32_t vendorID_ = {},
                                                      uint32_t deviceID_ = {},
                                                      VULKAN_HPP_NAMESPACE::PhysicalDeviceType deviceType_ = VULKAN_HPP_NAMESPACE::PhysicalDeviceType::eOther,
                                                      std::array<char,VK_MAX_PHYSICAL_DEVICE_NAME_SIZE> const& deviceName_ = {},
                                                      std::array<uint8_t,VK_UUID_SIZE> const& pipelineCacheUUID_ = {},
                                                      VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits limits_ = {},
                                                      VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties sparseProperties_ = {} ) VULKAN_HPP_NOEXCEPT
      : apiVersion( apiVersion_ )
      , driverVersion( driverVersion_ )
      , vendorID( vendorID_ )
      , deviceID( deviceID_ )
      , deviceType( deviceType_ )
      , deviceName{}
      , pipelineCacheUUID{}
      , limits( limits_ )
      , sparseProperties( sparseProperties_ )
    {
      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char,VK_MAX_PHYSICAL_DEVICE_NAME_SIZE>::copy( deviceName, deviceName_ );
      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint8_t,VK_UUID_SIZE>::copy( pipelineCacheUUID, pipelineCacheUUID_ );
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties( PhysicalDeviceProperties const& rhs ) VULKAN_HPP_NOEXCEPT
      : apiVersion( rhs.apiVersion )
      , driverVersion( rhs.driverVersion )
      , vendorID( rhs.vendorID )
      , deviceID( rhs.deviceID )
      , deviceType( rhs.deviceType )
      , deviceName{}
      , pipelineCacheUUID{}
      , limits( rhs.limits )
      , sparseProperties( rhs.sparseProperties )
    {
      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char,VK_MAX_PHYSICAL_DEVICE_NAME_SIZE>::copy( deviceName, rhs.deviceName );
      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint8_t,VK_UUID_SIZE>::copy( pipelineCacheUUID, rhs.pipelineCacheUUID );
    }

    PhysicalDeviceProperties & operator=( PhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( static_cast<void*>(this), &rhs, sizeof( PhysicalDeviceProperties ) );
      return *this;
    }

    PhysicalDeviceProperties( VkPhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    PhysicalDeviceProperties& operator=( VkPhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties const *>(&rhs);
      return *this;
    }

    operator VkPhysicalDeviceProperties const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceProperties*>( this );
    }

    operator VkPhysicalDeviceProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceProperties*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceProperties const& ) const = default;
#else
    bool operator==( PhysicalDeviceProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( apiVersion == rhs.apiVersion )
          && ( driverVersion == rhs.driverVersion )
          && ( vendorID == rhs.vendorID )
          && ( deviceID == rhs.deviceID )
          && ( deviceType == rhs.deviceType )
          && ( memcmp( deviceName, rhs.deviceName, VK_MAX_PHYSICAL_DEVICE_NAME_SIZE * sizeof( char ) ) == 0 )
          && ( memcmp( pipelineCacheUUID, rhs.pipelineCacheUUID, VK_UUID_SIZE * sizeof( uint8_t ) ) == 0 )
          && ( limits == rhs.limits )
          && ( sparseProperties == rhs.sparseProperties );
    }

    bool operator!=( PhysicalDeviceProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint32_t apiVersion = {};
    uint32_t driverVersion = {};
    uint32_t vendorID = {};
    uint32_t deviceID = {};
    VULKAN_HPP_NAMESPACE::PhysicalDeviceType deviceType = VULKAN_HPP_NAMESPACE::PhysicalDeviceType::eOther;
    char deviceName[VK_MAX_PHYSICAL_DEVICE_NAME_SIZE] = {};
    uint8_t pipelineCacheUUID[VK_UUID_SIZE] = {};
    VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits limits = {};
    VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties sparseProperties = {};
  };
  static_assert( sizeof( PhysicalDeviceProperties ) == sizeof( VkPhysicalDeviceProperties ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceProperties>::value, "struct wrapper is not a standard layout!" );

  struct PhysicalDeviceProperties2
  {
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties_ = {} ) VULKAN_HPP_NOEXCEPT
      : properties( properties_ )
    {}

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties2( PhysicalDeviceProperties2 const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , properties( rhs.properties )
    {}

    PhysicalDeviceProperties2 & operator=( PhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceProperties2 ) - offsetof( PhysicalDeviceProperties2, pNext ) );
      return *this;
    }

    PhysicalDeviceProperties2( VkPhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    PhysicalDeviceProperties2& operator=( VkPhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 const *>(&rhs);
      return *this;
    }

    operator VkPhysicalDeviceProperties2 const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceProperties2*>( this );
    }

    operator VkPhysicalDeviceProperties2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceProperties2*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceProperties2 const& ) const = default;
#else
    bool operator==( PhysicalDeviceProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( properties == rhs.properties );
    }

    bool operator!=( PhysicalDeviceProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProperties2;
    void* pNext = {};
    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties = {};
  };
  static_assert( sizeof( PhysicalDeviceProperties2 ) == sizeof( VkPhysicalDeviceProperties2 ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceProperties2>::value, "struct wrapper is not a standard layout!" );

  struct PhysicalDeviceProtectedMemoryFeatures
  {
    VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryFeatures( VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ = {} ) VULKAN_HPP_NOEXCEPT
      : protectedMemory( protectedMemory_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryFeatures( PhysicalDeviceProtectedMemoryFeatures const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , protectedMemory( rhs.protectedMemory )
    {}

    PhysicalDeviceProtectedMemoryFeatures & operator=( PhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceProtectedMemoryFeatures ) - offsetof( PhysicalDeviceProtectedMemoryFeatures, pNext ) );
      return *this;
    }

    PhysicalDeviceProtectedMemoryFeatures( VkPhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    PhysicalDeviceProtectedMemoryFeatures& operator=( VkPhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryFeatures const *>(&rhs);
      return *this;
    }

    PhysicalDeviceProtectedMemoryFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceProtectedMemoryFeatures & setProtectedMemory( VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ ) VULKAN_HPP_NOEXCEPT
    {
      protectedMemory = protectedMemory_;
      return *this;
    }

    operator VkPhysicalDeviceProtectedMemoryFeatures const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceProtectedMemoryFeatures*>( this );
    }

    operator VkPhysicalDeviceProtectedMemoryFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceProtectedMemoryFeatures*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceProtectedMemoryFeatures const& ) const = default;
#else
    bool operator==( PhysicalDeviceProtectedMemoryFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( protectedMemory == rhs.protectedMemory );
    }

    bool operator!=( PhysicalDeviceProtectedMemoryFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProtectedMemoryFeatures;
    void* pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 protectedMemory = {};
  };
  static_assert( sizeof( PhysicalDeviceProtectedMemoryFeatures ) == sizeof( VkPhysicalDeviceProtectedMemoryFeatures ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceProtectedMemoryFeatures>::value, "struct wrapper is not a standard layout!" );

  struct PhysicalDeviceProtectedMemoryProperties
  {
    VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryProperties( VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault_ = {} ) VULKAN_HPP_NOEXCEPT
      : protectedNoFault( protectedNoFault_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryProperties( PhysicalDeviceProtectedMemoryProperties const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , protectedNoFault( rhs.protectedNoFault )
    {}

    PhysicalDeviceProtectedMemoryProperties & operator=( PhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceProtectedMemoryProperties ) - offsetof( PhysicalDeviceProtectedMemoryProperties, pNext ) );
      return *this;
    }

    PhysicalDeviceProtectedMemoryProperties( VkPhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    PhysicalDeviceProtectedMemoryProperties& operator=( VkPhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryProperties const *>(&rhs);
      return *this;
    }

    operator VkPhysicalDeviceProtectedMemoryProperties const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceProtectedMemoryProperties*>( this );
    }

    operator VkPhysicalDeviceProtectedMemoryProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceProtectedMemoryProperties*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceProtectedMemoryProperties const& ) const = default;
#else
    bool operator==( PhysicalDeviceProtectedMemoryProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( protectedNoFault == rhs.protectedNoFault );
    }

    bool operator!=( PhysicalDeviceProtectedMemoryProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProtectedMemoryProperties;
    void* pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault = {};
  };
  static_assert( sizeof( PhysicalDeviceProtectedMemoryProperties ) == sizeof( VkPhysicalDeviceProtectedMemoryProperties ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceProtectedMemoryProperties>::value, "struct wrapper is not a standard layout!" );

  struct PhysicalDevicePushDescriptorPropertiesKHR
  {
    VULKAN_HPP_CONSTEXPR PhysicalDevicePushDescriptorPropertiesKHR( uint32_t maxPushDescriptors_ = {} ) VULKAN_HPP_NOEXCEPT
      : maxPushDescriptors( maxPushDescriptors_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDevicePushDescriptorPropertiesKHR( PhysicalDevicePushDescriptorPropertiesKHR const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , maxPushDescriptors( rhs.maxPushDescriptors )
    {}

    PhysicalDevicePushDescriptorPropertiesKHR & operator=( PhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDevicePushDescriptorPropertiesKHR ) - offsetof( PhysicalDevicePushDescriptorPropertiesKHR, pNext ) );
      return *this;
    }

    PhysicalDevicePushDescriptorPropertiesKHR( VkPhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    PhysicalDevicePushDescriptorPropertiesKHR& operator=( VkPhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorPropertiesKHR const *>(&rhs);
      return *this;
    }

    operator VkPhysicalDevicePushDescriptorPropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDevicePushDescriptorPropertiesKHR*>( this );
    }

    operator VkPhysicalDevicePushDescriptorPropertiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDevicePushDescriptorPropertiesKHR*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDevicePushDescriptorPropertiesKHR const& ) const = default;
#else
    bool operator==( PhysicalDevicePushDescriptorPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( maxPushDescriptors == rhs.maxPushDescriptors );
    }

    bool operator!=( PhysicalDevicePushDescriptorPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePushDescriptorPropertiesKHR;
    void* pNext = {};
    uint32_t maxPushDescriptors = {};
  };
  static_assert( sizeof( PhysicalDevicePushDescriptorPropertiesKHR ) == sizeof( VkPhysicalDevicePushDescriptorPropertiesKHR ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDevicePushDescriptorPropertiesKHR>::value, "struct wrapper is not a standard layout!" );

struct PhysicalDeviceRayTracingPropertiesNV
|
|
{
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPropertiesNV( uint32_t shaderGroupHandleSize_ = {},
|
|
uint32_t maxRecursionDepth_ = {},
|
|
uint32_t maxShaderGroupStride_ = {},
|
|
uint32_t shaderGroupBaseAlignment_ = {},
|
|
uint64_t maxGeometryCount_ = {},
|
|
uint64_t maxInstanceCount_ = {},
|
|
uint64_t maxTriangleCount_ = {},
|
|
uint32_t maxDescriptorSetAccelerationStructures_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: shaderGroupHandleSize( shaderGroupHandleSize_ )
|
|
, maxRecursionDepth( maxRecursionDepth_ )
|
|
, maxShaderGroupStride( maxShaderGroupStride_ )
|
|
, shaderGroupBaseAlignment( shaderGroupBaseAlignment_ )
|
|
, maxGeometryCount( maxGeometryCount_ )
|
|
, maxInstanceCount( maxInstanceCount_ )
|
|
, maxTriangleCount( maxTriangleCount_ )
|
|
, maxDescriptorSetAccelerationStructures( maxDescriptorSetAccelerationStructures_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPropertiesNV( PhysicalDeviceRayTracingPropertiesNV const& rhs ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( rhs.pNext )
|
|
, shaderGroupHandleSize( rhs.shaderGroupHandleSize )
|
|
, maxRecursionDepth( rhs.maxRecursionDepth )
|
|
, maxShaderGroupStride( rhs.maxShaderGroupStride )
|
|
, shaderGroupBaseAlignment( rhs.shaderGroupBaseAlignment )
|
|
, maxGeometryCount( rhs.maxGeometryCount )
|
|
, maxInstanceCount( rhs.maxInstanceCount )
|
|
, maxTriangleCount( rhs.maxTriangleCount )
|
|
, maxDescriptorSetAccelerationStructures( rhs.maxDescriptorSetAccelerationStructures )
|
|
{}
|
|
|
|
PhysicalDeviceRayTracingPropertiesNV & operator=( PhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceRayTracingPropertiesNV ) - offsetof( PhysicalDeviceRayTracingPropertiesNV, pNext ) );
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceRayTracingPropertiesNV( VkPhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = rhs;
|
|
}
|
|
|
|
PhysicalDeviceRayTracingPropertiesNV& operator=( VkPhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPropertiesNV const *>(&rhs);
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDeviceRayTracingPropertiesNV const&() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceRayTracingPropertiesNV*>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceRayTracingPropertiesNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceRayTracingPropertiesNV*>( this );
|
|
}
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceRayTracingPropertiesNV const& ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceRayTracingPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( shaderGroupHandleSize == rhs.shaderGroupHandleSize )
|
|
&& ( maxRecursionDepth == rhs.maxRecursionDepth )
|
|
&& ( maxShaderGroupStride == rhs.maxShaderGroupStride )
|
|
&& ( shaderGroupBaseAlignment == rhs.shaderGroupBaseAlignment )
|
|
&& ( maxGeometryCount == rhs.maxGeometryCount )
|
|
&& ( maxInstanceCount == rhs.maxInstanceCount )
|
|
&& ( maxTriangleCount == rhs.maxTriangleCount )
|
|
&& ( maxDescriptorSetAccelerationStructures == rhs.maxDescriptorSetAccelerationStructures );
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceRayTracingPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingPropertiesNV;
|
|
void* pNext = {};
|
|
uint32_t shaderGroupHandleSize = {};
|
|
uint32_t maxRecursionDepth = {};
|
|
uint32_t maxShaderGroupStride = {};
|
|
uint32_t shaderGroupBaseAlignment = {};
|
|
uint64_t maxGeometryCount = {};
|
|
uint64_t maxInstanceCount = {};
|
|
uint64_t maxTriangleCount = {};
|
|
uint32_t maxDescriptorSetAccelerationStructures = {};
|
|
};
|
|
static_assert( sizeof( PhysicalDeviceRayTracingPropertiesNV ) == sizeof( VkPhysicalDeviceRayTracingPropertiesNV ), "struct and wrapper have different size!" );
|
|
static_assert( std::is_standard_layout<PhysicalDeviceRayTracingPropertiesNV>::value, "struct wrapper is not a standard layout!" );
|
|
|
|
struct PhysicalDeviceRepresentativeFragmentTestFeaturesNV
|
|
{
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceRepresentativeFragmentTestFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: representativeFragmentTest( representativeFragmentTest_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceRepresentativeFragmentTestFeaturesNV( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const& rhs ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( rhs.pNext )
|
|
, representativeFragmentTest( rhs.representativeFragmentTest )
|
|
{}
|
|
|
|
PhysicalDeviceRepresentativeFragmentTestFeaturesNV & operator=( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceRepresentativeFragmentTestFeaturesNV ) - offsetof( PhysicalDeviceRepresentativeFragmentTestFeaturesNV, pNext ) );
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceRepresentativeFragmentTestFeaturesNV( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = rhs;
|
|
}
|
|
|
|
PhysicalDeviceRepresentativeFragmentTestFeaturesNV& operator=( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRepresentativeFragmentTestFeaturesNV const *>(&rhs);
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceRepresentativeFragmentTestFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceRepresentativeFragmentTestFeaturesNV & setRepresentativeFragmentTest( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
representativeFragmentTest = representativeFragmentTest_;
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV*>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV*>( this );
|
|
}
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const& ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( representativeFragmentTest == rhs.representativeFragmentTest );
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV;
|
|
void* pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest = {};
|
|
};
|
|
static_assert( sizeof( PhysicalDeviceRepresentativeFragmentTestFeaturesNV ) == sizeof( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV ), "struct and wrapper have different size!" );
|
|
static_assert( std::is_standard_layout<PhysicalDeviceRepresentativeFragmentTestFeaturesNV>::value, "struct wrapper is not a standard layout!" );
|
|
|
|
struct PhysicalDeviceSampleLocationsPropertiesEXT
|
|
{
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSampleLocationsPropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlags sampleLocationSampleCounts_ = {},
|
|
VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize_ = {},
|
|
std::array<float,2> const& sampleLocationCoordinateRange_ = {},
|
|
uint32_t sampleLocationSubPixelBits_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 variableSampleLocations_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: sampleLocationSampleCounts( sampleLocationSampleCounts_ )
|
|
, maxSampleLocationGridSize( maxSampleLocationGridSize_ )
|
|
, sampleLocationCoordinateRange{}
|
|
, sampleLocationSubPixelBits( sampleLocationSubPixelBits_ )
|
|
, variableSampleLocations( variableSampleLocations_ )
|
|
{
|
|
VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<float,2>::copy( sampleLocationCoordinateRange, sampleLocationCoordinateRange_ );
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSampleLocationsPropertiesEXT( PhysicalDeviceSampleLocationsPropertiesEXT const& rhs ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( rhs.pNext )
|
|
, sampleLocationSampleCounts( rhs.sampleLocationSampleCounts )
|
|
, maxSampleLocationGridSize( rhs.maxSampleLocationGridSize )
|
|
, sampleLocationCoordinateRange{}
|
|
, sampleLocationSubPixelBits( rhs.sampleLocationSubPixelBits )
|
|
, variableSampleLocations( rhs.variableSampleLocations )
|
|
{
|
|
VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<float,2>::copy( sampleLocationCoordinateRange, rhs.sampleLocationCoordinateRange );
|
|
}
|
|
|
|
PhysicalDeviceSampleLocationsPropertiesEXT & operator=( PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceSampleLocationsPropertiesEXT ) - offsetof( PhysicalDeviceSampleLocationsPropertiesEXT, pNext ) );
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceSampleLocationsPropertiesEXT( VkPhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = rhs;
|
|
}
|
|
|
|
PhysicalDeviceSampleLocationsPropertiesEXT& operator=( VkPhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSampleLocationsPropertiesEXT const *>(&rhs);
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDeviceSampleLocationsPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceSampleLocationsPropertiesEXT*>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceSampleLocationsPropertiesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceSampleLocationsPropertiesEXT*>( this );
|
|
}
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceSampleLocationsPropertiesEXT const& ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceSampleLocationsPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( sampleLocationSampleCounts == rhs.sampleLocationSampleCounts )
|
|
&& ( maxSampleLocationGridSize == rhs.maxSampleLocationGridSize )
|
|
&& ( memcmp( sampleLocationCoordinateRange, rhs.sampleLocationCoordinateRange, 2 * sizeof( float ) ) == 0 )
|
|
&& ( sampleLocationSubPixelBits == rhs.sampleLocationSubPixelBits )
|
|
&& ( variableSampleLocations == rhs.variableSampleLocations );
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceSampleLocationsPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT;
|
|
void* pNext = {};
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlags sampleLocationSampleCounts = {};
|
|
VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize = {};
|
|
float sampleLocationCoordinateRange[2] = {};
|
|
uint32_t sampleLocationSubPixelBits = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 variableSampleLocations = {};
|
|
};
|
|
static_assert( sizeof( PhysicalDeviceSampleLocationsPropertiesEXT ) == sizeof( VkPhysicalDeviceSampleLocationsPropertiesEXT ), "struct and wrapper have different size!" );
|
|
static_assert( std::is_standard_layout<PhysicalDeviceSampleLocationsPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
|
|
|
|
struct PhysicalDeviceSamplerFilterMinmaxPropertiesEXT
|
|
{
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerFilterMinmaxPropertiesEXT( VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: filterMinmaxSingleComponentFormats( filterMinmaxSingleComponentFormats_ )
|
|
, filterMinmaxImageComponentMapping( filterMinmaxImageComponentMapping_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerFilterMinmaxPropertiesEXT( PhysicalDeviceSamplerFilterMinmaxPropertiesEXT const& rhs ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( rhs.pNext )
|
|
, filterMinmaxSingleComponentFormats( rhs.filterMinmaxSingleComponentFormats )
|
|
, filterMinmaxImageComponentMapping( rhs.filterMinmaxImageComponentMapping )
|
|
{}
|
|
|
|
PhysicalDeviceSamplerFilterMinmaxPropertiesEXT & operator=( PhysicalDeviceSamplerFilterMinmaxPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceSamplerFilterMinmaxPropertiesEXT ) - offsetof( PhysicalDeviceSamplerFilterMinmaxPropertiesEXT, pNext ) );
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceSamplerFilterMinmaxPropertiesEXT( VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = rhs;
|
|
}
|
|
|
|
PhysicalDeviceSamplerFilterMinmaxPropertiesEXT& operator=( VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerFilterMinmaxPropertiesEXT const *>(&rhs);
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT*>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT*>( this );
|
|
}
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceSamplerFilterMinmaxPropertiesEXT const& ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceSamplerFilterMinmaxPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( filterMinmaxSingleComponentFormats == rhs.filterMinmaxSingleComponentFormats )
|
|
&& ( filterMinmaxImageComponentMapping == rhs.filterMinmaxImageComponentMapping );
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceSamplerFilterMinmaxPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSamplerFilterMinmaxPropertiesEXT;
|
|
void* pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping = {};
|
|
};
|
|
static_assert( sizeof( PhysicalDeviceSamplerFilterMinmaxPropertiesEXT ) == sizeof( VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT ), "struct and wrapper have different size!" );
|
|
static_assert( std::is_standard_layout<PhysicalDeviceSamplerFilterMinmaxPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
|
|
|
|
struct PhysicalDeviceSamplerYcbcrConversionFeatures
|
|
{
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerYcbcrConversionFeatures( VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: samplerYcbcrConversion( samplerYcbcrConversion_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerYcbcrConversionFeatures( PhysicalDeviceSamplerYcbcrConversionFeatures const& rhs ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( rhs.pNext )
|
|
, samplerYcbcrConversion( rhs.samplerYcbcrConversion )
|
|
{}
|
|
|
|
PhysicalDeviceSamplerYcbcrConversionFeatures & operator=( PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceSamplerYcbcrConversionFeatures ) - offsetof( PhysicalDeviceSamplerYcbcrConversionFeatures, pNext ) );
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceSamplerYcbcrConversionFeatures( VkPhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = rhs;
|
|
}
|
|
|
|
PhysicalDeviceSamplerYcbcrConversionFeatures& operator=( VkPhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerYcbcrConversionFeatures const *>(&rhs);
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceSamplerYcbcrConversionFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceSamplerYcbcrConversionFeatures & setSamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
samplerYcbcrConversion = samplerYcbcrConversion_;
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDeviceSamplerYcbcrConversionFeatures const&() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceSamplerYcbcrConversionFeatures*>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceSamplerYcbcrConversionFeatures &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceSamplerYcbcrConversionFeatures*>( this );
|
|
}
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceSamplerYcbcrConversionFeatures const& ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceSamplerYcbcrConversionFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( samplerYcbcrConversion == rhs.samplerYcbcrConversion );
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceSamplerYcbcrConversionFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures;
|
|
void* pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion = {};
|
|
};
|
|
static_assert( sizeof( PhysicalDeviceSamplerYcbcrConversionFeatures ) == sizeof( VkPhysicalDeviceSamplerYcbcrConversionFeatures ), "struct and wrapper have different size!" );
|
|
static_assert( std::is_standard_layout<PhysicalDeviceSamplerYcbcrConversionFeatures>::value, "struct wrapper is not a standard layout!" );
|
|
|
|
struct PhysicalDeviceScalarBlockLayoutFeaturesEXT
|
|
{
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceScalarBlockLayoutFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: scalarBlockLayout( scalarBlockLayout_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceScalarBlockLayoutFeaturesEXT( PhysicalDeviceScalarBlockLayoutFeaturesEXT const& rhs ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( rhs.pNext )
|
|
, scalarBlockLayout( rhs.scalarBlockLayout )
|
|
{}
|
|
|
|
PhysicalDeviceScalarBlockLayoutFeaturesEXT & operator=( PhysicalDeviceScalarBlockLayoutFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceScalarBlockLayoutFeaturesEXT ) - offsetof( PhysicalDeviceScalarBlockLayoutFeaturesEXT, pNext ) );
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceScalarBlockLayoutFeaturesEXT( VkPhysicalDeviceScalarBlockLayoutFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = rhs;
|
|
}
|
|
|
|
PhysicalDeviceScalarBlockLayoutFeaturesEXT& operator=( VkPhysicalDeviceScalarBlockLayoutFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceScalarBlockLayoutFeaturesEXT const *>(&rhs);
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceScalarBlockLayoutFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceScalarBlockLayoutFeaturesEXT & setScalarBlockLayout( VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
scalarBlockLayout = scalarBlockLayout_;
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDeviceScalarBlockLayoutFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceScalarBlockLayoutFeaturesEXT*>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceScalarBlockLayoutFeaturesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceScalarBlockLayoutFeaturesEXT*>( this );
|
|
}
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceScalarBlockLayoutFeaturesEXT const& ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceScalarBlockLayoutFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( scalarBlockLayout == rhs.scalarBlockLayout );
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceScalarBlockLayoutFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceScalarBlockLayoutFeaturesEXT;
|
|
void* pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout = {};
|
|
};
|
|
static_assert( sizeof( PhysicalDeviceScalarBlockLayoutFeaturesEXT ) == sizeof( VkPhysicalDeviceScalarBlockLayoutFeaturesEXT ), "struct and wrapper have different size!" );
|
|
static_assert( std::is_standard_layout<PhysicalDeviceScalarBlockLayoutFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
|
|
|
|
struct PhysicalDeviceShaderAtomicInt64FeaturesKHR
|
|
{
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicInt64FeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: shaderBufferInt64Atomics( shaderBufferInt64Atomics_ )
|
|
, shaderSharedInt64Atomics( shaderSharedInt64Atomics_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicInt64FeaturesKHR( PhysicalDeviceShaderAtomicInt64FeaturesKHR const& rhs ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( rhs.pNext )
|
|
, shaderBufferInt64Atomics( rhs.shaderBufferInt64Atomics )
|
|
, shaderSharedInt64Atomics( rhs.shaderSharedInt64Atomics )
|
|
{}
|
|
|
|
PhysicalDeviceShaderAtomicInt64FeaturesKHR & operator=( PhysicalDeviceShaderAtomicInt64FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceShaderAtomicInt64FeaturesKHR ) - offsetof( PhysicalDeviceShaderAtomicInt64FeaturesKHR, pNext ) );
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceShaderAtomicInt64FeaturesKHR( VkPhysicalDeviceShaderAtomicInt64FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = rhs;
|
|
}
|
|
|
|
PhysicalDeviceShaderAtomicInt64FeaturesKHR& operator=( VkPhysicalDeviceShaderAtomicInt64FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicInt64FeaturesKHR const *>(&rhs);
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceShaderAtomicInt64FeaturesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceShaderAtomicInt64FeaturesKHR & setShaderBufferInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderBufferInt64Atomics = shaderBufferInt64Atomics_;
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceShaderAtomicInt64FeaturesKHR & setShaderSharedInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderSharedInt64Atomics = shaderSharedInt64Atomics_;
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDeviceShaderAtomicInt64FeaturesKHR const&() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceShaderAtomicInt64FeaturesKHR*>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceShaderAtomicInt64FeaturesKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceShaderAtomicInt64FeaturesKHR*>( this );
|
|
}
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceShaderAtomicInt64FeaturesKHR const& ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceShaderAtomicInt64FeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( shaderBufferInt64Atomics == rhs.shaderBufferInt64Atomics )
|
|
&& ( shaderSharedInt64Atomics == rhs.shaderSharedInt64Atomics );
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceShaderAtomicInt64FeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderAtomicInt64FeaturesKHR;
|
|
void* pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics = {};
|
|
};
|
|
static_assert( sizeof( PhysicalDeviceShaderAtomicInt64FeaturesKHR ) == sizeof( VkPhysicalDeviceShaderAtomicInt64FeaturesKHR ), "struct and wrapper have different size!" );
|
|
static_assert( std::is_standard_layout<PhysicalDeviceShaderAtomicInt64FeaturesKHR>::value, "struct wrapper is not a standard layout!" );
|
|
|
|
struct PhysicalDeviceShaderCoreProperties2AMD
|
|
{
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreProperties2AMD( VULKAN_HPP_NAMESPACE::ShaderCorePropertiesFlagsAMD shaderCoreFeatures_ = {},
|
|
uint32_t activeComputeUnitCount_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: shaderCoreFeatures( shaderCoreFeatures_ )
|
|
, activeComputeUnitCount( activeComputeUnitCount_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreProperties2AMD( PhysicalDeviceShaderCoreProperties2AMD const& rhs ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( rhs.pNext )
|
|
, shaderCoreFeatures( rhs.shaderCoreFeatures )
|
|
, activeComputeUnitCount( rhs.activeComputeUnitCount )
|
|
{}
|
|
|
|
PhysicalDeviceShaderCoreProperties2AMD & operator=( PhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceShaderCoreProperties2AMD ) - offsetof( PhysicalDeviceShaderCoreProperties2AMD, pNext ) );
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceShaderCoreProperties2AMD( VkPhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = rhs;
|
|
}
|
|
|
|
PhysicalDeviceShaderCoreProperties2AMD& operator=( VkPhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreProperties2AMD const *>(&rhs);
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDeviceShaderCoreProperties2AMD const&() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceShaderCoreProperties2AMD*>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceShaderCoreProperties2AMD &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceShaderCoreProperties2AMD*>( this );
|
|
}
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceShaderCoreProperties2AMD const& ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceShaderCoreProperties2AMD const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( shaderCoreFeatures == rhs.shaderCoreFeatures )
|
|
&& ( activeComputeUnitCount == rhs.activeComputeUnitCount );
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceShaderCoreProperties2AMD const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderCoreProperties2AMD;
|
|
void* pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ShaderCorePropertiesFlagsAMD shaderCoreFeatures = {};
|
|
uint32_t activeComputeUnitCount = {};
|
|
};
|
|
static_assert( sizeof( PhysicalDeviceShaderCoreProperties2AMD ) == sizeof( VkPhysicalDeviceShaderCoreProperties2AMD ), "struct and wrapper have different size!" );
|
|
static_assert( std::is_standard_layout<PhysicalDeviceShaderCoreProperties2AMD>::value, "struct wrapper is not a standard layout!" );
|
|
|
|
struct PhysicalDeviceShaderCorePropertiesAMD
|
|
{
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCorePropertiesAMD( uint32_t shaderEngineCount_ = {},
|
|
uint32_t shaderArraysPerEngineCount_ = {},
|
|
uint32_t computeUnitsPerShaderArray_ = {},
|
|
uint32_t simdPerComputeUnit_ = {},
|
|
uint32_t wavefrontsPerSimd_ = {},
|
|
uint32_t wavefrontSize_ = {},
|
|
uint32_t sgprsPerSimd_ = {},
|
|
uint32_t minSgprAllocation_ = {},
|
|
uint32_t maxSgprAllocation_ = {},
|
|
uint32_t sgprAllocationGranularity_ = {},
|
|
uint32_t vgprsPerSimd_ = {},
|
|
uint32_t minVgprAllocation_ = {},
|
|
uint32_t maxVgprAllocation_ = {},
|
|
uint32_t vgprAllocationGranularity_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: shaderEngineCount( shaderEngineCount_ )
|
|
, shaderArraysPerEngineCount( shaderArraysPerEngineCount_ )
|
|
, computeUnitsPerShaderArray( computeUnitsPerShaderArray_ )
|
|
, simdPerComputeUnit( simdPerComputeUnit_ )
|
|
, wavefrontsPerSimd( wavefrontsPerSimd_ )
|
|
, wavefrontSize( wavefrontSize_ )
|
|
, sgprsPerSimd( sgprsPerSimd_ )
|
|
, minSgprAllocation( minSgprAllocation_ )
|
|
, maxSgprAllocation( maxSgprAllocation_ )
|
|
, sgprAllocationGranularity( sgprAllocationGranularity_ )
|
|
, vgprsPerSimd( vgprsPerSimd_ )
|
|
, minVgprAllocation( minVgprAllocation_ )
|
|
, maxVgprAllocation( maxVgprAllocation_ )
|
|
, vgprAllocationGranularity( vgprAllocationGranularity_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCorePropertiesAMD( PhysicalDeviceShaderCorePropertiesAMD const& rhs ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( rhs.pNext )
|
|
, shaderEngineCount( rhs.shaderEngineCount )
|
|
, shaderArraysPerEngineCount( rhs.shaderArraysPerEngineCount )
|
|
, computeUnitsPerShaderArray( rhs.computeUnitsPerShaderArray )
|
|
, simdPerComputeUnit( rhs.simdPerComputeUnit )
|
|
, wavefrontsPerSimd( rhs.wavefrontsPerSimd )
|
|
, wavefrontSize( rhs.wavefrontSize )
|
|
, sgprsPerSimd( rhs.sgprsPerSimd )
|
|
, minSgprAllocation( rhs.minSgprAllocation )
|
|
, maxSgprAllocation( rhs.maxSgprAllocation )
|
|
, sgprAllocationGranularity( rhs.sgprAllocationGranularity )
|
|
, vgprsPerSimd( rhs.vgprsPerSimd )
|
|
, minVgprAllocation( rhs.minVgprAllocation )
|
|
, maxVgprAllocation( rhs.maxVgprAllocation )
|
|
, vgprAllocationGranularity( rhs.vgprAllocationGranularity )
|
|
{}
|
|
|
|
PhysicalDeviceShaderCorePropertiesAMD & operator=( PhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceShaderCorePropertiesAMD ) - offsetof( PhysicalDeviceShaderCorePropertiesAMD, pNext ) );
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceShaderCorePropertiesAMD( VkPhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = rhs;
|
|
}
|
|
|
|
PhysicalDeviceShaderCorePropertiesAMD& operator=( VkPhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesAMD const *>(&rhs);
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDeviceShaderCorePropertiesAMD const&() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceShaderCorePropertiesAMD*>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceShaderCorePropertiesAMD &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceShaderCorePropertiesAMD*>( this );
|
|
}
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceShaderCorePropertiesAMD const& ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceShaderCorePropertiesAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( shaderEngineCount == rhs.shaderEngineCount )
|
|
&& ( shaderArraysPerEngineCount == rhs.shaderArraysPerEngineCount )
|
|
&& ( computeUnitsPerShaderArray == rhs.computeUnitsPerShaderArray )
|
|
&& ( simdPerComputeUnit == rhs.simdPerComputeUnit )
|
|
&& ( wavefrontsPerSimd == rhs.wavefrontsPerSimd )
|
|
&& ( wavefrontSize == rhs.wavefrontSize )
|
|
&& ( sgprsPerSimd == rhs.sgprsPerSimd )
|
|
&& ( minSgprAllocation == rhs.minSgprAllocation )
|
|
&& ( maxSgprAllocation == rhs.maxSgprAllocation )
|
|
&& ( sgprAllocationGranularity == rhs.sgprAllocationGranularity )
|
|
&& ( vgprsPerSimd == rhs.vgprsPerSimd )
|
|
&& ( minVgprAllocation == rhs.minVgprAllocation )
|
|
&& ( maxVgprAllocation == rhs.maxVgprAllocation )
|
|
&& ( vgprAllocationGranularity == rhs.vgprAllocationGranularity );
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceShaderCorePropertiesAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderCorePropertiesAMD;
|
|
void* pNext = {};
|
|
uint32_t shaderEngineCount = {};
|
|
uint32_t shaderArraysPerEngineCount = {};
|
|
uint32_t computeUnitsPerShaderArray = {};
|
|
uint32_t simdPerComputeUnit = {};
|
|
uint32_t wavefrontsPerSimd = {};
|
|
uint32_t wavefrontSize = {};
|
|
uint32_t sgprsPerSimd = {};
|
|
uint32_t minSgprAllocation = {};
|
|
uint32_t maxSgprAllocation = {};
|
|
uint32_t sgprAllocationGranularity = {};
|
|
uint32_t vgprsPerSimd = {};
|
|
uint32_t minVgprAllocation = {};
|
|
uint32_t maxVgprAllocation = {};
|
|
uint32_t vgprAllocationGranularity = {};
|
|
};
|
|
static_assert( sizeof( PhysicalDeviceShaderCorePropertiesAMD ) == sizeof( VkPhysicalDeviceShaderCorePropertiesAMD ), "struct and wrapper have different size!" );
|
|
static_assert( std::is_standard_layout<PhysicalDeviceShaderCorePropertiesAMD>::value, "struct wrapper is not a standard layout!" );
|
|
|
|
struct PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT
|
|
{
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: shaderDemoteToHelperInvocation( shaderDemoteToHelperInvocation_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const& rhs ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( rhs.pNext )
|
|
, shaderDemoteToHelperInvocation( rhs.shaderDemoteToHelperInvocation )
|
|
{}
|
|
|
|
PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT & operator=( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT ) - offsetof( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT, pNext ) );
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT( VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = rhs;
|
|
}
|
|
|
|
PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT& operator=( VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const *>(&rhs);
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT & setShaderDemoteToHelperInvocation( VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderDemoteToHelperInvocation = shaderDemoteToHelperInvocation_;
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT*>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT*>( this );
|
|
}
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const& ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( shaderDemoteToHelperInvocation == rhs.shaderDemoteToHelperInvocation );
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT;
|
|
void* pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation = {};
|
|
};
|
|
static_assert( sizeof( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT ) == sizeof( VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT ), "struct and wrapper have different size!" );
|
|
static_assert( std::is_standard_layout<PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
|
|
|
|
struct PhysicalDeviceShaderDrawParametersFeatures
|
|
{
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDrawParametersFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: shaderDrawParameters( shaderDrawParameters_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDrawParametersFeatures( PhysicalDeviceShaderDrawParametersFeatures const& rhs ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( rhs.pNext )
|
|
, shaderDrawParameters( rhs.shaderDrawParameters )
|
|
{}
|
|
|
|
PhysicalDeviceShaderDrawParametersFeatures & operator=( PhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceShaderDrawParametersFeatures ) - offsetof( PhysicalDeviceShaderDrawParametersFeatures, pNext ) );
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceShaderDrawParametersFeatures( VkPhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = rhs;
|
|
}
|
|
|
|
PhysicalDeviceShaderDrawParametersFeatures& operator=( VkPhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParametersFeatures const *>(&rhs);
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceShaderDrawParametersFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceShaderDrawParametersFeatures & setShaderDrawParameters( VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderDrawParameters = shaderDrawParameters_;
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDeviceShaderDrawParametersFeatures const&() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceShaderDrawParametersFeatures*>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceShaderDrawParametersFeatures &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceShaderDrawParametersFeatures*>( this );
|
|
}
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceShaderDrawParametersFeatures const& ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceShaderDrawParametersFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( shaderDrawParameters == rhs.shaderDrawParameters );
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceShaderDrawParametersFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderDrawParametersFeatures;
|
|
void* pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters = {};
|
|
};
|
|
static_assert( sizeof( PhysicalDeviceShaderDrawParametersFeatures ) == sizeof( VkPhysicalDeviceShaderDrawParametersFeatures ), "struct and wrapper have different size!" );
|
|
static_assert( std::is_standard_layout<PhysicalDeviceShaderDrawParametersFeatures>::value, "struct wrapper is not a standard layout!" );
|
|
|
|
struct PhysicalDeviceShaderFloat16Int8FeaturesKHR
|
|
{
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderFloat16Int8FeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: shaderFloat16( shaderFloat16_ )
|
|
, shaderInt8( shaderInt8_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderFloat16Int8FeaturesKHR( PhysicalDeviceShaderFloat16Int8FeaturesKHR const& rhs ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( rhs.pNext )
|
|
, shaderFloat16( rhs.shaderFloat16 )
|
|
, shaderInt8( rhs.shaderInt8 )
|
|
{}
|
|
|
|
PhysicalDeviceShaderFloat16Int8FeaturesKHR & operator=( PhysicalDeviceShaderFloat16Int8FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceShaderFloat16Int8FeaturesKHR ) - offsetof( PhysicalDeviceShaderFloat16Int8FeaturesKHR, pNext ) );
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceShaderFloat16Int8FeaturesKHR( VkPhysicalDeviceShaderFloat16Int8FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = rhs;
|
|
}
|
|
|
|
PhysicalDeviceShaderFloat16Int8FeaturesKHR& operator=( VkPhysicalDeviceShaderFloat16Int8FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat16Int8FeaturesKHR const *>(&rhs);
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceShaderFloat16Int8FeaturesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceShaderFloat16Int8FeaturesKHR & setShaderFloat16( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderFloat16 = shaderFloat16_;
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceShaderFloat16Int8FeaturesKHR & setShaderInt8( VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderInt8 = shaderInt8_;
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDeviceShaderFloat16Int8FeaturesKHR const&() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceShaderFloat16Int8FeaturesKHR*>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceShaderFloat16Int8FeaturesKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceShaderFloat16Int8FeaturesKHR*>( this );
|
|
}
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceShaderFloat16Int8FeaturesKHR const& ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceShaderFloat16Int8FeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( shaderFloat16 == rhs.shaderFloat16 )
|
|
&& ( shaderInt8 == rhs.shaderInt8 );
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceShaderFloat16Int8FeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderFloat16Int8FeaturesKHR;
|
|
void* pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16 = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderInt8 = {};
|
|
};
|
|
static_assert( sizeof( PhysicalDeviceShaderFloat16Int8FeaturesKHR ) == sizeof( VkPhysicalDeviceShaderFloat16Int8FeaturesKHR ), "struct and wrapper have different size!" );
|
|
static_assert( std::is_standard_layout<PhysicalDeviceShaderFloat16Int8FeaturesKHR>::value, "struct wrapper is not a standard layout!" );
|
|
|
|
  struct PhysicalDeviceShaderImageFootprintFeaturesNV
  {
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageFootprintFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 imageFootprint_ = {} ) VULKAN_HPP_NOEXCEPT
      : imageFootprint( imageFootprint_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageFootprintFeaturesNV( PhysicalDeviceShaderImageFootprintFeaturesNV const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , imageFootprint( rhs.imageFootprint )
    {}

    PhysicalDeviceShaderImageFootprintFeaturesNV & operator=( PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceShaderImageFootprintFeaturesNV ) - offsetof( PhysicalDeviceShaderImageFootprintFeaturesNV, pNext ) );
      return *this;
    }

    PhysicalDeviceShaderImageFootprintFeaturesNV( VkPhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    PhysicalDeviceShaderImageFootprintFeaturesNV& operator=( VkPhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageFootprintFeaturesNV const *>(&rhs);
      return *this;
    }

    PhysicalDeviceShaderImageFootprintFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceShaderImageFootprintFeaturesNV & setImageFootprint( VULKAN_HPP_NAMESPACE::Bool32 imageFootprint_ ) VULKAN_HPP_NOEXCEPT
    {
      imageFootprint = imageFootprint_;
      return *this;
    }

    operator VkPhysicalDeviceShaderImageFootprintFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderImageFootprintFeaturesNV*>( this );
    }

    operator VkPhysicalDeviceShaderImageFootprintFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderImageFootprintFeaturesNV*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceShaderImageFootprintFeaturesNV const& ) const = default;
#else
    bool operator==( PhysicalDeviceShaderImageFootprintFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( imageFootprint == rhs.imageFootprint );
    }

    bool operator!=( PhysicalDeviceShaderImageFootprintFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV;
    void* pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 imageFootprint = {};
  };
  static_assert( sizeof( PhysicalDeviceShaderImageFootprintFeaturesNV ) == sizeof( VkPhysicalDeviceShaderImageFootprintFeaturesNV ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceShaderImageFootprintFeaturesNV>::value, "struct wrapper is not a standard layout!" );

  struct PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL
  {
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerFunctions2_ = {} ) VULKAN_HPP_NOEXCEPT
      : shaderIntegerFunctions2( shaderIntegerFunctions2_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , shaderIntegerFunctions2( rhs.shaderIntegerFunctions2 )
    {}

    PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & operator=( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL ) - offsetof( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL, pNext ) );
      return *this;
    }

    PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL& operator=( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const *>(&rhs);
      return *this;
    }

    PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & setShaderIntegerFunctions2( VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerFunctions2_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderIntegerFunctions2 = shaderIntegerFunctions2_;
      return *this;
    }

    operator VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL*>( this );
    }

    operator VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const& ) const = default;
#else
    bool operator==( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( shaderIntegerFunctions2 == rhs.shaderIntegerFunctions2 );
    }

    bool operator!=( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL;
    void* pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerFunctions2 = {};
  };
  static_assert( sizeof( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL ) == sizeof( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL>::value, "struct wrapper is not a standard layout!" );

  struct PhysicalDeviceShaderSMBuiltinsFeaturesNV
  {
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins_ = {} ) VULKAN_HPP_NOEXCEPT
      : shaderSMBuiltins( shaderSMBuiltins_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsFeaturesNV( PhysicalDeviceShaderSMBuiltinsFeaturesNV const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , shaderSMBuiltins( rhs.shaderSMBuiltins )
    {}

    PhysicalDeviceShaderSMBuiltinsFeaturesNV & operator=( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceShaderSMBuiltinsFeaturesNV ) - offsetof( PhysicalDeviceShaderSMBuiltinsFeaturesNV, pNext ) );
      return *this;
    }

    PhysicalDeviceShaderSMBuiltinsFeaturesNV( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    PhysicalDeviceShaderSMBuiltinsFeaturesNV& operator=( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsFeaturesNV const *>(&rhs);
      return *this;
    }

    PhysicalDeviceShaderSMBuiltinsFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceShaderSMBuiltinsFeaturesNV & setShaderSMBuiltins( VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderSMBuiltins = shaderSMBuiltins_;
      return *this;
    }

    operator VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderSMBuiltinsFeaturesNV*>( this );
    }

    operator VkPhysicalDeviceShaderSMBuiltinsFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderSMBuiltinsFeaturesNV*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceShaderSMBuiltinsFeaturesNV const& ) const = default;
#else
    bool operator==( PhysicalDeviceShaderSMBuiltinsFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( shaderSMBuiltins == rhs.shaderSMBuiltins );
    }

    bool operator!=( PhysicalDeviceShaderSMBuiltinsFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV;
    void* pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins = {};
  };
  static_assert( sizeof( PhysicalDeviceShaderSMBuiltinsFeaturesNV ) == sizeof( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceShaderSMBuiltinsFeaturesNV>::value, "struct wrapper is not a standard layout!" );

  struct PhysicalDeviceShaderSMBuiltinsPropertiesNV
  {
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsPropertiesNV( uint32_t shaderSMCount_ = {},
                                                                     uint32_t shaderWarpsPerSM_ = {} ) VULKAN_HPP_NOEXCEPT
      : shaderSMCount( shaderSMCount_ )
      , shaderWarpsPerSM( shaderWarpsPerSM_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsPropertiesNV( PhysicalDeviceShaderSMBuiltinsPropertiesNV const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , shaderSMCount( rhs.shaderSMCount )
      , shaderWarpsPerSM( rhs.shaderWarpsPerSM )
    {}

    PhysicalDeviceShaderSMBuiltinsPropertiesNV & operator=( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceShaderSMBuiltinsPropertiesNV ) - offsetof( PhysicalDeviceShaderSMBuiltinsPropertiesNV, pNext ) );
      return *this;
    }

    PhysicalDeviceShaderSMBuiltinsPropertiesNV( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    PhysicalDeviceShaderSMBuiltinsPropertiesNV& operator=( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsPropertiesNV const *>(&rhs);
      return *this;
    }

    operator VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderSMBuiltinsPropertiesNV*>( this );
    }

    operator VkPhysicalDeviceShaderSMBuiltinsPropertiesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderSMBuiltinsPropertiesNV*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceShaderSMBuiltinsPropertiesNV const& ) const = default;
#else
    bool operator==( PhysicalDeviceShaderSMBuiltinsPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( shaderSMCount == rhs.shaderSMCount )
          && ( shaderWarpsPerSM == rhs.shaderWarpsPerSM );
    }

    bool operator!=( PhysicalDeviceShaderSMBuiltinsPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV;
    void* pNext = {};
    uint32_t shaderSMCount = {};
    uint32_t shaderWarpsPerSM = {};
  };
  static_assert( sizeof( PhysicalDeviceShaderSMBuiltinsPropertiesNV ) == sizeof( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceShaderSMBuiltinsPropertiesNV>::value, "struct wrapper is not a standard layout!" );

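  // Illustrative usage, not emitted by the generator (same assumptions as above: default vk
  // namespace and a valid vk::PhysicalDevice named physicalDevice): properties structs such as
  // PhysicalDeviceShaderSMBuiltinsPropertiesNV carry no setters because the driver fills them in;
  // they are read back through a PhysicalDeviceProperties2 pNext chain:
  //
  //   vk::PhysicalDeviceShaderSMBuiltinsPropertiesNV smBuiltinsProperties;
  //   vk::PhysicalDeviceProperties2 properties2;
  //   properties2.pNext = &smBuiltinsProperties;
  //   physicalDevice.getProperties2( &properties2 );
  //   uint32_t warpsPerSM = smBuiltinsProperties.shaderWarpsPerSM;
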
  struct PhysicalDeviceShadingRateImageFeaturesNV
  {
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImageFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage_ = {},
                                                                   VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder_ = {} ) VULKAN_HPP_NOEXCEPT
      : shadingRateImage( shadingRateImage_ )
      , shadingRateCoarseSampleOrder( shadingRateCoarseSampleOrder_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImageFeaturesNV( PhysicalDeviceShadingRateImageFeaturesNV const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , shadingRateImage( rhs.shadingRateImage )
      , shadingRateCoarseSampleOrder( rhs.shadingRateCoarseSampleOrder )
    {}

    PhysicalDeviceShadingRateImageFeaturesNV & operator=( PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceShadingRateImageFeaturesNV ) - offsetof( PhysicalDeviceShadingRateImageFeaturesNV, pNext ) );
      return *this;
    }

    PhysicalDeviceShadingRateImageFeaturesNV( VkPhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    PhysicalDeviceShadingRateImageFeaturesNV& operator=( VkPhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImageFeaturesNV const *>(&rhs);
      return *this;
    }

    PhysicalDeviceShadingRateImageFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceShadingRateImageFeaturesNV & setShadingRateImage( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage_ ) VULKAN_HPP_NOEXCEPT
    {
      shadingRateImage = shadingRateImage_;
      return *this;
    }

    PhysicalDeviceShadingRateImageFeaturesNV & setShadingRateCoarseSampleOrder( VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder_ ) VULKAN_HPP_NOEXCEPT
    {
      shadingRateCoarseSampleOrder = shadingRateCoarseSampleOrder_;
      return *this;
    }

    operator VkPhysicalDeviceShadingRateImageFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShadingRateImageFeaturesNV*>( this );
    }

    operator VkPhysicalDeviceShadingRateImageFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShadingRateImageFeaturesNV*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceShadingRateImageFeaturesNV const& ) const = default;
#else
    bool operator==( PhysicalDeviceShadingRateImageFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( shadingRateImage == rhs.shadingRateImage )
          && ( shadingRateCoarseSampleOrder == rhs.shadingRateCoarseSampleOrder );
    }

    bool operator!=( PhysicalDeviceShadingRateImageFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShadingRateImageFeaturesNV;
    void* pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage = {};
    VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder = {};
  };
  static_assert( sizeof( PhysicalDeviceShadingRateImageFeaturesNV ) == sizeof( VkPhysicalDeviceShadingRateImageFeaturesNV ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceShadingRateImageFeaturesNV>::value, "struct wrapper is not a standard layout!" );

  struct PhysicalDeviceShadingRateImagePropertiesNV
  {
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImagePropertiesNV( VULKAN_HPP_NAMESPACE::Extent2D shadingRateTexelSize_ = {},
                                                                     uint32_t shadingRatePaletteSize_ = {},
                                                                     uint32_t shadingRateMaxCoarseSamples_ = {} ) VULKAN_HPP_NOEXCEPT
      : shadingRateTexelSize( shadingRateTexelSize_ )
      , shadingRatePaletteSize( shadingRatePaletteSize_ )
      , shadingRateMaxCoarseSamples( shadingRateMaxCoarseSamples_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImagePropertiesNV( PhysicalDeviceShadingRateImagePropertiesNV const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , shadingRateTexelSize( rhs.shadingRateTexelSize )
      , shadingRatePaletteSize( rhs.shadingRatePaletteSize )
      , shadingRateMaxCoarseSamples( rhs.shadingRateMaxCoarseSamples )
    {}

    PhysicalDeviceShadingRateImagePropertiesNV & operator=( PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceShadingRateImagePropertiesNV ) - offsetof( PhysicalDeviceShadingRateImagePropertiesNV, pNext ) );
      return *this;
    }

    PhysicalDeviceShadingRateImagePropertiesNV( VkPhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    PhysicalDeviceShadingRateImagePropertiesNV& operator=( VkPhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImagePropertiesNV const *>(&rhs);
      return *this;
    }

    operator VkPhysicalDeviceShadingRateImagePropertiesNV const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShadingRateImagePropertiesNV*>( this );
    }

    operator VkPhysicalDeviceShadingRateImagePropertiesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShadingRateImagePropertiesNV*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceShadingRateImagePropertiesNV const& ) const = default;
#else
    bool operator==( PhysicalDeviceShadingRateImagePropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( shadingRateTexelSize == rhs.shadingRateTexelSize )
          && ( shadingRatePaletteSize == rhs.shadingRatePaletteSize )
          && ( shadingRateMaxCoarseSamples == rhs.shadingRateMaxCoarseSamples );
    }

    bool operator!=( PhysicalDeviceShadingRateImagePropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShadingRateImagePropertiesNV;
    void* pNext = {};
    VULKAN_HPP_NAMESPACE::Extent2D shadingRateTexelSize = {};
    uint32_t shadingRatePaletteSize = {};
    uint32_t shadingRateMaxCoarseSamples = {};
  };
  static_assert( sizeof( PhysicalDeviceShadingRateImagePropertiesNV ) == sizeof( VkPhysicalDeviceShadingRateImagePropertiesNV ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceShadingRateImagePropertiesNV>::value, "struct wrapper is not a standard layout!" );

  struct PhysicalDeviceSparseImageFormatInfo2
  {
    VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseImageFormatInfo2( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
                                                               VULKAN_HPP_NAMESPACE::ImageType type_ = VULKAN_HPP_NAMESPACE::ImageType::e1D,
                                                               VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
                                                               VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {},
                                                               VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal ) VULKAN_HPP_NOEXCEPT
      : format( format_ )
      , type( type_ )
      , samples( samples_ )
      , usage( usage_ )
      , tiling( tiling_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseImageFormatInfo2( PhysicalDeviceSparseImageFormatInfo2 const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , format( rhs.format )
      , type( rhs.type )
      , samples( rhs.samples )
      , usage( rhs.usage )
      , tiling( rhs.tiling )
    {}

    PhysicalDeviceSparseImageFormatInfo2 & operator=( PhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceSparseImageFormatInfo2 ) - offsetof( PhysicalDeviceSparseImageFormatInfo2, pNext ) );
      return *this;
    }

    PhysicalDeviceSparseImageFormatInfo2( VkPhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    PhysicalDeviceSparseImageFormatInfo2& operator=( VkPhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 const *>(&rhs);
      return *this;
    }

    PhysicalDeviceSparseImageFormatInfo2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceSparseImageFormatInfo2 & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
    {
      format = format_;
      return *this;
    }

    PhysicalDeviceSparseImageFormatInfo2 & setType( VULKAN_HPP_NAMESPACE::ImageType type_ ) VULKAN_HPP_NOEXCEPT
    {
      type = type_;
      return *this;
    }

    PhysicalDeviceSparseImageFormatInfo2 & setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT
    {
      samples = samples_;
      return *this;
    }

    PhysicalDeviceSparseImageFormatInfo2 & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
    {
      usage = usage_;
      return *this;
    }

    PhysicalDeviceSparseImageFormatInfo2 & setTiling( VULKAN_HPP_NAMESPACE::ImageTiling tiling_ ) VULKAN_HPP_NOEXCEPT
    {
      tiling = tiling_;
      return *this;
    }

    operator VkPhysicalDeviceSparseImageFormatInfo2 const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2*>( this );
    }

    operator VkPhysicalDeviceSparseImageFormatInfo2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceSparseImageFormatInfo2*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceSparseImageFormatInfo2 const& ) const = default;
#else
    bool operator==( PhysicalDeviceSparseImageFormatInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( format == rhs.format )
          && ( type == rhs.type )
          && ( samples == rhs.samples )
          && ( usage == rhs.usage )
          && ( tiling == rhs.tiling );
    }

    bool operator!=( PhysicalDeviceSparseImageFormatInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSparseImageFormatInfo2;
    const void* pNext = {};
    VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
    VULKAN_HPP_NAMESPACE::ImageType type = VULKAN_HPP_NAMESPACE::ImageType::e1D;
    VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
    VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {};
    VULKAN_HPP_NAMESPACE::ImageTiling tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal;
  };
  static_assert( sizeof( PhysicalDeviceSparseImageFormatInfo2 ) == sizeof( VkPhysicalDeviceSparseImageFormatInfo2 ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceSparseImageFormatInfo2>::value, "struct wrapper is not a standard layout!" );

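  // Illustrative usage, not emitted by the generator (default vk namespace and a valid
  // vk::PhysicalDevice named physicalDevice are assumed): because every setter returns *this,
  // an input struct such as PhysicalDeviceSparseImageFormatInfo2 can be built fluently and
  // passed straight to the corresponding query:
  //
  //   auto formatInfo = vk::PhysicalDeviceSparseImageFormatInfo2{}
  //                         .setFormat( vk::Format::eR8G8B8A8Unorm )
  //                         .setType( vk::ImageType::e2D )
  //                         .setSamples( vk::SampleCountFlagBits::e1 )
  //                         .setUsage( vk::ImageUsageFlagBits::eSampled )
  //                         .setTiling( vk::ImageTiling::eOptimal );
  //   auto sparseProperties = physicalDevice.getSparseImageFormatProperties2( formatInfo );
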
struct PhysicalDeviceSubgroupProperties
|
|
{
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupProperties( uint32_t subgroupSize_ = {},
|
|
VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedStages_ = {},
|
|
VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags supportedOperations_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 quadOperationsInAllStages_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: subgroupSize( subgroupSize_ )
|
|
, supportedStages( supportedStages_ )
|
|
, supportedOperations( supportedOperations_ )
|
|
, quadOperationsInAllStages( quadOperationsInAllStages_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupProperties( PhysicalDeviceSubgroupProperties const& rhs ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( rhs.pNext )
|
|
, subgroupSize( rhs.subgroupSize )
|
|
, supportedStages( rhs.supportedStages )
|
|
, supportedOperations( rhs.supportedOperations )
|
|
, quadOperationsInAllStages( rhs.quadOperationsInAllStages )
|
|
{}
|
|
|
|
PhysicalDeviceSubgroupProperties & operator=( PhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceSubgroupProperties ) - offsetof( PhysicalDeviceSubgroupProperties, pNext ) );
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceSubgroupProperties( VkPhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = rhs;
|
|
}
|
|
|
|
PhysicalDeviceSubgroupProperties& operator=( VkPhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupProperties const *>(&rhs);
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDeviceSubgroupProperties const&() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceSubgroupProperties*>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceSubgroupProperties &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceSubgroupProperties*>( this );
|
|
}
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceSubgroupProperties const& ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceSubgroupProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( subgroupSize == rhs.subgroupSize )
|
|
&& ( supportedStages == rhs.supportedStages )
|
|
&& ( supportedOperations == rhs.supportedOperations )
|
|
&& ( quadOperationsInAllStages == rhs.quadOperationsInAllStages );
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceSubgroupProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubgroupProperties;
|
|
void* pNext = {};
|
|
uint32_t subgroupSize = {};
|
|
VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedStages = {};
|
|
VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags supportedOperations = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 quadOperationsInAllStages = {};
|
|
};
|
|
static_assert( sizeof( PhysicalDeviceSubgroupProperties ) == sizeof( VkPhysicalDeviceSubgroupProperties ), "struct and wrapper have different size!" );
|
|
static_assert( std::is_standard_layout<PhysicalDeviceSubgroupProperties>::value, "struct wrapper is not a standard layout!" );
|
|
|
|
struct PhysicalDeviceSubgroupSizeControlFeaturesEXT
|
|
{
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: subgroupSizeControl( subgroupSizeControl_ )
|
|
, computeFullSubgroups( computeFullSubgroups_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlFeaturesEXT( PhysicalDeviceSubgroupSizeControlFeaturesEXT const& rhs ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( rhs.pNext )
|
|
, subgroupSizeControl( rhs.subgroupSizeControl )
|
|
, computeFullSubgroups( rhs.computeFullSubgroups )
|
|
{}
|
|
|
|
PhysicalDeviceSubgroupSizeControlFeaturesEXT & operator=( PhysicalDeviceSubgroupSizeControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceSubgroupSizeControlFeaturesEXT ) - offsetof( PhysicalDeviceSubgroupSizeControlFeaturesEXT, pNext ) );
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceSubgroupSizeControlFeaturesEXT( VkPhysicalDeviceSubgroupSizeControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = rhs;
|
|
}
|
|
|
|
PhysicalDeviceSubgroupSizeControlFeaturesEXT& operator=( VkPhysicalDeviceSubgroupSizeControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlFeaturesEXT const *>(&rhs);
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceSubgroupSizeControlFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceSubgroupSizeControlFeaturesEXT & setSubgroupSizeControl( VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
subgroupSizeControl = subgroupSizeControl_;
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceSubgroupSizeControlFeaturesEXT & setComputeFullSubgroups( VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
computeFullSubgroups = computeFullSubgroups_;
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDeviceSubgroupSizeControlFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceSubgroupSizeControlFeaturesEXT*>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceSubgroupSizeControlFeaturesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceSubgroupSizeControlFeaturesEXT*>( this );
|
|
}
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceSubgroupSizeControlFeaturesEXT const& ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceSubgroupSizeControlFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( subgroupSizeControl == rhs.subgroupSizeControl )
|
|
&& ( computeFullSubgroups == rhs.computeFullSubgroups );
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceSubgroupSizeControlFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubgroupSizeControlFeaturesEXT;
|
|
void* pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups = {};
|
|
};
|
|
static_assert( sizeof( PhysicalDeviceSubgroupSizeControlFeaturesEXT ) == sizeof( VkPhysicalDeviceSubgroupSizeControlFeaturesEXT ), "struct and wrapper have different size!" );
|
|
static_assert( std::is_standard_layout<PhysicalDeviceSubgroupSizeControlFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
|
|
|
|
struct PhysicalDeviceSubgroupSizeControlPropertiesEXT
|
|
{
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlPropertiesEXT( uint32_t minSubgroupSize_ = {},
|
|
uint32_t maxSubgroupSize_ = {},
|
|
uint32_t maxComputeWorkgroupSubgroups_ = {},
|
|
VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: minSubgroupSize( minSubgroupSize_ )
|
|
, maxSubgroupSize( maxSubgroupSize_ )
|
|
, maxComputeWorkgroupSubgroups( maxComputeWorkgroupSubgroups_ )
|
|
, requiredSubgroupSizeStages( requiredSubgroupSizeStages_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlPropertiesEXT( PhysicalDeviceSubgroupSizeControlPropertiesEXT const& rhs ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( rhs.pNext )
|
|
, minSubgroupSize( rhs.minSubgroupSize )
|
|
, maxSubgroupSize( rhs.maxSubgroupSize )
|
|
, maxComputeWorkgroupSubgroups( rhs.maxComputeWorkgroupSubgroups )
|
|
, requiredSubgroupSizeStages( rhs.requiredSubgroupSizeStages )
|
|
{}
|
|
|
|
PhysicalDeviceSubgroupSizeControlPropertiesEXT & operator=( PhysicalDeviceSubgroupSizeControlPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceSubgroupSizeControlPropertiesEXT ) - offsetof( PhysicalDeviceSubgroupSizeControlPropertiesEXT, pNext ) );
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceSubgroupSizeControlPropertiesEXT( VkPhysicalDeviceSubgroupSizeControlPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = rhs;
|
|
}
|
|
|
|
PhysicalDeviceSubgroupSizeControlPropertiesEXT& operator=( VkPhysicalDeviceSubgroupSizeControlPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlPropertiesEXT const *>(&rhs);
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDeviceSubgroupSizeControlPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceSubgroupSizeControlPropertiesEXT*>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceSubgroupSizeControlPropertiesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceSubgroupSizeControlPropertiesEXT*>( this );
|
|
}
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceSubgroupSizeControlPropertiesEXT const& ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceSubgroupSizeControlPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( minSubgroupSize == rhs.minSubgroupSize )
|
|
&& ( maxSubgroupSize == rhs.maxSubgroupSize )
|
|
&& ( maxComputeWorkgroupSubgroups == rhs.maxComputeWorkgroupSubgroups )
|
|
&& ( requiredSubgroupSizeStages == rhs.requiredSubgroupSizeStages );
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceSubgroupSizeControlPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubgroupSizeControlPropertiesEXT;
|
|
void* pNext = {};
|
|
uint32_t minSubgroupSize = {};
|
|
uint32_t maxSubgroupSize = {};
|
|
uint32_t maxComputeWorkgroupSubgroups = {};
|
|
VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages = {};
|
|
};
|
|
static_assert( sizeof( PhysicalDeviceSubgroupSizeControlPropertiesEXT ) == sizeof( VkPhysicalDeviceSubgroupSizeControlPropertiesEXT ), "struct and wrapper have different size!" );
|
|
static_assert( std::is_standard_layout<PhysicalDeviceSubgroupSizeControlPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
|
|
|
|
struct PhysicalDeviceSurfaceInfo2KHR
|
|
{
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceSurfaceInfo2KHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: surface( surface_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceSurfaceInfo2KHR( PhysicalDeviceSurfaceInfo2KHR const& rhs ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( rhs.pNext )
|
|
, surface( rhs.surface )
|
|
{}
|
|
|
|
PhysicalDeviceSurfaceInfo2KHR & operator=( PhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceSurfaceInfo2KHR ) - offsetof( PhysicalDeviceSurfaceInfo2KHR, pNext ) );
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceSurfaceInfo2KHR( VkPhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = rhs;
|
|
}
|
|
|
|
PhysicalDeviceSurfaceInfo2KHR& operator=( VkPhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR const *>(&rhs);
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceSurfaceInfo2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceSurfaceInfo2KHR & setSurface( VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
surface = surface_;
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDeviceSurfaceInfo2KHR const&() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceSurfaceInfo2KHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceSurfaceInfo2KHR*>( this );
|
|
}
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceSurfaceInfo2KHR const& ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceSurfaceInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( surface == rhs.surface );
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceSurfaceInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSurfaceInfo2KHR;
|
|
const void* pNext = {};
|
|
VULKAN_HPP_NAMESPACE::SurfaceKHR surface = {};
|
|
};
|
|
static_assert( sizeof( PhysicalDeviceSurfaceInfo2KHR ) == sizeof( VkPhysicalDeviceSurfaceInfo2KHR ), "struct and wrapper have different size!" );
|
|
static_assert( std::is_standard_layout<PhysicalDeviceSurfaceInfo2KHR>::value, "struct wrapper is not a standard layout!" );
|
|
|
|
struct PhysicalDeviceTexelBufferAlignmentFeaturesEXT
|
|
{
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: texelBufferAlignment( texelBufferAlignment_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentFeaturesEXT( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const& rhs ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( rhs.pNext )
|
|
, texelBufferAlignment( rhs.texelBufferAlignment )
|
|
{}
|
|
|
|
PhysicalDeviceTexelBufferAlignmentFeaturesEXT & operator=( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceTexelBufferAlignmentFeaturesEXT ) - offsetof( PhysicalDeviceTexelBufferAlignmentFeaturesEXT, pNext ) );
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceTexelBufferAlignmentFeaturesEXT( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = rhs;
|
|
}
|
|
|
|
PhysicalDeviceTexelBufferAlignmentFeaturesEXT& operator=( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentFeaturesEXT const *>(&rhs);
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceTexelBufferAlignmentFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceTexelBufferAlignmentFeaturesEXT & setTexelBufferAlignment( VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
texelBufferAlignment = texelBufferAlignment_;
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT*>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT*>( this );
|
|
}
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const& ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( texelBufferAlignment == rhs.texelBufferAlignment );
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT;
|
|
void* pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment = {};
|
|
};
|
|
static_assert( sizeof( PhysicalDeviceTexelBufferAlignmentFeaturesEXT ) == sizeof( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT ), "struct and wrapper have different size!" );
|
|
static_assert( std::is_standard_layout<PhysicalDeviceTexelBufferAlignmentFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
|
|
|
|
struct PhysicalDeviceTexelBufferAlignmentPropertiesEXT
|
|
{
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentPropertiesEXT( VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment_ = {},
|
|
VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: storageTexelBufferOffsetAlignmentBytes( storageTexelBufferOffsetAlignmentBytes_ )
|
|
, storageTexelBufferOffsetSingleTexelAlignment( storageTexelBufferOffsetSingleTexelAlignment_ )
|
|
, uniformTexelBufferOffsetAlignmentBytes( uniformTexelBufferOffsetAlignmentBytes_ )
|
|
, uniformTexelBufferOffsetSingleTexelAlignment( uniformTexelBufferOffsetSingleTexelAlignment_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentPropertiesEXT( PhysicalDeviceTexelBufferAlignmentPropertiesEXT const& rhs ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( rhs.pNext )
|
|
, storageTexelBufferOffsetAlignmentBytes( rhs.storageTexelBufferOffsetAlignmentBytes )
|
|
, storageTexelBufferOffsetSingleTexelAlignment( rhs.storageTexelBufferOffsetSingleTexelAlignment )
|
|
, uniformTexelBufferOffsetAlignmentBytes( rhs.uniformTexelBufferOffsetAlignmentBytes )
|
|
, uniformTexelBufferOffsetSingleTexelAlignment( rhs.uniformTexelBufferOffsetSingleTexelAlignment )
|
|
{}
|
|
|
|
PhysicalDeviceTexelBufferAlignmentPropertiesEXT & operator=( PhysicalDeviceTexelBufferAlignmentPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceTexelBufferAlignmentPropertiesEXT ) - offsetof( PhysicalDeviceTexelBufferAlignmentPropertiesEXT, pNext ) );
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceTexelBufferAlignmentPropertiesEXT( VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = rhs;
|
|
}
|
|
|
|
PhysicalDeviceTexelBufferAlignmentPropertiesEXT& operator=( VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentPropertiesEXT const *>(&rhs);
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT*>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT*>( this );
|
|
}
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceTexelBufferAlignmentPropertiesEXT const& ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceTexelBufferAlignmentPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( storageTexelBufferOffsetAlignmentBytes == rhs.storageTexelBufferOffsetAlignmentBytes )
|
|
&& ( storageTexelBufferOffsetSingleTexelAlignment == rhs.storageTexelBufferOffsetSingleTexelAlignment )
|
|
&& ( uniformTexelBufferOffsetAlignmentBytes == rhs.uniformTexelBufferOffsetAlignmentBytes )
|
|
&& ( uniformTexelBufferOffsetSingleTexelAlignment == rhs.uniformTexelBufferOffsetSingleTexelAlignment );
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceTexelBufferAlignmentPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTexelBufferAlignmentPropertiesEXT;
|
|
void* pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment = {};
|
|
};
|
|
static_assert( sizeof( PhysicalDeviceTexelBufferAlignmentPropertiesEXT ) == sizeof( VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT ), "struct and wrapper have different size!" );
|
|
static_assert( std::is_standard_layout<PhysicalDeviceTexelBufferAlignmentPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
|
|
|
|
struct PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT
|
|
{
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: textureCompressionASTC_HDR( textureCompressionASTC_HDR_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const& rhs ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( rhs.pNext )
|
|
, textureCompressionASTC_HDR( rhs.textureCompressionASTC_HDR )
|
|
{}
|
|
|
|
PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT & operator=( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT ) - offsetof( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT, pNext ) );
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT( VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = rhs;
|
|
}
|
|
|
|
PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT& operator=( VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const *>(&rhs);
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT & setTextureCompressionASTC_HDR( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
textureCompressionASTC_HDR = textureCompressionASTC_HDR_;
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT*>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT*>( this );
|
|
}
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const& ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( textureCompressionASTC_HDR == rhs.textureCompressionASTC_HDR );
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeaturesEXT;
|
|
const void* pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR = {};
|
|
};
|
|
static_assert( sizeof( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT ) == sizeof( VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT ), "struct and wrapper have different size!" );
|
|
static_assert( std::is_standard_layout<PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
|
|
|
|
struct PhysicalDeviceTransformFeedbackFeaturesEXT
|
|
{
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 transformFeedback_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 geometryStreams_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: transformFeedback( transformFeedback_ )
|
|
, geometryStreams( geometryStreams_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackFeaturesEXT( PhysicalDeviceTransformFeedbackFeaturesEXT const& rhs ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( rhs.pNext )
|
|
, transformFeedback( rhs.transformFeedback )
|
|
, geometryStreams( rhs.geometryStreams )
|
|
{}
|
|
|
|
PhysicalDeviceTransformFeedbackFeaturesEXT & operator=( PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceTransformFeedbackFeaturesEXT ) - offsetof( PhysicalDeviceTransformFeedbackFeaturesEXT, pNext ) );
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceTransformFeedbackFeaturesEXT( VkPhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = rhs;
|
|
}
|
|
|
|
PhysicalDeviceTransformFeedbackFeaturesEXT& operator=( VkPhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackFeaturesEXT const *>(&rhs);
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceTransformFeedbackFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceTransformFeedbackFeaturesEXT & setTransformFeedback( VULKAN_HPP_NAMESPACE::Bool32 transformFeedback_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
transformFeedback = transformFeedback_;
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceTransformFeedbackFeaturesEXT & setGeometryStreams( VULKAN_HPP_NAMESPACE::Bool32 geometryStreams_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
geometryStreams = geometryStreams_;
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDeviceTransformFeedbackFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceTransformFeedbackFeaturesEXT*>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceTransformFeedbackFeaturesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceTransformFeedbackFeaturesEXT*>( this );
|
|
}
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceTransformFeedbackFeaturesEXT const& ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceTransformFeedbackFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( transformFeedback == rhs.transformFeedback )
|
|
&& ( geometryStreams == rhs.geometryStreams );
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceTransformFeedbackFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT;
|
|
void* pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 transformFeedback = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 geometryStreams = {};
|
|
};
|
|
static_assert( sizeof( PhysicalDeviceTransformFeedbackFeaturesEXT ) == sizeof( VkPhysicalDeviceTransformFeedbackFeaturesEXT ), "struct and wrapper have different size!" );
|
|
static_assert( std::is_standard_layout<PhysicalDeviceTransformFeedbackFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
|
|
|
|
struct PhysicalDeviceTransformFeedbackPropertiesEXT
|
|
{
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackPropertiesEXT( uint32_t maxTransformFeedbackStreams_ = {},
|
|
uint32_t maxTransformFeedbackBuffers_ = {},
|
|
VULKAN_HPP_NAMESPACE::DeviceSize maxTransformFeedbackBufferSize_ = {},
|
|
uint32_t maxTransformFeedbackStreamDataSize_ = {},
|
|
uint32_t maxTransformFeedbackBufferDataSize_ = {},
|
|
uint32_t maxTransformFeedbackBufferDataStride_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackQueries_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackStreamsLinesTriangles_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackRasterizationStreamSelect_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackDraw_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: maxTransformFeedbackStreams( maxTransformFeedbackStreams_ )
|
|
, maxTransformFeedbackBuffers( maxTransformFeedbackBuffers_ )
|
|
, maxTransformFeedbackBufferSize( maxTransformFeedbackBufferSize_ )
|
|
, maxTransformFeedbackStreamDataSize( maxTransformFeedbackStreamDataSize_ )
|
|
, maxTransformFeedbackBufferDataSize( maxTransformFeedbackBufferDataSize_ )
|
|
, maxTransformFeedbackBufferDataStride( maxTransformFeedbackBufferDataStride_ )
|
|
, transformFeedbackQueries( transformFeedbackQueries_ )
|
|
, transformFeedbackStreamsLinesTriangles( transformFeedbackStreamsLinesTriangles_ )
|
|
, transformFeedbackRasterizationStreamSelect( transformFeedbackRasterizationStreamSelect_ )
|
|
, transformFeedbackDraw( transformFeedbackDraw_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackPropertiesEXT( PhysicalDeviceTransformFeedbackPropertiesEXT const& rhs ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( rhs.pNext )
|
|
, maxTransformFeedbackStreams( rhs.maxTransformFeedbackStreams )
|
|
, maxTransformFeedbackBuffers( rhs.maxTransformFeedbackBuffers )
|
|
, maxTransformFeedbackBufferSize( rhs.maxTransformFeedbackBufferSize )
|
|
, maxTransformFeedbackStreamDataSize( rhs.maxTransformFeedbackStreamDataSize )
|
|
, maxTransformFeedbackBufferDataSize( rhs.maxTransformFeedbackBufferDataSize )
|
|
, maxTransformFeedbackBufferDataStride( rhs.maxTransformFeedbackBufferDataStride )
|
|
, transformFeedbackQueries( rhs.transformFeedbackQueries )
|
|
, transformFeedbackStreamsLinesTriangles( rhs.transformFeedbackStreamsLinesTriangles )
|
|
, transformFeedbackRasterizationStreamSelect( rhs.transformFeedbackRasterizationStreamSelect )
|
|
, transformFeedbackDraw( rhs.transformFeedbackDraw )
|
|
{}
|
|
|
|
PhysicalDeviceTransformFeedbackPropertiesEXT & operator=( PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceTransformFeedbackPropertiesEXT ) - offsetof( PhysicalDeviceTransformFeedbackPropertiesEXT, pNext ) );
|
|
return *this;
|
|
}
|
|
|
|
PhysicalDeviceTransformFeedbackPropertiesEXT( VkPhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = rhs;
|
|
}
|
|
|
|
PhysicalDeviceTransformFeedbackPropertiesEXT& operator=( VkPhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackPropertiesEXT const *>(&rhs);
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDeviceTransformFeedbackPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceTransformFeedbackPropertiesEXT*>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceTransformFeedbackPropertiesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceTransformFeedbackPropertiesEXT*>( this );
|
|
}
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceTransformFeedbackPropertiesEXT const& ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceTransformFeedbackPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( maxTransformFeedbackStreams == rhs.maxTransformFeedbackStreams )
|
|
&& ( maxTransformFeedbackBuffers == rhs.maxTransformFeedbackBuffers )
|
|
&& ( maxTransformFeedbackBufferSize == rhs.maxTransformFeedbackBufferSize )
|
|
&& ( maxTransformFeedbackStreamDataSize == rhs.maxTransformFeedbackStreamDataSize )
|
|
&& ( maxTransformFeedbackBufferDataSize == rhs.maxTransformFeedbackBufferDataSize )
|
|
&& ( maxTransformFeedbackBufferDataStride == rhs.maxTransformFeedbackBufferDataStride )
|
|
&& ( transformFeedbackQueries == rhs.transformFeedbackQueries )
|
|
&& ( transformFeedbackStreamsLinesTriangles == rhs.transformFeedbackStreamsLinesTriangles )
|
|
&& ( transformFeedbackRasterizationStreamSelect == rhs.transformFeedbackRasterizationStreamSelect )
|
|
&& ( transformFeedbackDraw == rhs.transformFeedbackDraw );
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceTransformFeedbackPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT;
|
|
void* pNext = {};
|
|
uint32_t maxTransformFeedbackStreams = {};
|
|
uint32_t maxTransformFeedbackBuffers = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize maxTransformFeedbackBufferSize = {};
|
|
uint32_t maxTransformFeedbackStreamDataSize = {};
|
|
uint32_t maxTransformFeedbackBufferDataSize = {};
|
|
uint32_t maxTransformFeedbackBufferDataStride = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackQueries = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackStreamsLinesTriangles = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackRasterizationStreamSelect = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackDraw = {};
|
|
};
|
|
static_assert( sizeof( PhysicalDeviceTransformFeedbackPropertiesEXT ) == sizeof( VkPhysicalDeviceTransformFeedbackPropertiesEXT ), "struct and wrapper have different size!" );
|
|
static_assert( std::is_standard_layout<PhysicalDeviceTransformFeedbackPropertiesEXT>::value, "struct wrapper is not a standard layout!" );

  struct PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR
  {
    VULKAN_HPP_CONSTEXPR PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ = {} ) VULKAN_HPP_NOEXCEPT
      : uniformBufferStandardLayout( uniformBufferStandardLayout_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR( PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , uniformBufferStandardLayout( rhs.uniformBufferStandardLayout )
    {}

    PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR & operator=( PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR ) - offsetof( PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR, pNext ) );
      return *this;
    }

    PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR( VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR& operator=( VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR const *>(&rhs);
      return *this;
    }

    PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR & setUniformBufferStandardLayout( VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      uniformBufferStandardLayout = uniformBufferStandardLayout_;
      return *this;
    }

    operator VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR*>( this );
    }

    operator VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR const& ) const = default;
#else
    bool operator==( PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( uniformBufferStandardLayout == rhs.uniformBufferStandardLayout );
    }

    bool operator!=( PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeaturesKHR;
    void* pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout = {};
  };
  static_assert( sizeof( PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR ) == sizeof( VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
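
  // Editorial note, not emitted by VulkanHppGenerator: feature wrappers like this one are
  // typically chained into PhysicalDeviceFeatures2 to query support; 'physicalDevice' is an
  // assumed, already-acquired vk::PhysicalDevice.
  //
  //   auto features  = physicalDevice.getFeatures2<PhysicalDeviceFeatures2,
  //                                                PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR>();
  //   bool supported = features.get<PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR>().uniformBufferStandardLayout;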

  struct PhysicalDeviceVariablePointersFeatures
  {
    VULKAN_HPP_CONSTEXPR PhysicalDeviceVariablePointersFeatures( VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ = {},
                                                                 VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ = {} ) VULKAN_HPP_NOEXCEPT
      : variablePointersStorageBuffer( variablePointersStorageBuffer_ )
      , variablePointers( variablePointers_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceVariablePointersFeatures( PhysicalDeviceVariablePointersFeatures const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , variablePointersStorageBuffer( rhs.variablePointersStorageBuffer )
      , variablePointers( rhs.variablePointers )
    {}

    PhysicalDeviceVariablePointersFeatures & operator=( PhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceVariablePointersFeatures ) - offsetof( PhysicalDeviceVariablePointersFeatures, pNext ) );
      return *this;
    }

    PhysicalDeviceVariablePointersFeatures( VkPhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    PhysicalDeviceVariablePointersFeatures& operator=( VkPhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointersFeatures const *>(&rhs);
      return *this;
    }

    PhysicalDeviceVariablePointersFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceVariablePointersFeatures & setVariablePointersStorageBuffer( VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ ) VULKAN_HPP_NOEXCEPT
    {
      variablePointersStorageBuffer = variablePointersStorageBuffer_;
      return *this;
    }

    PhysicalDeviceVariablePointersFeatures & setVariablePointers( VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ ) VULKAN_HPP_NOEXCEPT
    {
      variablePointers = variablePointers_;
      return *this;
    }

    operator VkPhysicalDeviceVariablePointersFeatures const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceVariablePointersFeatures*>( this );
    }

    operator VkPhysicalDeviceVariablePointersFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceVariablePointersFeatures*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceVariablePointersFeatures const& ) const = default;
#else
    bool operator==( PhysicalDeviceVariablePointersFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( variablePointersStorageBuffer == rhs.variablePointersStorageBuffer )
          && ( variablePointers == rhs.variablePointers );
    }

    bool operator!=( PhysicalDeviceVariablePointersFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVariablePointersFeatures;
    void* pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer = {};
    VULKAN_HPP_NAMESPACE::Bool32 variablePointers = {};
  };
  static_assert( sizeof( PhysicalDeviceVariablePointersFeatures ) == sizeof( VkPhysicalDeviceVariablePointersFeatures ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceVariablePointersFeatures>::value, "struct wrapper is not a standard layout!" );
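
  // Editorial note, not emitted by VulkanHppGenerator: a sketch of enabling these features at
  // device creation by filling the struct with its fluent setters and linking it into
  // DeviceCreateInfo::pNext; 'deviceCreateInfo' is a hypothetical, separately-initialized instance.
  //
  //   auto variablePointerFeatures = PhysicalDeviceVariablePointersFeatures{}
  //                                      .setVariablePointersStorageBuffer( VK_TRUE )
  //                                      .setVariablePointers( VK_TRUE );
  //   deviceCreateInfo.setPNext( &variablePointerFeatures );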

  struct PhysicalDeviceVertexAttributeDivisorFeaturesEXT
  {
    VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor_ = {},
                                                                          VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor_ = {} ) VULKAN_HPP_NOEXCEPT
      : vertexAttributeInstanceRateDivisor( vertexAttributeInstanceRateDivisor_ )
      , vertexAttributeInstanceRateZeroDivisor( vertexAttributeInstanceRateZeroDivisor_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorFeaturesEXT( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , vertexAttributeInstanceRateDivisor( rhs.vertexAttributeInstanceRateDivisor )
      , vertexAttributeInstanceRateZeroDivisor( rhs.vertexAttributeInstanceRateZeroDivisor )
    {}

    PhysicalDeviceVertexAttributeDivisorFeaturesEXT & operator=( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceVertexAttributeDivisorFeaturesEXT ) - offsetof( PhysicalDeviceVertexAttributeDivisorFeaturesEXT, pNext ) );
      return *this;
    }

    PhysicalDeviceVertexAttributeDivisorFeaturesEXT( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    PhysicalDeviceVertexAttributeDivisorFeaturesEXT& operator=( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeaturesEXT const *>(&rhs);
      return *this;
    }

    PhysicalDeviceVertexAttributeDivisorFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceVertexAttributeDivisorFeaturesEXT & setVertexAttributeInstanceRateDivisor( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexAttributeInstanceRateDivisor = vertexAttributeInstanceRateDivisor_;
      return *this;
    }

    PhysicalDeviceVertexAttributeDivisorFeaturesEXT & setVertexAttributeInstanceRateZeroDivisor( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexAttributeInstanceRateZeroDivisor = vertexAttributeInstanceRateZeroDivisor_;
      return *this;
    }

    operator VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT*>( this );
    }

    operator VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const& ) const = default;
#else
    bool operator==( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( vertexAttributeInstanceRateDivisor == rhs.vertexAttributeInstanceRateDivisor )
          && ( vertexAttributeInstanceRateZeroDivisor == rhs.vertexAttributeInstanceRateZeroDivisor );
    }

    bool operator!=( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesEXT;
    void* pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor = {};
    VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor = {};
  };
  static_assert( sizeof( PhysicalDeviceVertexAttributeDivisorFeaturesEXT ) == sizeof( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceVertexAttributeDivisorFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
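
  // Editorial note, not emitted by VulkanHppGenerator: the implicit conversion operators above let
  // the wrapper be passed straight to C entry points expecting the Vk struct;
  // 'fillDivisorFeatures' is a hypothetical C-style helper used only to illustrate the cast.
  //
  //   PhysicalDeviceVertexAttributeDivisorFeaturesEXT divisorFeatures;
  //   fillDivisorFeatures( &static_cast<VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT &>( divisorFeatures ) );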

  struct PhysicalDeviceVertexAttributeDivisorPropertiesEXT
  {
    VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorPropertiesEXT( uint32_t maxVertexAttribDivisor_ = {} ) VULKAN_HPP_NOEXCEPT
      : maxVertexAttribDivisor( maxVertexAttribDivisor_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorPropertiesEXT( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , maxVertexAttribDivisor( rhs.maxVertexAttribDivisor )
    {}

    PhysicalDeviceVertexAttributeDivisorPropertiesEXT & operator=( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceVertexAttributeDivisorPropertiesEXT ) - offsetof( PhysicalDeviceVertexAttributeDivisorPropertiesEXT, pNext ) );
      return *this;
    }

    PhysicalDeviceVertexAttributeDivisorPropertiesEXT( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    PhysicalDeviceVertexAttributeDivisorPropertiesEXT& operator=( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesEXT const *>(&rhs);
      return *this;
    }

    operator VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT*>( this );
    }

    operator VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const& ) const = default;
#else
    bool operator==( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( maxVertexAttribDivisor == rhs.maxVertexAttribDivisor );
    }

    bool operator!=( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT;
    void* pNext = {};
    uint32_t maxVertexAttribDivisor = {};
  };
  static_assert( sizeof( PhysicalDeviceVertexAttributeDivisorPropertiesEXT ) == sizeof( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceVertexAttributeDivisorPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
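
  // Editorial note, not emitted by VulkanHppGenerator: a sketch of clamping an application-chosen
  // divisor against this limit; 'physicalDevice' and 'requestedDivisor' are assumed to exist.
  //
  //   auto props       = physicalDevice.getProperties2<PhysicalDeviceProperties2,
  //                                                    PhysicalDeviceVertexAttributeDivisorPropertiesEXT>();
  //   uint32_t divisor = std::min( requestedDivisor,
  //                                props.get<PhysicalDeviceVertexAttributeDivisorPropertiesEXT>().maxVertexAttribDivisor );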

  struct PhysicalDeviceVulkanMemoryModelFeaturesKHR
  {
    VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkanMemoryModelFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ = {},
                                                                     VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ = {},
                                                                     VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ = {} ) VULKAN_HPP_NOEXCEPT
      : vulkanMemoryModel( vulkanMemoryModel_ )
      , vulkanMemoryModelDeviceScope( vulkanMemoryModelDeviceScope_ )
      , vulkanMemoryModelAvailabilityVisibilityChains( vulkanMemoryModelAvailabilityVisibilityChains_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkanMemoryModelFeaturesKHR( PhysicalDeviceVulkanMemoryModelFeaturesKHR const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , vulkanMemoryModel( rhs.vulkanMemoryModel )
      , vulkanMemoryModelDeviceScope( rhs.vulkanMemoryModelDeviceScope )
      , vulkanMemoryModelAvailabilityVisibilityChains( rhs.vulkanMemoryModelAvailabilityVisibilityChains )
    {}

    PhysicalDeviceVulkanMemoryModelFeaturesKHR & operator=( PhysicalDeviceVulkanMemoryModelFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceVulkanMemoryModelFeaturesKHR ) - offsetof( PhysicalDeviceVulkanMemoryModelFeaturesKHR, pNext ) );
      return *this;
    }

    PhysicalDeviceVulkanMemoryModelFeaturesKHR( VkPhysicalDeviceVulkanMemoryModelFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    PhysicalDeviceVulkanMemoryModelFeaturesKHR& operator=( VkPhysicalDeviceVulkanMemoryModelFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkanMemoryModelFeaturesKHR const *>(&rhs);
      return *this;
    }

    PhysicalDeviceVulkanMemoryModelFeaturesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceVulkanMemoryModelFeaturesKHR & setVulkanMemoryModel( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ ) VULKAN_HPP_NOEXCEPT
    {
      vulkanMemoryModel = vulkanMemoryModel_;
      return *this;
    }

    PhysicalDeviceVulkanMemoryModelFeaturesKHR & setVulkanMemoryModelDeviceScope( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ ) VULKAN_HPP_NOEXCEPT
    {
      vulkanMemoryModelDeviceScope = vulkanMemoryModelDeviceScope_;
      return *this;
    }

    PhysicalDeviceVulkanMemoryModelFeaturesKHR & setVulkanMemoryModelAvailabilityVisibilityChains( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ ) VULKAN_HPP_NOEXCEPT
    {
      vulkanMemoryModelAvailabilityVisibilityChains = vulkanMemoryModelAvailabilityVisibilityChains_;
      return *this;
    }

    operator VkPhysicalDeviceVulkanMemoryModelFeaturesKHR const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceVulkanMemoryModelFeaturesKHR*>( this );
    }

    operator VkPhysicalDeviceVulkanMemoryModelFeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceVulkanMemoryModelFeaturesKHR*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceVulkanMemoryModelFeaturesKHR const& ) const = default;
#else
    bool operator==( PhysicalDeviceVulkanMemoryModelFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( vulkanMemoryModel == rhs.vulkanMemoryModel )
          && ( vulkanMemoryModelDeviceScope == rhs.vulkanMemoryModelDeviceScope )
          && ( vulkanMemoryModelAvailabilityVisibilityChains == rhs.vulkanMemoryModelAvailabilityVisibilityChains );
    }

    bool operator!=( PhysicalDeviceVulkanMemoryModelFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkanMemoryModelFeaturesKHR;
    void* pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel = {};
    VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope = {};
    VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains = {};
  };
  static_assert( sizeof( PhysicalDeviceVulkanMemoryModelFeaturesKHR ) == sizeof( VkPhysicalDeviceVulkanMemoryModelFeaturesKHR ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceVulkanMemoryModelFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
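
  // Editorial note, not emitted by VulkanHppGenerator: a sketch of gating shader features on the
  // queried memory-model capabilities; 'physicalDevice' is an assumed vk::PhysicalDevice.
  //
  //   auto memoryModel = physicalDevice.getFeatures2<PhysicalDeviceFeatures2, PhysicalDeviceVulkanMemoryModelFeaturesKHR>()
  //                        .get<PhysicalDeviceVulkanMemoryModelFeaturesKHR>();
  //   bool deviceScopeUsable = memoryModel.vulkanMemoryModel && memoryModel.vulkanMemoryModelDeviceScope;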

  struct PhysicalDeviceYcbcrImageArraysFeaturesEXT
  {
    VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcrImageArraysFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays_ = {} ) VULKAN_HPP_NOEXCEPT
      : ycbcrImageArrays( ycbcrImageArrays_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcrImageArraysFeaturesEXT( PhysicalDeviceYcbcrImageArraysFeaturesEXT const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , ycbcrImageArrays( rhs.ycbcrImageArrays )
    {}

    PhysicalDeviceYcbcrImageArraysFeaturesEXT & operator=( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceYcbcrImageArraysFeaturesEXT ) - offsetof( PhysicalDeviceYcbcrImageArraysFeaturesEXT, pNext ) );
      return *this;
    }

    PhysicalDeviceYcbcrImageArraysFeaturesEXT( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    PhysicalDeviceYcbcrImageArraysFeaturesEXT& operator=( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrImageArraysFeaturesEXT const *>(&rhs);
      return *this;
    }

    PhysicalDeviceYcbcrImageArraysFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceYcbcrImageArraysFeaturesEXT & setYcbcrImageArrays( VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays_ ) VULKAN_HPP_NOEXCEPT
    {
      ycbcrImageArrays = ycbcrImageArrays_;
      return *this;
    }

    operator VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceYcbcrImageArraysFeaturesEXT*>( this );
    }

    operator VkPhysicalDeviceYcbcrImageArraysFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceYcbcrImageArraysFeaturesEXT*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceYcbcrImageArraysFeaturesEXT const& ) const = default;
#else
    bool operator==( PhysicalDeviceYcbcrImageArraysFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( ycbcrImageArrays == rhs.ycbcrImageArrays );
    }

    bool operator!=( PhysicalDeviceYcbcrImageArraysFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT;
    void* pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays = {};
  };
  static_assert( sizeof( PhysicalDeviceYcbcrImageArraysFeaturesEXT ) == sizeof( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceYcbcrImageArraysFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
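
  // Editorial note, not emitted by VulkanHppGenerator: the Vk-struct constructor above also allows
  // wrapping a C struct filled elsewhere; 'rawYcbcrFeatures' is a hypothetical
  // VkPhysicalDeviceYcbcrImageArraysFeaturesEXT obtained from C code.
  //
  //   PhysicalDeviceYcbcrImageArraysFeaturesEXT wrapped( rawYcbcrFeatures );
  //   bool ycbcrArraysSupported = ( wrapped.ycbcrImageArrays == VK_TRUE );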
} // namespace VULKAN_HPP_NAMESPACE