This commit adds Vulkan-Hpp as a library to the project. The headers are from a modified version of `VulkanHppGenerator`. They are broken into multiple files to avoid exceeding the Intellisense file size limit of Android Studio.
// Copyright (c) 2015-2019 The Khronos Group Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// ---- Exceptions to the Apache 2.0 License: ----
//
// As an exception, if you use this Software to generate code and portions of
// this Software are embedded into the generated code as a result, you may
// redistribute such product without providing attribution as would otherwise
// be required by Sections 4(a), 4(b) and 4(d) of the License.
//
// In addition, if you combine or link code generated by this Software with
// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
// ("`Combined Software`") and if a court of competent jurisdiction determines
// that the patent provision (Section 3), the indemnity provision (Section 9)
// or other Section of the License conflicts with the conditions of the
// applicable GPL or LGPL license, you may retroactively and prospectively
// choose to deem waived or otherwise exclude such Section(s) of the License,
// but only in their entirety and only with respect to the Combined Software.
//

// This header is generated from the Khronos Vulkan XML API Registry.

#pragma once

#include "../handles.hpp"
#include "VkAcquire.hpp"
#include "VkAcceleration.hpp"
#include "VkApplication.hpp"
#include "VkInitialize.hpp"
#include "VkAllocation.hpp"
#include "VkExternal.hpp"
#include "VkBind.hpp"
#include "VkCooperative.hpp"
#include "VkAndroid.hpp"
#include "VkImport.hpp"
#include "VkImage.hpp"
#include "VkDescriptor.hpp"
#include "VkBase.hpp"
#include "VkAttachment.hpp"
#include "VkBuffer.hpp"
#include "VkFramebuffer.hpp"
#include "VkCalibrated.hpp"
#include "VkDevice.hpp"
#include "VkCheckpoint.hpp"
#include "VkConformance.hpp"
#include "VkClear.hpp"
#include "VkCmd.hpp"
#include "VkExtension.hpp"
#include "VkCoarse.hpp"
#include "VkCommand.hpp"
#include "VkMetal.hpp"
#include "VkFormat.hpp"
#include "VkComponent.hpp"
#include "VkCopy.hpp"
#include "VkCompute.hpp"
#include "VkConditional.hpp"
#include "VkMapped.hpp"
#include "VkD3D.hpp"
#include "VkDebug.hpp"
#include "VkFence.hpp"
#include "VkDedicated.hpp"
#include "VkDraw.hpp"
#include "VkDispatch.hpp"
#include "VkDisplay.hpp"
#include "VkDrm.hpp"
#include "VkEvent.hpp"
#include "VkExport.hpp"
#include "VkExtent.hpp"
#include "VkFilter.hpp"
#include "VkGeometry.hpp"
#include "VkGraphics.hpp"
#include "VkHdr.hpp"
#include "VkHeadless.hpp"
#include "VkMultisample.hpp"
#include "VkI.hpp"
#include "VkIndirect.hpp"
#include "VkInput.hpp"
#include "VkMemory.hpp"
#include "VkInstance.hpp"
#include "VkLayer.hpp"
#include "VkMac.hpp"
#include "VkObject.hpp"

namespace VULKAN_HPP_NAMESPACE
{
  struct ObjectTableCreateInfoNVX
  {
    VULKAN_HPP_CONSTEXPR ObjectTableCreateInfoNVX( uint32_t objectCount_ = {},
                                                   const VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX* pObjectEntryTypes_ = {},
                                                   const uint32_t* pObjectEntryCounts_ = {},
                                                   const VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags_ = {},
                                                   uint32_t maxUniformBuffersPerDescriptor_ = {},
                                                   uint32_t maxStorageBuffersPerDescriptor_ = {},
                                                   uint32_t maxStorageImagesPerDescriptor_ = {},
                                                   uint32_t maxSampledImagesPerDescriptor_ = {},
                                                   uint32_t maxPipelineLayouts_ = {} ) VULKAN_HPP_NOEXCEPT
      : objectCount( objectCount_ )
      , pObjectEntryTypes( pObjectEntryTypes_ )
      , pObjectEntryCounts( pObjectEntryCounts_ )
      , pObjectEntryUsageFlags( pObjectEntryUsageFlags_ )
      , maxUniformBuffersPerDescriptor( maxUniformBuffersPerDescriptor_ )
      , maxStorageBuffersPerDescriptor( maxStorageBuffersPerDescriptor_ )
      , maxStorageImagesPerDescriptor( maxStorageImagesPerDescriptor_ )
      , maxSampledImagesPerDescriptor( maxSampledImagesPerDescriptor_ )
      , maxPipelineLayouts( maxPipelineLayouts_ )
    {}

    VULKAN_HPP_CONSTEXPR ObjectTableCreateInfoNVX( ObjectTableCreateInfoNVX const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , objectCount( rhs.objectCount )
      , pObjectEntryTypes( rhs.pObjectEntryTypes )
      , pObjectEntryCounts( rhs.pObjectEntryCounts )
      , pObjectEntryUsageFlags( rhs.pObjectEntryUsageFlags )
      , maxUniformBuffersPerDescriptor( rhs.maxUniformBuffersPerDescriptor )
      , maxStorageBuffersPerDescriptor( rhs.maxStorageBuffersPerDescriptor )
      , maxStorageImagesPerDescriptor( rhs.maxStorageImagesPerDescriptor )
      , maxSampledImagesPerDescriptor( rhs.maxSampledImagesPerDescriptor )
      , maxPipelineLayouts( rhs.maxPipelineLayouts )
    {}

    ObjectTableCreateInfoNVX & operator=( ObjectTableCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( ObjectTableCreateInfoNVX ) - offsetof( ObjectTableCreateInfoNVX, pNext ) );
      return *this;
    }

    ObjectTableCreateInfoNVX( VkObjectTableCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    ObjectTableCreateInfoNVX& operator=( VkObjectTableCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ObjectTableCreateInfoNVX const *>(&rhs);
      return *this;
    }

    ObjectTableCreateInfoNVX & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    ObjectTableCreateInfoNVX & setObjectCount( uint32_t objectCount_ ) VULKAN_HPP_NOEXCEPT
    {
      objectCount = objectCount_;
      return *this;
    }

    ObjectTableCreateInfoNVX & setPObjectEntryTypes( const VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX* pObjectEntryTypes_ ) VULKAN_HPP_NOEXCEPT
    {
      pObjectEntryTypes = pObjectEntryTypes_;
      return *this;
    }

    ObjectTableCreateInfoNVX & setPObjectEntryCounts( const uint32_t* pObjectEntryCounts_ ) VULKAN_HPP_NOEXCEPT
    {
      pObjectEntryCounts = pObjectEntryCounts_;
      return *this;
    }

    ObjectTableCreateInfoNVX & setPObjectEntryUsageFlags( const VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags_ ) VULKAN_HPP_NOEXCEPT
    {
      pObjectEntryUsageFlags = pObjectEntryUsageFlags_;
      return *this;
    }

    ObjectTableCreateInfoNVX & setMaxUniformBuffersPerDescriptor( uint32_t maxUniformBuffersPerDescriptor_ ) VULKAN_HPP_NOEXCEPT
    {
      maxUniformBuffersPerDescriptor = maxUniformBuffersPerDescriptor_;
      return *this;
    }

    ObjectTableCreateInfoNVX & setMaxStorageBuffersPerDescriptor( uint32_t maxStorageBuffersPerDescriptor_ ) VULKAN_HPP_NOEXCEPT
    {
      maxStorageBuffersPerDescriptor = maxStorageBuffersPerDescriptor_;
      return *this;
    }

    ObjectTableCreateInfoNVX & setMaxStorageImagesPerDescriptor( uint32_t maxStorageImagesPerDescriptor_ ) VULKAN_HPP_NOEXCEPT
    {
      maxStorageImagesPerDescriptor = maxStorageImagesPerDescriptor_;
      return *this;
    }

    ObjectTableCreateInfoNVX & setMaxSampledImagesPerDescriptor( uint32_t maxSampledImagesPerDescriptor_ ) VULKAN_HPP_NOEXCEPT
    {
      maxSampledImagesPerDescriptor = maxSampledImagesPerDescriptor_;
      return *this;
    }

    ObjectTableCreateInfoNVX & setMaxPipelineLayouts( uint32_t maxPipelineLayouts_ ) VULKAN_HPP_NOEXCEPT
    {
      maxPipelineLayouts = maxPipelineLayouts_;
      return *this;
    }

    operator VkObjectTableCreateInfoNVX const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkObjectTableCreateInfoNVX*>( this );
    }

    operator VkObjectTableCreateInfoNVX &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkObjectTableCreateInfoNVX*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( ObjectTableCreateInfoNVX const& ) const = default;
#else
    bool operator==( ObjectTableCreateInfoNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( objectCount == rhs.objectCount )
          && ( pObjectEntryTypes == rhs.pObjectEntryTypes )
          && ( pObjectEntryCounts == rhs.pObjectEntryCounts )
          && ( pObjectEntryUsageFlags == rhs.pObjectEntryUsageFlags )
          && ( maxUniformBuffersPerDescriptor == rhs.maxUniformBuffersPerDescriptor )
          && ( maxStorageBuffersPerDescriptor == rhs.maxStorageBuffersPerDescriptor )
          && ( maxStorageImagesPerDescriptor == rhs.maxStorageImagesPerDescriptor )
          && ( maxSampledImagesPerDescriptor == rhs.maxSampledImagesPerDescriptor )
          && ( maxPipelineLayouts == rhs.maxPipelineLayouts );
    }

    bool operator!=( ObjectTableCreateInfoNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eObjectTableCreateInfoNVX;
    const void* pNext = {};
    uint32_t objectCount = {};
    const VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX* pObjectEntryTypes = {};
    const uint32_t* pObjectEntryCounts = {};
    const VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags = {};
    uint32_t maxUniformBuffersPerDescriptor = {};
    uint32_t maxStorageBuffersPerDescriptor = {};
    uint32_t maxStorageImagesPerDescriptor = {};
    uint32_t maxSampledImagesPerDescriptor = {};
    uint32_t maxPipelineLayouts = {};
  };
  static_assert( sizeof( ObjectTableCreateInfoNVX ) == sizeof( VkObjectTableCreateInfoNVX ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ObjectTableCreateInfoNVX>::value, "struct wrapper is not a standard layout!" );
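
  // Illustrative sketch only, not part of the generated header: shows how the fluent
  // setters above can be chained to populate an ObjectTableCreateInfoNVX. The function
  // name and the caller-supplied parameters below are hypothetical.
  inline ObjectTableCreateInfoNVX makeObjectTableCreateInfoNVXExample(
      uint32_t objectCount,
      const VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX* entryTypes,
      const uint32_t* entryCounts,
      const VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX* entryUsageFlags ) VULKAN_HPP_NOEXCEPT
  {
    // Each setter returns *this, so the whole create-info can be built in one expression;
    // the per-descriptor limits and pipeline-layout count here are arbitrary example values.
    return ObjectTableCreateInfoNVX{}
      .setObjectCount( objectCount )
      .setPObjectEntryTypes( entryTypes )
      .setPObjectEntryCounts( entryCounts )
      .setPObjectEntryUsageFlags( entryUsageFlags )
      .setMaxUniformBuffersPerDescriptor( 1 )
      .setMaxStorageBuffersPerDescriptor( 1 )
      .setMaxStorageImagesPerDescriptor( 1 )
      .setMaxSampledImagesPerDescriptor( 1 )
      .setMaxPipelineLayouts( 1 );
  }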

  struct ObjectTableEntryNVX
  {
    VULKAN_HPP_CONSTEXPR ObjectTableEntryNVX( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet,
                                              VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ = {} ) VULKAN_HPP_NOEXCEPT
      : type( type_ )
      , flags( flags_ )
    {}

    VULKAN_HPP_CONSTEXPR ObjectTableEntryNVX( ObjectTableEntryNVX const& rhs ) VULKAN_HPP_NOEXCEPT
      : type( rhs.type )
      , flags( rhs.flags )
    {}

    ObjectTableEntryNVX & operator=( ObjectTableEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( static_cast<void*>(this), &rhs, sizeof( ObjectTableEntryNVX ) );
      return *this;
    }

    ObjectTableEntryNVX( VkObjectTableEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    ObjectTableEntryNVX& operator=( VkObjectTableEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ObjectTableEntryNVX const *>(&rhs);
      return *this;
    }

    ObjectTableEntryNVX & setType( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ ) VULKAN_HPP_NOEXCEPT
    {
      type = type_;
      return *this;
    }

    ObjectTableEntryNVX & setFlags( VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    operator VkObjectTableEntryNVX const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkObjectTableEntryNVX*>( this );
    }

    operator VkObjectTableEntryNVX &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkObjectTableEntryNVX*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( ObjectTableEntryNVX const& ) const = default;
#else
    bool operator==( ObjectTableEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( type == rhs.type )
          && ( flags == rhs.flags );
    }

    bool operator!=( ObjectTableEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet;
    VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags = {};
  };
  static_assert( sizeof( ObjectTableEntryNVX ) == sizeof( VkObjectTableEntryNVX ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ObjectTableEntryNVX>::value, "struct wrapper is not a standard layout!" );

  struct ObjectTableDescriptorSetEntryNVX
  {
    VULKAN_HPP_CONSTEXPR ObjectTableDescriptorSetEntryNVX( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet,
                                                           VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ = {},
                                                           VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {},
                                                           VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet_ = {} ) VULKAN_HPP_NOEXCEPT
      : type( type_ )
      , flags( flags_ )
      , pipelineLayout( pipelineLayout_ )
      , descriptorSet( descriptorSet_ )
    {}

    VULKAN_HPP_CONSTEXPR ObjectTableDescriptorSetEntryNVX( ObjectTableDescriptorSetEntryNVX const& rhs ) VULKAN_HPP_NOEXCEPT
      : type( rhs.type )
      , flags( rhs.flags )
      , pipelineLayout( rhs.pipelineLayout )
      , descriptorSet( rhs.descriptorSet )
    {}

    explicit ObjectTableDescriptorSetEntryNVX( ObjectTableEntryNVX const& objectTableEntryNVX,
                                               VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {},
                                               VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet_ = {} )
      : type( objectTableEntryNVX.type )
      , flags( objectTableEntryNVX.flags )
      , pipelineLayout( pipelineLayout_ )
      , descriptorSet( descriptorSet_ )
    {}

    ObjectTableDescriptorSetEntryNVX & operator=( ObjectTableDescriptorSetEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( static_cast<void*>(this), &rhs, sizeof( ObjectTableDescriptorSetEntryNVX ) );
      return *this;
    }

    ObjectTableDescriptorSetEntryNVX( VkObjectTableDescriptorSetEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    ObjectTableDescriptorSetEntryNVX& operator=( VkObjectTableDescriptorSetEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ObjectTableDescriptorSetEntryNVX const *>(&rhs);
      return *this;
    }

    ObjectTableDescriptorSetEntryNVX & setType( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ ) VULKAN_HPP_NOEXCEPT
    {
      type = type_;
      return *this;
    }

    ObjectTableDescriptorSetEntryNVX & setFlags( VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    ObjectTableDescriptorSetEntryNVX & setPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      pipelineLayout = pipelineLayout_;
      return *this;
    }

    ObjectTableDescriptorSetEntryNVX & setDescriptorSet( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorSet = descriptorSet_;
      return *this;
    }

    operator VkObjectTableDescriptorSetEntryNVX const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkObjectTableDescriptorSetEntryNVX*>( this );
    }

    operator VkObjectTableDescriptorSetEntryNVX &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkObjectTableDescriptorSetEntryNVX*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( ObjectTableDescriptorSetEntryNVX const& ) const = default;
#else
    bool operator==( ObjectTableDescriptorSetEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( type == rhs.type )
          && ( flags == rhs.flags )
          && ( pipelineLayout == rhs.pipelineLayout )
          && ( descriptorSet == rhs.descriptorSet );
    }

    bool operator!=( ObjectTableDescriptorSetEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet;
    VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags = {};
    VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout = {};
    VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet = {};
  };
  static_assert( sizeof( ObjectTableDescriptorSetEntryNVX ) == sizeof( VkObjectTableDescriptorSetEntryNVX ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ObjectTableDescriptorSetEntryNVX>::value, "struct wrapper is not a standard layout!" );
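
  // Illustrative sketch only, not part of the generated header: the explicit constructor
  // above widens a generic ObjectTableEntryNVX into a descriptor-set entry by adding the
  // pipeline-layout and descriptor-set handles. The function name and parameters are hypothetical.
  inline ObjectTableDescriptorSetEntryNVX makeDescriptorSetEntryExample(
      VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX usage,
      VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout,
      VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet )
  {
    // The base entry fixes the entry type and usage flags; the derived entry carries the handles.
    ObjectTableEntryNVX base( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet, usage );
    return ObjectTableDescriptorSetEntryNVX( base, pipelineLayout, descriptorSet );
  }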

  struct ObjectTableIndexBufferEntryNVX
  {
    VULKAN_HPP_CONSTEXPR ObjectTableIndexBufferEntryNVX( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet,
                                                         VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ = {},
                                                         VULKAN_HPP_NAMESPACE::Buffer buffer_ = {},
                                                         VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16 ) VULKAN_HPP_NOEXCEPT
      : type( type_ )
      , flags( flags_ )
      , buffer( buffer_ )
      , indexType( indexType_ )
    {}

    VULKAN_HPP_CONSTEXPR ObjectTableIndexBufferEntryNVX( ObjectTableIndexBufferEntryNVX const& rhs ) VULKAN_HPP_NOEXCEPT
      : type( rhs.type )
      , flags( rhs.flags )
      , buffer( rhs.buffer )
      , indexType( rhs.indexType )
    {}

    explicit ObjectTableIndexBufferEntryNVX( ObjectTableEntryNVX const& objectTableEntryNVX,
                                             VULKAN_HPP_NAMESPACE::Buffer buffer_ = {},
                                             VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16 )
      : type( objectTableEntryNVX.type )
      , flags( objectTableEntryNVX.flags )
      , buffer( buffer_ )
      , indexType( indexType_ )
    {}

    ObjectTableIndexBufferEntryNVX & operator=( ObjectTableIndexBufferEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( static_cast<void*>(this), &rhs, sizeof( ObjectTableIndexBufferEntryNVX ) );
      return *this;
    }

    ObjectTableIndexBufferEntryNVX( VkObjectTableIndexBufferEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    ObjectTableIndexBufferEntryNVX& operator=( VkObjectTableIndexBufferEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ObjectTableIndexBufferEntryNVX const *>(&rhs);
      return *this;
    }

    ObjectTableIndexBufferEntryNVX & setType( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ ) VULKAN_HPP_NOEXCEPT
    {
      type = type_;
      return *this;
    }

    ObjectTableIndexBufferEntryNVX & setFlags( VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    ObjectTableIndexBufferEntryNVX & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
    {
      buffer = buffer_;
      return *this;
    }

    ObjectTableIndexBufferEntryNVX & setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT
    {
      indexType = indexType_;
      return *this;
    }

    operator VkObjectTableIndexBufferEntryNVX const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkObjectTableIndexBufferEntryNVX*>( this );
    }

    operator VkObjectTableIndexBufferEntryNVX &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkObjectTableIndexBufferEntryNVX*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( ObjectTableIndexBufferEntryNVX const& ) const = default;
#else
    bool operator==( ObjectTableIndexBufferEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( type == rhs.type )
          && ( flags == rhs.flags )
          && ( buffer == rhs.buffer )
          && ( indexType == rhs.indexType );
    }

    bool operator!=( ObjectTableIndexBufferEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet;
    VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags = {};
    VULKAN_HPP_NAMESPACE::Buffer buffer = {};
    VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16;
  };
  static_assert( sizeof( ObjectTableIndexBufferEntryNVX ) == sizeof( VkObjectTableIndexBufferEntryNVX ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ObjectTableIndexBufferEntryNVX>::value, "struct wrapper is not a standard layout!" );

  struct ObjectTablePipelineEntryNVX
  {
    VULKAN_HPP_CONSTEXPR ObjectTablePipelineEntryNVX( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet,
                                                      VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ = {},
                                                      VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {} ) VULKAN_HPP_NOEXCEPT
      : type( type_ )
      , flags( flags_ )
      , pipeline( pipeline_ )
    {}

    VULKAN_HPP_CONSTEXPR ObjectTablePipelineEntryNVX( ObjectTablePipelineEntryNVX const& rhs ) VULKAN_HPP_NOEXCEPT
      : type( rhs.type )
      , flags( rhs.flags )
      , pipeline( rhs.pipeline )
    {}

    explicit ObjectTablePipelineEntryNVX( ObjectTableEntryNVX const& objectTableEntryNVX,
                                          VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {} )
      : type( objectTableEntryNVX.type )
      , flags( objectTableEntryNVX.flags )
      , pipeline( pipeline_ )
    {}

    ObjectTablePipelineEntryNVX & operator=( ObjectTablePipelineEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( static_cast<void*>(this), &rhs, sizeof( ObjectTablePipelineEntryNVX ) );
      return *this;
    }

    ObjectTablePipelineEntryNVX( VkObjectTablePipelineEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    ObjectTablePipelineEntryNVX& operator=( VkObjectTablePipelineEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ObjectTablePipelineEntryNVX const *>(&rhs);
      return *this;
    }

    ObjectTablePipelineEntryNVX & setType( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ ) VULKAN_HPP_NOEXCEPT
    {
      type = type_;
      return *this;
    }

    ObjectTablePipelineEntryNVX & setFlags( VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    ObjectTablePipelineEntryNVX & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
    {
      pipeline = pipeline_;
      return *this;
    }

    operator VkObjectTablePipelineEntryNVX const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkObjectTablePipelineEntryNVX*>( this );
    }

    operator VkObjectTablePipelineEntryNVX &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkObjectTablePipelineEntryNVX*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( ObjectTablePipelineEntryNVX const& ) const = default;
#else
    bool operator==( ObjectTablePipelineEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( type == rhs.type )
          && ( flags == rhs.flags )
          && ( pipeline == rhs.pipeline );
    }

    bool operator!=( ObjectTablePipelineEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet;
    VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags = {};
    VULKAN_HPP_NAMESPACE::Pipeline pipeline = {};
  };
  static_assert( sizeof( ObjectTablePipelineEntryNVX ) == sizeof( VkObjectTablePipelineEntryNVX ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ObjectTablePipelineEntryNVX>::value, "struct wrapper is not a standard layout!" );

  struct ObjectTablePushConstantEntryNVX
  {
    VULKAN_HPP_CONSTEXPR ObjectTablePushConstantEntryNVX( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet,
                                                          VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ = {},
                                                          VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {},
                                                          VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {} ) VULKAN_HPP_NOEXCEPT
      : type( type_ )
      , flags( flags_ )
      , pipelineLayout( pipelineLayout_ )
      , stageFlags( stageFlags_ )
    {}

    VULKAN_HPP_CONSTEXPR ObjectTablePushConstantEntryNVX( ObjectTablePushConstantEntryNVX const& rhs ) VULKAN_HPP_NOEXCEPT
      : type( rhs.type )
      , flags( rhs.flags )
      , pipelineLayout( rhs.pipelineLayout )
      , stageFlags( rhs.stageFlags )
    {}

    explicit ObjectTablePushConstantEntryNVX( ObjectTableEntryNVX const& objectTableEntryNVX,
                                              VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {},
                                              VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {} )
      : type( objectTableEntryNVX.type )
      , flags( objectTableEntryNVX.flags )
      , pipelineLayout( pipelineLayout_ )
      , stageFlags( stageFlags_ )
    {}

    ObjectTablePushConstantEntryNVX & operator=( ObjectTablePushConstantEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( static_cast<void*>(this), &rhs, sizeof( ObjectTablePushConstantEntryNVX ) );
      return *this;
    }

    ObjectTablePushConstantEntryNVX( VkObjectTablePushConstantEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    ObjectTablePushConstantEntryNVX& operator=( VkObjectTablePushConstantEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ObjectTablePushConstantEntryNVX const *>(&rhs);
      return *this;
    }

    ObjectTablePushConstantEntryNVX & setType( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ ) VULKAN_HPP_NOEXCEPT
    {
      type = type_;
      return *this;
    }

    ObjectTablePushConstantEntryNVX & setFlags( VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    ObjectTablePushConstantEntryNVX & setPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      pipelineLayout = pipelineLayout_;
      return *this;
    }

    ObjectTablePushConstantEntryNVX & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT
    {
      stageFlags = stageFlags_;
      return *this;
    }

    operator VkObjectTablePushConstantEntryNVX const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkObjectTablePushConstantEntryNVX*>( this );
    }

    operator VkObjectTablePushConstantEntryNVX &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkObjectTablePushConstantEntryNVX*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( ObjectTablePushConstantEntryNVX const& ) const = default;
#else
    bool operator==( ObjectTablePushConstantEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( type == rhs.type )
          && ( flags == rhs.flags )
          && ( pipelineLayout == rhs.pipelineLayout )
          && ( stageFlags == rhs.stageFlags );
    }

    bool operator!=( ObjectTablePushConstantEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet;
    VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags = {};
    VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout = {};
    VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {};
  };
  static_assert( sizeof( ObjectTablePushConstantEntryNVX ) == sizeof( VkObjectTablePushConstantEntryNVX ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ObjectTablePushConstantEntryNVX>::value, "struct wrapper is not a standard layout!" );
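
  // Illustrative sketch only, not part of the generated header: every wrapper above defines
  // const-reference conversion operators to its C counterpart, so a wrapper can be handed
  // straight to C Vulkan entry points. The helper name below is hypothetical; it simply
  // obtains the equivalent C struct pointer without making any Vulkan call.
  inline const VkObjectTablePushConstantEntryNVX* toCStructExample( ObjectTablePushConstantEntryNVX const& entry ) VULKAN_HPP_NOEXCEPT
  {
    // static_cast invokes the user-defined conversion operator; the resulting reference
    // aliases 'entry', so the returned pointer stays valid as long as 'entry' does.
    return &static_cast<const VkObjectTablePushConstantEntryNVX&>( entry );
  }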

  struct ObjectTableVertexBufferEntryNVX
  {
    VULKAN_HPP_CONSTEXPR ObjectTableVertexBufferEntryNVX( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet,
                                                          VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ = {},
                                                          VULKAN_HPP_NAMESPACE::Buffer buffer_ = {} ) VULKAN_HPP_NOEXCEPT
      : type( type_ )
      , flags( flags_ )
      , buffer( buffer_ )
    {}

    VULKAN_HPP_CONSTEXPR ObjectTableVertexBufferEntryNVX( ObjectTableVertexBufferEntryNVX const& rhs ) VULKAN_HPP_NOEXCEPT
      : type( rhs.type )
      , flags( rhs.flags )
      , buffer( rhs.buffer )
    {}

    explicit ObjectTableVertexBufferEntryNVX( ObjectTableEntryNVX const& objectTableEntryNVX,
                                              VULKAN_HPP_NAMESPACE::Buffer buffer_ = {} )
      : type( objectTableEntryNVX.type )
      , flags( objectTableEntryNVX.flags )
      , buffer( buffer_ )
    {}

    ObjectTableVertexBufferEntryNVX & operator=( ObjectTableVertexBufferEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( static_cast<void*>(this), &rhs, sizeof( ObjectTableVertexBufferEntryNVX ) );
      return *this;
    }

    ObjectTableVertexBufferEntryNVX( VkObjectTableVertexBufferEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    ObjectTableVertexBufferEntryNVX& operator=( VkObjectTableVertexBufferEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ObjectTableVertexBufferEntryNVX const *>(&rhs);
      return *this;
    }

    ObjectTableVertexBufferEntryNVX & setType( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ ) VULKAN_HPP_NOEXCEPT
    {
      type = type_;
      return *this;
    }

    ObjectTableVertexBufferEntryNVX & setFlags( VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    ObjectTableVertexBufferEntryNVX & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
    {
      buffer = buffer_;
      return *this;
    }

    operator VkObjectTableVertexBufferEntryNVX const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkObjectTableVertexBufferEntryNVX*>( this );
    }

    operator VkObjectTableVertexBufferEntryNVX &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkObjectTableVertexBufferEntryNVX*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( ObjectTableVertexBufferEntryNVX const& ) const = default;
#else
    bool operator==( ObjectTableVertexBufferEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( type == rhs.type )
          && ( flags == rhs.flags )
          && ( buffer == rhs.buffer );
    }

    bool operator!=( ObjectTableVertexBufferEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet;
    VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags = {};
    VULKAN_HPP_NAMESPACE::Buffer buffer = {};
  };
  static_assert( sizeof( ObjectTableVertexBufferEntryNVX ) == sizeof( VkObjectTableVertexBufferEntryNVX ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ObjectTableVertexBufferEntryNVX>::value, "struct wrapper is not a standard layout!" );
} // namespace VULKAN_HPP_NAMESPACE