// Copyright (c) 2015-2019 The Khronos Group Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// ---- Exceptions to the Apache 2.0 License: ----
//
// As an exception, if you use this Software to generate code and portions of
// this Software are embedded into the generated code as a result, you may
// redistribute such product without providing attribution as would otherwise
// be required by Sections 4(a), 4(b) and 4(d) of the License.
//
// In addition, if you combine or link code generated by this Software with
// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
// ("`Combined Software`") and if a court of competent jurisdiction determines
// that the patent provision (Section 3), the indemnity provision (Section 9)
// or other Section of the License conflicts with the conditions of the
// applicable GPL or LGPL license, you may retroactively and prospectively
// choose to deem waived or otherwise exclude such Section(s) of the License,
// but only in their entirety and only with respect to the Combined Software.
//
// This header is generated from the Khronos Vulkan XML API Registry.

#pragma once

#include "../handles.hpp"
#include "VkAcquire.hpp"
#include "VkAcceleration.hpp"
#include "VkApplication.hpp"
#include "VkInitialize.hpp"
#include "VkAllocation.hpp"
#include "VkExternal.hpp"
#include "VkBind.hpp"
#include "VkObject.hpp"
#include "VkCooperative.hpp"
#include "VkAndroid.hpp"
#include "VkImport.hpp"
#include "VkImage.hpp"
#include "VkDescriptor.hpp"
#include "VkBase.hpp"
#include "VkAttachment.hpp"
#include "VkBuffer.hpp"
#include "VkFramebuffer.hpp"
#include "VkCalibrated.hpp"
#include "VkDevice.hpp"
#include "VkCheckpoint.hpp"
#include "VkConformance.hpp"
#include "VkClear.hpp"
#include "VkCmd.hpp"
#include "VkCoarse.hpp"
#include "VkExtension.hpp"
#include "VkCommand.hpp"
#include "VkFormat.hpp"
#include "VkMetal.hpp"
#include "VkComponent.hpp"
#include "VkCopy.hpp"
#include "VkCompute.hpp"
#include "VkPast.hpp"
#include "VkConditional.hpp"
#include "VkMapped.hpp"
#include "VkD3D.hpp"
#include "VkDebug.hpp"
#include "VkDedicated.hpp"
#include "VkFence.hpp"
#include "VkDispatch.hpp"
#include "VkPipeline.hpp"
#include "VkDraw.hpp"
#include "VkDisplay.hpp"
#include "VkDrm.hpp"
#include "VkEvent.hpp"
#include "VkExport.hpp"
#include "VkRay.hpp"
#include "VkExtent.hpp"
#include "VkPerformance.hpp"
#include "VkFilter.hpp"
#include "VkRender.hpp"
#include "VkGeometry.hpp"
#include "VkGraphics.hpp"
#include "VkHdr.hpp"
#include "VkHeadless.hpp"
#include "VkMultisample.hpp"
#include "VkI.hpp"
#include "VkIndirect.hpp"
#include "VkInput.hpp"
#include "VkOffset.hpp"
#include "VkMemory.hpp"
#include "VkInstance.hpp"
#include "VkLayer.hpp"
#include "VkMac.hpp"
#include "VkPhysical.hpp"
#include "VkPresent.hpp"
#include "VkProtected.hpp"
#include "VkPush.hpp"
#include "VkQuery.hpp"
#include "VkQueue.hpp"
#include "VkRect.hpp"
#include "VkRefresh.hpp"
#include "VkSample.hpp"
#include "VkSampler.hpp"
#include "VkSemaphore.hpp"
#include "VkShader.hpp"

namespace VULKAN_HPP_NAMESPACE
{
  struct ShaderModuleCreateInfo
  {
    VULKAN_HPP_CONSTEXPR ShaderModuleCreateInfo( VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags_ = {},
                                                 size_t codeSize_ = {},
                                                 const uint32_t* pCode_ = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , codeSize( codeSize_ )
      , pCode( pCode_ )
    {}

    VULKAN_HPP_CONSTEXPR ShaderModuleCreateInfo( ShaderModuleCreateInfo const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , flags( rhs.flags )
      , codeSize( rhs.codeSize )
      , pCode( rhs.pCode )
    {}

    ShaderModuleCreateInfo & operator=( ShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( ShaderModuleCreateInfo ) - offsetof( ShaderModuleCreateInfo, pNext ) );
      return *this;
    }

    ShaderModuleCreateInfo( VkShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    ShaderModuleCreateInfo& operator=( VkShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo const *>( &rhs );
      return *this;
    }

    ShaderModuleCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    ShaderModuleCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    ShaderModuleCreateInfo & setCodeSize( size_t codeSize_ ) VULKAN_HPP_NOEXCEPT
    {
      codeSize = codeSize_;
      return *this;
    }

    ShaderModuleCreateInfo & setPCode( const uint32_t* pCode_ ) VULKAN_HPP_NOEXCEPT
    {
      pCode = pCode_;
      return *this;
    }

    operator VkShaderModuleCreateInfo const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkShaderModuleCreateInfo*>( this );
    }

    operator VkShaderModuleCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkShaderModuleCreateInfo*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( ShaderModuleCreateInfo const& ) const = default;
#else
    bool operator==( ShaderModuleCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( flags == rhs.flags )
          && ( codeSize == rhs.codeSize )
          && ( pCode == rhs.pCode );
    }

    bool operator!=( ShaderModuleCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eShaderModuleCreateInfo;
    const void* pNext = {};
    VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags = {};
    size_t codeSize = {};
    const uint32_t* pCode = {};
  };
  static_assert( sizeof( ShaderModuleCreateInfo ) == sizeof( VkShaderModuleCreateInfo ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ShaderModuleCreateInfo>::value, "struct wrapper is not a standard layout!" );
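
  // Usage sketch (not part of the generated API): filling a
  // vk::ShaderModuleCreateInfo from a SPIR-V blob via the chained setters.
  // `device` and the `loadSpirvFromDisk` helper are hypothetical; note that
  // codeSize is a byte count, while pCode points at 32-bit SPIR-V words.
  //
  //   std::vector<uint32_t> spirv = loadSpirvFromDisk( "shader.spv" );  // hypothetical helper
  //   vk::ShaderModuleCreateInfo createInfo = vk::ShaderModuleCreateInfo()
  //     .setCodeSize( spirv.size() * sizeof( uint32_t ) )
  //     .setPCode( spirv.data() );
  //   vk::UniqueShaderModule shaderModule = device.createShaderModuleUnique( createInfo );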

  struct ShaderModuleValidationCacheCreateInfoEXT
  {
    VULKAN_HPP_CONSTEXPR ShaderModuleValidationCacheCreateInfoEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache_ = {} ) VULKAN_HPP_NOEXCEPT
      : validationCache( validationCache_ )
    {}

    VULKAN_HPP_CONSTEXPR ShaderModuleValidationCacheCreateInfoEXT( ShaderModuleValidationCacheCreateInfoEXT const& rhs ) VULKAN_HPP_NOEXCEPT
      : pNext( rhs.pNext )
      , validationCache( rhs.validationCache )
    {}

    ShaderModuleValidationCacheCreateInfoEXT & operator=( ShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( &pNext, &rhs.pNext, sizeof( ShaderModuleValidationCacheCreateInfoEXT ) - offsetof( ShaderModuleValidationCacheCreateInfoEXT, pNext ) );
      return *this;
    }

    ShaderModuleValidationCacheCreateInfoEXT( VkShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    ShaderModuleValidationCacheCreateInfoEXT& operator=( VkShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShaderModuleValidationCacheCreateInfoEXT const *>( &rhs );
      return *this;
    }

    ShaderModuleValidationCacheCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    ShaderModuleValidationCacheCreateInfoEXT & setValidationCache( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache_ ) VULKAN_HPP_NOEXCEPT
    {
      validationCache = validationCache_;
      return *this;
    }

    operator VkShaderModuleValidationCacheCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkShaderModuleValidationCacheCreateInfoEXT*>( this );
    }

    operator VkShaderModuleValidationCacheCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkShaderModuleValidationCacheCreateInfoEXT*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( ShaderModuleValidationCacheCreateInfoEXT const& ) const = default;
#else
    bool operator==( ShaderModuleValidationCacheCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( validationCache == rhs.validationCache );
    }

    bool operator!=( ShaderModuleValidationCacheCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eShaderModuleValidationCacheCreateInfoEXT;
    const void* pNext = {};
    VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache = {};
  };
  static_assert( sizeof( ShaderModuleValidationCacheCreateInfoEXT ) == sizeof( VkShaderModuleValidationCacheCreateInfoEXT ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ShaderModuleValidationCacheCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
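
  // Usage sketch (not part of the generated API): chaining a validation cache
  // (VK_EXT_validation_cache) into shader module creation through pNext.
  // `validationCache` is a hypothetical, previously created
  // vk::ValidationCacheEXT; `spirv` is the blob from the previous sketch.
  //
  //   vk::ShaderModuleValidationCacheCreateInfoEXT cacheInfo =
  //     vk::ShaderModuleValidationCacheCreateInfoEXT().setValidationCache( validationCache );
  //   vk::ShaderModuleCreateInfo createInfo = vk::ShaderModuleCreateInfo()
  //     .setPNext( &cacheInfo )
  //     .setCodeSize( spirv.size() * sizeof( uint32_t ) )
  //     .setPCode( spirv.data() );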

  struct ShaderResourceUsageAMD
  {
    VULKAN_HPP_CONSTEXPR ShaderResourceUsageAMD( uint32_t numUsedVgprs_ = {},
                                                 uint32_t numUsedSgprs_ = {},
                                                 uint32_t ldsSizePerLocalWorkGroup_ = {},
                                                 size_t ldsUsageSizeInBytes_ = {},
                                                 size_t scratchMemUsageInBytes_ = {} ) VULKAN_HPP_NOEXCEPT
      : numUsedVgprs( numUsedVgprs_ )
      , numUsedSgprs( numUsedSgprs_ )
      , ldsSizePerLocalWorkGroup( ldsSizePerLocalWorkGroup_ )
      , ldsUsageSizeInBytes( ldsUsageSizeInBytes_ )
      , scratchMemUsageInBytes( scratchMemUsageInBytes_ )
    {}

    VULKAN_HPP_CONSTEXPR ShaderResourceUsageAMD( ShaderResourceUsageAMD const& rhs ) VULKAN_HPP_NOEXCEPT
      : numUsedVgprs( rhs.numUsedVgprs )
      , numUsedSgprs( rhs.numUsedSgprs )
      , ldsSizePerLocalWorkGroup( rhs.ldsSizePerLocalWorkGroup )
      , ldsUsageSizeInBytes( rhs.ldsUsageSizeInBytes )
      , scratchMemUsageInBytes( rhs.scratchMemUsageInBytes )
    {}

    ShaderResourceUsageAMD & operator=( ShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( static_cast<void*>( this ), &rhs, sizeof( ShaderResourceUsageAMD ) );
      return *this;
    }

    ShaderResourceUsageAMD( VkShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    ShaderResourceUsageAMD& operator=( VkShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD const *>( &rhs );
      return *this;
    }

    operator VkShaderResourceUsageAMD const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkShaderResourceUsageAMD*>( this );
    }

    operator VkShaderResourceUsageAMD &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkShaderResourceUsageAMD*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( ShaderResourceUsageAMD const& ) const = default;
#else
    bool operator==( ShaderResourceUsageAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( numUsedVgprs == rhs.numUsedVgprs )
          && ( numUsedSgprs == rhs.numUsedSgprs )
          && ( ldsSizePerLocalWorkGroup == rhs.ldsSizePerLocalWorkGroup )
          && ( ldsUsageSizeInBytes == rhs.ldsUsageSizeInBytes )
          && ( scratchMemUsageInBytes == rhs.scratchMemUsageInBytes );
    }

    bool operator!=( ShaderResourceUsageAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint32_t numUsedVgprs = {};
    uint32_t numUsedSgprs = {};
    uint32_t ldsSizePerLocalWorkGroup = {};
    size_t ldsUsageSizeInBytes = {};
    size_t scratchMemUsageInBytes = {};
  };
  static_assert( sizeof( ShaderResourceUsageAMD ) == sizeof( VkShaderResourceUsageAMD ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ShaderResourceUsageAMD>::value, "struct wrapper is not a standard layout!" );
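
  // Usage sketch (not part of the generated API): ShaderResourceUsageAMD is a
  // pure output structure (no sType/pNext); it arrives embedded in
  // vk::ShaderStatisticsInfoAMD, as in the sketch after that struct below.
  // A hypothetical helper that inspects its fields:
  //
  //   void logUsage( vk::ShaderResourceUsageAMD const & usage )  // hypothetical helper
  //   {
  //     std::cout << "VGPRs used:    " << usage.numUsedVgprs           << "\n"
  //               << "SGPRs used:    " << usage.numUsedSgprs           << "\n"
  //               << "LDS bytes:     " << usage.ldsUsageSizeInBytes    << "\n"
  //               << "scratch bytes: " << usage.scratchMemUsageInBytes << "\n";
  //   }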

  struct ShaderStatisticsInfoAMD
  {
    VULKAN_HPP_CONSTEXPR_14 ShaderStatisticsInfoAMD( VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStageMask_ = {},
                                                     VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD resourceUsage_ = {},
                                                     uint32_t numPhysicalVgprs_ = {},
                                                     uint32_t numPhysicalSgprs_ = {},
                                                     uint32_t numAvailableVgprs_ = {},
                                                     uint32_t numAvailableSgprs_ = {},
                                                     std::array<uint32_t, 3> const& computeWorkGroupSize_ = {} ) VULKAN_HPP_NOEXCEPT
      : shaderStageMask( shaderStageMask_ )
      , resourceUsage( resourceUsage_ )
      , numPhysicalVgprs( numPhysicalVgprs_ )
      , numPhysicalSgprs( numPhysicalSgprs_ )
      , numAvailableVgprs( numAvailableVgprs_ )
      , numAvailableSgprs( numAvailableSgprs_ )
      , computeWorkGroupSize{}
    {
      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint32_t, 3>::copy( computeWorkGroupSize, computeWorkGroupSize_ );
    }

    VULKAN_HPP_CONSTEXPR_14 ShaderStatisticsInfoAMD( ShaderStatisticsInfoAMD const& rhs ) VULKAN_HPP_NOEXCEPT
      : shaderStageMask( rhs.shaderStageMask )
      , resourceUsage( rhs.resourceUsage )
      , numPhysicalVgprs( rhs.numPhysicalVgprs )
      , numPhysicalSgprs( rhs.numPhysicalSgprs )
      , numAvailableVgprs( rhs.numAvailableVgprs )
      , numAvailableSgprs( rhs.numAvailableSgprs )
      , computeWorkGroupSize{}
    {
      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint32_t, 3>::copy( computeWorkGroupSize, rhs.computeWorkGroupSize );
    }

    ShaderStatisticsInfoAMD & operator=( ShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( static_cast<void*>( this ), &rhs, sizeof( ShaderStatisticsInfoAMD ) );
      return *this;
    }

    ShaderStatisticsInfoAMD( VkShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    ShaderStatisticsInfoAMD& operator=( VkShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShaderStatisticsInfoAMD const *>( &rhs );
      return *this;
    }

    operator VkShaderStatisticsInfoAMD const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkShaderStatisticsInfoAMD*>( this );
    }

    operator VkShaderStatisticsInfoAMD &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkShaderStatisticsInfoAMD*>( this );
    }

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( ShaderStatisticsInfoAMD const& ) const = default;
#else
    bool operator==( ShaderStatisticsInfoAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( shaderStageMask == rhs.shaderStageMask )
          && ( resourceUsage == rhs.resourceUsage )
          && ( numPhysicalVgprs == rhs.numPhysicalVgprs )
          && ( numPhysicalSgprs == rhs.numPhysicalSgprs )
          && ( numAvailableVgprs == rhs.numAvailableVgprs )
          && ( numAvailableSgprs == rhs.numAvailableSgprs )
          && ( memcmp( computeWorkGroupSize, rhs.computeWorkGroupSize, 3 * sizeof( uint32_t ) ) == 0 );
    }

    bool operator!=( ShaderStatisticsInfoAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStageMask = {};
    VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD resourceUsage = {};
    uint32_t numPhysicalVgprs = {};
    uint32_t numPhysicalSgprs = {};
    uint32_t numAvailableVgprs = {};
    uint32_t numAvailableSgprs = {};
    uint32_t computeWorkGroupSize[3] = {};
  };
  static_assert( sizeof( ShaderStatisticsInfoAMD ) == sizeof( VkShaderStatisticsInfoAMD ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ShaderStatisticsInfoAMD>::value, "struct wrapper is not a standard layout!" );
} // namespace VULKAN_HPP_NAMESPACE
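
// Usage sketch (not part of the generated API): retrieving
// vk::ShaderStatisticsInfoAMD for a pipeline stage via VK_AMD_shader_info
// (vkGetShaderInfoAMD). `device` and `pipeline` are hypothetical; the
// enhanced-mode wrapper returns raw bytes, which are copied into the
// statistics struct.
//
//   std::vector<uint8_t> data = device.getShaderInfoAMD(
//     pipeline, vk::ShaderStageFlagBits::eVertex, vk::ShaderInfoTypeAMD::eStatistics );
//   assert( data.size() >= sizeof( vk::ShaderStatisticsInfoAMD ) );
//   vk::ShaderStatisticsInfoAMD stats;
//   memcpy( &stats, data.data(), sizeof( stats ) );
//   logUsage( stats.resourceUsage );  // hypothetical helper from the sketch above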