mirror of
https://github.com/skyline-emu/skyline.git
synced 2024-12-25 17:51:49 +01:00
500b49d329
This commit adds Vulkan-Hpp as a library to the project. The headers are from a modified version of `VulkanHppGenerator`. They are broken into multiple files to avoid exceeding the Intellisense file size limit of Android Studio.
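For orientation, here is a minimal sketch of how the generated wrappers defined in this header are typically driven from application code. It assumes the conventional vk:: namespace, the monolithic <vulkan/vulkan.hpp> include, and handles created elsewhere; the function and variable names are illustrative and not part of this commit.

#include <vulkan/vulkan.hpp>

// Illustrative sketch only: record a trivial render pass using the
// enhanced-mode wrappers defined in this header. In enhanced mode,
// begin()/end() throw on failure (via createResultValue) instead of
// returning a raw VkResult.
void recordExample( vk::CommandBuffer commandBuffer, vk::RenderPass renderPass,
                    vk::Framebuffer framebuffer, vk::Extent2D extent )
{
  commandBuffer.begin( vk::CommandBufferBeginInfo( vk::CommandBufferUsageFlagBits::eOneTimeSubmit ) );

  vk::ClearValue clearValue( vk::ClearColorValue( std::array<float, 4>{ { 0.0f, 0.0f, 0.0f, 1.0f } } ) );
  vk::RenderPassBeginInfo renderPassBegin( renderPass, framebuffer,
                                           vk::Rect2D( vk::Offset2D( 0, 0 ), extent ),
                                           1, &clearValue );
  commandBuffer.beginRenderPass( renderPassBegin, vk::SubpassContents::eInline );

  commandBuffer.draw( 3, 1, 0, 0 ); // 3 vertices, 1 instance, no index buffer

  commandBuffer.endRenderPass();
  commandBuffer.end();
}

Each command appears in the header twice: a pointer-based overload that mirrors the C API and is noexcept, and an enhanced-mode overload (compiled out under VULKAN_HPP_DISABLE_ENHANCED_MODE) that takes references or ArrayProxy ranges and validates matching array sizes.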
1394 lines
95 KiB
C++
// Copyright (c) 2015-2019 The Khronos Group Inc.
|
|
//
|
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
// you may not use this file except in compliance with the License.
|
|
// You may obtain a copy of the License at
|
|
//
|
|
// http://www.apache.org/licenses/LICENSE-2.0
|
|
//
|
|
// Unless required by applicable law or agreed to in writing, software
|
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
// See the License for the specific language governing permissions and
|
|
// limitations under the License.
|
|
//
|
|
// ---- Exceptions to the Apache 2.0 License: ----
|
|
//
|
|
// As an exception, if you use this Software to generate code and portions of
|
|
// this Software are embedded into the generated code as a result, you may
|
|
// redistribute such product without providing attribution as would otherwise
|
|
// be required by Sections 4(a), 4(b) and 4(d) of the License.
|
|
//
|
|
// In addition, if you combine or link code generated by this Software with
|
|
// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
|
|
// ("`Combined Software`") and if a court of competent jurisdiction determines
|
|
// that the patent provision (Section 3), the indemnity provision (Section 9)
|
|
// or other Section of the License conflicts with the conditions of the
|
|
// applicable GPL or LGPL license, you may retroactively and prospectively
|
|
// choose to deem waived or otherwise exclude such Section(s) of the License,
|
|
// but only in their entirety and only with respect to the Combined Software.
|
|
//
|
|
|
|
// This header is generated from the Khronos Vulkan XML API Registry.
|
|
|
|
#pragma once
|
|
|
|
#include "../structs.hpp"
|
|
|
|
namespace VULKAN_HPP_NAMESPACE
|
|
{
|
|
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE Result CommandBuffer::begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo* pBeginInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return static_cast<Result>( d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo*>( pBeginInfo ) ) );
|
|
}
|
|
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::begin( const CommandBufferBeginInfo & beginInfo, Dispatch const &d ) const
|
|
{
|
|
Result result = static_cast<Result>( d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo*>( &beginInfo ) ) );
|
|
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::CommandBuffer::begin" );
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdBeginConditionalRenderingEXT( m_commandBuffer, reinterpret_cast<const VkConditionalRenderingBeginInfoEXT*>( pConditionalRenderingBegin ) );
|
|
}
|
|
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT( const ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdBeginConditionalRenderingEXT( m_commandBuffer, reinterpret_cast<const VkConditionalRenderingBeginInfoEXT*>( &conditionalRenderingBegin ) );
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT*>( pLabelInfo ) );
|
|
}
|
|
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT*>( &labelInfo ) );
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdBeginQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ) );
|
|
}
|
|
#else
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdBeginQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ) );
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, uint32_t index, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdBeginQueryIndexedEXT( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ), index );
|
|
}
|
|
#else
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, uint32_t index, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdBeginQueryIndexedEXT( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ), index );
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo*>( pRenderPassBegin ), static_cast<VkSubpassContents>( contents ) );
|
|
}
|
|
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const RenderPassBeginInfo & renderPassBegin, VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo*>( &renderPassBegin ), static_cast<VkSubpassContents>( contents ) );
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfoKHR* pSubpassBeginInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdBeginRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo*>( pRenderPassBegin ), reinterpret_cast<const VkSubpassBeginInfoKHR*>( pSubpassBeginInfo ) );
|
|
}
|
|
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const RenderPassBeginInfo & renderPassBegin, const SubpassBeginInfoKHR & subpassBeginInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdBeginRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo*>( &renderPassBegin ), reinterpret_cast<const VkSubpassBeginInfoKHR*>( &subpassBeginInfo ) );
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VULKAN_HPP_NAMESPACE::Buffer* pCounterBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pCounterBufferOffsets, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer, firstCounterBuffer, counterBufferCount, reinterpret_cast<const VkBuffer*>( pCounterBuffers ), reinterpret_cast<const VkDeviceSize*>( pCounterBufferOffsets ) );
|
|
}
|
|
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> counterBuffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> counterBufferOffsets, Dispatch const &d ) const
|
|
{
|
|
#ifdef VULKAN_HPP_NO_EXCEPTIONS
|
|
VULKAN_HPP_ASSERT( counterBuffers.size() == counterBufferOffsets.size() );
|
|
#else
|
|
if ( counterBuffers.size() != counterBufferOffsets.size() )
|
|
{
|
|
throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::beginTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" );
|
|
}
|
|
#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
|
|
d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer, firstCounterBuffer, counterBuffers.size() , reinterpret_cast<const VkBuffer*>( counterBuffers.data() ), reinterpret_cast<const VkDeviceSize*>( counterBufferOffsets.data() ) );
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdBindDescriptorSets( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), firstSet, descriptorSetCount, reinterpret_cast<const VkDescriptorSet*>( pDescriptorSets ), dynamicOffsetCount, pDynamicOffsets );
|
|
}
|
|
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> descriptorSets, ArrayProxy<const uint32_t> dynamicOffsets, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdBindDescriptorSets( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), firstSet, descriptorSets.size() , reinterpret_cast<const VkDescriptorSet*>( descriptorSets.data() ), dynamicOffsets.size() , dynamicOffsets.data() );
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::IndexType indexType, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdBindIndexBuffer( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkIndexType>( indexType ) );
|
|
}
|
|
#else
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::IndexType indexType, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdBindIndexBuffer( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkIndexType>( indexType ) );
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdBindPipeline( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) );
|
|
}
|
|
#else
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdBindPipeline( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) );
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdBindShadingRateImageNV( m_commandBuffer, static_cast<VkImageView>( imageView ), static_cast<VkImageLayout>( imageLayout ) );
|
|
}
|
|
#else
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdBindShadingRateImageNV( m_commandBuffer, static_cast<VkImageView>( imageView ), static_cast<VkImageLayout>( imageLayout ) );
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, const VULKAN_HPP_NAMESPACE::DeviceSize* pSizes, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer, firstBinding, bindingCount, reinterpret_cast<const VkBuffer*>( pBuffers ), reinterpret_cast<const VkDeviceSize*>( pOffsets ), reinterpret_cast<const VkDeviceSize*>( pSizes ) );
|
|
}
|
|
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> buffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> offsets, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> sizes, Dispatch const &d ) const
|
|
{
|
|
#ifdef VULKAN_HPP_NO_EXCEPTIONS
|
|
VULKAN_HPP_ASSERT( buffers.size() == offsets.size() );
|
|
#else
|
|
if ( buffers.size() != offsets.size() )
|
|
{
|
|
throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != offsets.size()" );
|
|
}
|
|
#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
|
|
#ifdef VULKAN_HPP_NO_EXCEPTIONS
|
|
VULKAN_HPP_ASSERT( buffers.size() == sizes.size() );
|
|
#else
|
|
if ( buffers.size() != sizes.size() )
|
|
{
|
|
throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != sizes.size()" );
|
|
}
|
|
#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
|
|
#ifdef VULKAN_HPP_NO_EXCEPTIONS
|
|
VULKAN_HPP_ASSERT( offsets.size() == sizes.size() );
|
|
#else
|
|
if ( offsets.size() != sizes.size() )
|
|
{
|
|
throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::bindTransformFeedbackBuffersEXT: offsets.size() != sizes.size()" );
|
|
}
|
|
#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
|
|
d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer, firstBinding, buffers.size() , reinterpret_cast<const VkBuffer*>( buffers.data() ), reinterpret_cast<const VkDeviceSize*>( offsets.data() ), reinterpret_cast<const VkDeviceSize*>( sizes.data() ) );
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdBindVertexBuffers( m_commandBuffer, firstBinding, bindingCount, reinterpret_cast<const VkBuffer*>( pBuffers ), reinterpret_cast<const VkDeviceSize*>( pOffsets ) );
|
|
}
|
|
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> buffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> offsets, Dispatch const &d ) const
|
|
{
|
|
#ifdef VULKAN_HPP_NO_EXCEPTIONS
|
|
VULKAN_HPP_ASSERT( buffers.size() == offsets.size() );
|
|
#else
|
|
if ( buffers.size() != offsets.size() )
|
|
{
|
|
throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::bindVertexBuffers: buffers.size() != offsets.size()" );
|
|
}
|
|
#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
|
|
d.vkCmdBindVertexBuffers( m_commandBuffer, firstBinding, buffers.size() , reinterpret_cast<const VkBuffer*>( buffers.data() ), reinterpret_cast<const VkDeviceSize*>( offsets.data() ) );
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageBlit* pRegions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdBlitImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkImageBlit*>( pRegions ), static_cast<VkFilter>( filter ) );
|
|
}
|
|
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageBlit> regions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdBlitImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size() , reinterpret_cast<const VkImageBlit*>( regions.data() ), static_cast<VkFilter>( filter ) );
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV* pInfo, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdBuildAccelerationStructureNV( m_commandBuffer, reinterpret_cast<const VkAccelerationStructureInfoNV*>( pInfo ), static_cast<VkBuffer>( instanceData ), static_cast<VkDeviceSize>( instanceOffset ), static_cast<VkBool32>( update ), static_cast<VkAccelerationStructureNV>( dst ), static_cast<VkAccelerationStructureNV>( src ), static_cast<VkBuffer>( scratch ), static_cast<VkDeviceSize>( scratchOffset ) );
|
|
}
|
|
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const AccelerationStructureInfoNV & info, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdBuildAccelerationStructureNV( m_commandBuffer, reinterpret_cast<const VkAccelerationStructureInfoNV*>( &info ), static_cast<VkBuffer>( instanceData ), static_cast<VkDeviceSize>( instanceOffset ), static_cast<VkBool32>( update ), static_cast<VkAccelerationStructureNV>( dst ), static_cast<VkAccelerationStructureNV>( src ), static_cast<VkBuffer>( scratch ), static_cast<VkDeviceSize>( scratchOffset ) );
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( uint32_t attachmentCount, const VULKAN_HPP_NAMESPACE::ClearAttachment* pAttachments, uint32_t rectCount, const VULKAN_HPP_NAMESPACE::ClearRect* pRects, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdClearAttachments( m_commandBuffer, attachmentCount, reinterpret_cast<const VkClearAttachment*>( pAttachments ), rectCount, reinterpret_cast<const VkClearRect*>( pRects ) );
|
|
}
|
|
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearAttachment> attachments, ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearRect> rects, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdClearAttachments( m_commandBuffer, attachments.size() , reinterpret_cast<const VkClearAttachment*>( attachments.data() ), rects.size() , reinterpret_cast<const VkClearRect*>( rects.data() ) );
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearColorValue* pColor, uint32_t rangeCount, const VULKAN_HPP_NAMESPACE::ImageSubresourceRange* pRanges, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdClearColorImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearColorValue*>( pColor ), rangeCount, reinterpret_cast<const VkImageSubresourceRange*>( pRanges ) );
|
|
}
|
|
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const ClearColorValue & color, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> ranges, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdClearColorImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearColorValue*>( &color ), ranges.size() , reinterpret_cast<const VkImageSubresourceRange*>( ranges.data() ) );
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VULKAN_HPP_NAMESPACE::ImageSubresourceRange* pRanges, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdClearDepthStencilImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearDepthStencilValue*>( pDepthStencil ), rangeCount, reinterpret_cast<const VkImageSubresourceRange*>( pRanges ) );
|
|
}
|
|
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const ClearDepthStencilValue & depthStencil, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> ranges, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdClearDepthStencilImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearDepthStencilValue*>( &depthStencil ), ranges.size() , reinterpret_cast<const VkImageSubresourceRange*>( ranges.data() ) );
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeNV mode, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdCopyAccelerationStructureNV( m_commandBuffer, static_cast<VkAccelerationStructureNV>( dst ), static_cast<VkAccelerationStructureNV>( src ), static_cast<VkCopyAccelerationStructureModeNV>( mode ) );
|
|
}
|
|
#else
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeNV mode, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdCopyAccelerationStructureNV( m_commandBuffer, static_cast<VkAccelerationStructureNV>( dst ), static_cast<VkAccelerationStructureNV>( src ), static_cast<VkCopyAccelerationStructureModeNV>( mode ) );
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferCopy* pRegions, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdCopyBuffer( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkBuffer>( dstBuffer ), regionCount, reinterpret_cast<const VkBufferCopy*>( pRegions ) );
|
|
}
|
|
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferCopy> regions, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdCopyBuffer( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkBuffer>( dstBuffer ), regions.size() , reinterpret_cast<const VkBufferCopy*>( regions.data() ) );
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy* pRegions, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdCopyBufferToImage( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkBufferImageCopy*>( pRegions ) );
|
|
}
|
|
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> regions, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdCopyBufferToImage( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size() , reinterpret_cast<const VkBufferImageCopy*>( regions.data() ) );
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageCopy* pRegions, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdCopyImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkImageCopy*>( pRegions ) );
|
|
}
|
|
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageCopy> regions, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdCopyImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size() , reinterpret_cast<const VkImageCopy*>( regions.data() ) );
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy* pRegions, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdCopyImageToBuffer( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkBuffer>( dstBuffer ), regionCount, reinterpret_cast<const VkBufferImageCopy*>( pRegions ) );
|
|
}
|
|
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> regions, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdCopyImageToBuffer( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkBuffer>( dstBuffer ), regions.size() , reinterpret_cast<const VkBufferImageCopy*>( regions.data() ) );
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdCopyQueryPoolResults( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), static_cast<VkDeviceSize>( stride ), static_cast<VkQueryResultFlags>( flags ) );
|
|
}
|
|
#else
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdCopyQueryPoolResults( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), static_cast<VkDeviceSize>( stride ), static_cast<VkQueryResultFlags>( flags ) );
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT*>( pMarkerInfo ) );
|
|
}
|
|
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT*>( &markerInfo ) );
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::debugMarkerEndEXT(Dispatch const &d) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdDebugMarkerEndEXT( m_commandBuffer );
|
|
}
|
|
#else
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::debugMarkerEndEXT(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdDebugMarkerEndEXT( m_commandBuffer );
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT*>( pMarkerInfo ) );
|
|
}
|
|
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT*>( &markerInfo ) );
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdDispatch( m_commandBuffer, groupCountX, groupCountY, groupCountZ );
|
|
}
|
|
#else
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdDispatch( m_commandBuffer, groupCountX, groupCountY, groupCountZ );
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::dispatchBase( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdDispatchBase( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
|
|
}
|
|
#else
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::dispatchBase( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdDispatchBase( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::dispatchBaseKHR( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdDispatchBase( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
|
|
}
|
|
#else
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::dispatchBaseKHR( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdDispatchBase( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdDispatchIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ) );
|
|
}
|
|
#else
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdDispatchIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ) );
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdDraw( m_commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance );
|
|
}
|
|
#else
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdDraw( m_commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance );
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdDrawIndexed( m_commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance );
|
|
}
|
|
#else
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdDrawIndexed( m_commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance );
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdDrawIndexedIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
|
|
}
|
|
#else
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdDrawIndexedIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdDrawIndexedIndirectCountKHR( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
|
|
}
|
|
#else
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdDrawIndexedIndirectCountKHR( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdDrawIndexedIndirectCountKHR( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
|
|
}
|
|
#else
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdDrawIndexedIndirectCountKHR( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdDrawIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
|
|
}
|
|
#else
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdDrawIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::drawIndirectByteCountEXT( uint32_t instanceCount, uint32_t firstInstance, VULKAN_HPP_NAMESPACE::Buffer counterBuffer, VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdDrawIndirectByteCountEXT( m_commandBuffer, instanceCount, firstInstance, static_cast<VkBuffer>( counterBuffer ), static_cast<VkDeviceSize>( counterBufferOffset ), counterOffset, vertexStride );
|
|
}
|
|
#else
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::drawIndirectByteCountEXT( uint32_t instanceCount, uint32_t firstInstance, VULKAN_HPP_NAMESPACE::Buffer counterBuffer, VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdDrawIndirectByteCountEXT( m_commandBuffer, instanceCount, firstInstance, static_cast<VkBuffer>( counterBuffer ), static_cast<VkDeviceSize>( counterBufferOffset ), counterOffset, vertexStride );
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdDrawIndirectCountKHR( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
|
|
}
|
|
#else
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdDrawIndirectCountKHR( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdDrawIndirectCountKHR( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
|
|
}
|
|
#else
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdDrawIndirectCountKHR( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdDrawMeshTasksIndirectCountNV( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
|
|
}
|
|
#else
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdDrawMeshTasksIndirectCountNV( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdDrawMeshTasksIndirectNV( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
|
|
}
|
|
#else
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdDrawMeshTasksIndirectNV( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdDrawMeshTasksNV( m_commandBuffer, taskCount, firstTask );
|
|
}
|
|
#else
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdDrawMeshTasksNV( m_commandBuffer, taskCount, firstTask );
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::endConditionalRenderingEXT(Dispatch const &d) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdEndConditionalRenderingEXT( m_commandBuffer );
|
|
}
|
|
#else
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::endConditionalRenderingEXT(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdEndConditionalRenderingEXT( m_commandBuffer );
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::endDebugUtilsLabelEXT(Dispatch const &d) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdEndDebugUtilsLabelEXT( m_commandBuffer );
|
|
}
|
|
#else
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::endDebugUtilsLabelEXT(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdEndDebugUtilsLabelEXT( m_commandBuffer );
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdEndQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query );
|
|
}
|
|
#else
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdEndQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query );
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdEndQueryIndexedEXT( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, index );
|
|
}
|
|
#else
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdEndQueryIndexedEXT( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, index );
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::endRenderPass(Dispatch const &d) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdEndRenderPass( m_commandBuffer );
|
|
}
|
|
#else
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::endRenderPass(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdEndRenderPass( m_commandBuffer );
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfoKHR* pSubpassEndInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfoKHR*>( pSubpassEndInfo ) );
|
|
}
|
|
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const SubpassEndInfoKHR & subpassEndInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfoKHR*>( &subpassEndInfo ) );
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VULKAN_HPP_NAMESPACE::Buffer* pCounterBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pCounterBufferOffsets, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
d.vkCmdEndTransformFeedbackEXT( m_commandBuffer, firstCounterBuffer, counterBufferCount, reinterpret_cast<const VkBuffer*>( pCounterBuffers ), reinterpret_cast<const VkDeviceSize*>( pCounterBufferOffsets ) );
|
|
}
|
|
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
|
template<typename Dispatch>
|
|
VULKAN_HPP_INLINE void CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> counterBuffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> counterBufferOffsets, Dispatch const &d ) const
|
|
{
|
|
#ifdef VULKAN_HPP_NO_EXCEPTIONS
|
|
VULKAN_HPP_ASSERT( counterBuffers.size() == counterBufferOffsets.size() );
|
|
#else
|
|
if ( counterBuffers.size() != counterBufferOffsets.size() )
|
|
{
|
|
throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::endTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" );
|
|
}
|
|
#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
|
|
d.vkCmdEndTransformFeedbackEXT( m_commandBuffer, firstCounterBuffer, counterBuffers.size() , reinterpret_cast<const VkBuffer*>( counterBuffers.data() ), reinterpret_cast<const VkDeviceSize*>( counterBufferOffsets.data() ) );
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::executeCommands( uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdExecuteCommands( m_commandBuffer, commandBufferCount, reinterpret_cast<const VkCommandBuffer*>( pCommandBuffers ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::executeCommands( ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> commandBuffers, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdExecuteCommands( m_commandBuffer, commandBuffers.size() , reinterpret_cast<const VkCommandBuffer*>( commandBuffers.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

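  // Usage sketch (illustrative, not part of the generated header): the ArrayProxy
  // overload accepts a std::vector, a single handle, or any contiguous range, so the
  // element count never has to be spelled out by hand. `cmd`, `secondaryA` and
  // `secondaryB` are assumed names for a primary command buffer and two recorded
  // secondary command buffers:
  //
  //   std::vector<vk::CommandBuffer> secondaries = { secondaryA, secondaryB };
  //   cmd.executeCommands( secondaries );
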
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize size, uint32_t data, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdFillBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), static_cast<VkDeviceSize>( size ), data );
  }
#else
  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize size, uint32_t data, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdFillBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), static_cast<VkDeviceSize>( size ), data );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT*>( pLabelInfo ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT*>( &labelInfo ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdNextSubpass( m_commandBuffer, static_cast<VkSubpassContents>( contents ) );
  }
#else
  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdNextSubpass( m_commandBuffer, static_cast<VkSubpassContents>( contents ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfoKHR* pSubpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfoKHR* pSubpassEndInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdNextSubpass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfoKHR*>( pSubpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfoKHR*>( pSubpassEndInfo ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const SubpassBeginInfoKHR & subpassBeginInfo, const SubpassEndInfoKHR & subpassEndInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdNextSubpass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfoKHR*>( &subpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfoKHR*>( &subpassEndInfo ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VULKAN_HPP_NAMESPACE::MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdPipelineBarrier( m_commandBuffer, static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), static_cast<VkDependencyFlags>( dependencyFlags ), memoryBarrierCount, reinterpret_cast<const VkMemoryBarrier*>( pMemoryBarriers ), bufferMemoryBarrierCount, reinterpret_cast<const VkBufferMemoryBarrier*>( pBufferMemoryBarriers ), imageMemoryBarrierCount, reinterpret_cast<const VkImageMemoryBarrier*>( pImageMemoryBarriers ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> memoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> bufferMemoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> imageMemoryBarriers, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdPipelineBarrier( m_commandBuffer, static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), static_cast<VkDependencyFlags>( dependencyFlags ), memoryBarriers.size() , reinterpret_cast<const VkMemoryBarrier*>( memoryBarriers.data() ), bufferMemoryBarriers.size() , reinterpret_cast<const VkBufferMemoryBarrier*>( bufferMemoryBarriers.data() ), imageMemoryBarriers.size() , reinterpret_cast<const VkImageMemoryBarrier*>( imageMemoryBarriers.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

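  // Usage sketch (illustrative, assuming a command buffer `cmd`, an image `image`, and
  // the default `vk` namespace): a typical layout transition after a transfer write.
  // nullptr binds to the unused memory- and buffer-barrier ArrayProxy parameters.
  //
  //   vk::ImageMemoryBarrier barrier( vk::AccessFlagBits::eTransferWrite, vk::AccessFlagBits::eShaderRead,
  //                                   vk::ImageLayout::eTransferDstOptimal, vk::ImageLayout::eShaderReadOnlyOptimal,
  //                                   VK_QUEUE_FAMILY_IGNORED, VK_QUEUE_FAMILY_IGNORED, image,
  //                                   vk::ImageSubresourceRange( vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 ) );
  //   cmd.pipelineBarrier( vk::PipelineStageFlagBits::eTransfer, vk::PipelineStageFlagBits::eFragmentShader,
  //                        {}, nullptr, nullptr, barrier );
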
  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::processCommandsNVX( const VULKAN_HPP_NAMESPACE::CmdProcessCommandsInfoNVX* pProcessCommandsInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdProcessCommandsNVX( m_commandBuffer, reinterpret_cast<const VkCmdProcessCommandsInfoNVX*>( pProcessCommandsInfo ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::processCommandsNVX( const CmdProcessCommandsInfoNVX & processCommandsInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdProcessCommandsNVX( m_commandBuffer, reinterpret_cast<const VkCmdProcessCommandsInfoNVX*>( &processCommandsInfo ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdPushConstants( m_commandBuffer, static_cast<VkPipelineLayout>( layout ), static_cast<VkShaderStageFlags>( stageFlags ), offset, size, pValues );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template<typename T, typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, ArrayProxy<const T> values, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdPushConstants( m_commandBuffer, static_cast<VkPipelineLayout>( layout ), static_cast<VkShaderStageFlags>( stageFlags ), offset, values.size() * sizeof( T ) , reinterpret_cast<const void*>( values.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

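  // Usage sketch (illustrative, not part of the generated header): the templated
  // overload computes the byte size as values.size() * sizeof( T ), so a single struct
  // can be pushed directly. `PushData`, `layout` and `cmd` are assumed names; the struct
  // layout must match the pipeline's push constant range.
  //
  //   struct PushData { float scale[2]; float offset[2]; };
  //   PushData pd{ { 1.0f, 1.0f }, { 0.0f, 0.0f } };
  //   cmd.pushConstants<PushData>( layout, vk::ShaderStageFlagBits::eVertex, 0, pd );
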
  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VULKAN_HPP_NAMESPACE::WriteDescriptorSet* pDescriptorWrites, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdPushDescriptorSetKHR( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), set, descriptorWriteCount, reinterpret_cast<const VkWriteDescriptorSet*>( pDescriptorWrites ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> descriptorWrites, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdPushDescriptorSetKHR( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), set, descriptorWrites.size() , reinterpret_cast<const VkWriteDescriptorSet*>( descriptorWrites.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

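  // Usage sketch (illustrative, assuming VK_KHR_push_descriptor is enabled and that
  // `cmd`, `buffer` and `layout` exist): descriptors are written straight into the
  // command buffer, so no descriptor set has to be allocated; the dstSet field of the
  // write is ignored for push descriptors.
  //
  //   vk::DescriptorBufferInfo bufferInfo( buffer, 0, VK_WHOLE_SIZE );
  //   vk::WriteDescriptorSet write( {}, 0, 0, 1, vk::DescriptorType::eUniformBuffer, nullptr, &bufferInfo );
  //   cmd.pushDescriptorSetKHR( vk::PipelineBindPoint::eGraphics, layout, 0, write );
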
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, const void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdPushDescriptorSetWithTemplateKHR( m_commandBuffer, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), static_cast<VkPipelineLayout>( layout ), set, pData );
  }
#else
  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, const void* pData, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdPushDescriptorSetWithTemplateKHR( m_commandBuffer, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), static_cast<VkPipelineLayout>( layout ), set, pData );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::reserveSpaceForCommandsNVX( const VULKAN_HPP_NAMESPACE::CmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdReserveSpaceForCommandsNVX( m_commandBuffer, reinterpret_cast<const VkCmdReserveSpaceForCommandsInfoNVX*>( pReserveSpaceInfo ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::reserveSpaceForCommandsNVX( const CmdReserveSpaceForCommandsInfoNVX & reserveSpaceInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdReserveSpaceForCommandsNVX( m_commandBuffer, reinterpret_cast<const VkCmdReserveSpaceForCommandsInfoNVX*>( &reserveSpaceInfo ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::resetEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdResetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
  }
#else
  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::resetEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdResetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdResetQueryPool( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
  }
#else
  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdResetQueryPool( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageResolve* pRegions, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdResolveImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkImageResolve*>( pRegions ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageResolve> regions, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdResolveImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size() , reinterpret_cast<const VkImageResolve*>( regions.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setBlendConstants( const float blendConstants[4], Dispatch const &d) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetBlendConstants( m_commandBuffer, blendConstants );
  }
#else
  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setBlendConstants( const float blendConstants[4], Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetBlendConstants( m_commandBuffer, blendConstants );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

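  // Usage sketch (illustrative, assuming a command buffer `cmd`): the four floats feed
  // the eConstantColor / eConstantAlpha blend factors of the bound pipeline.
  //
  //   const float blend[4] = { 1.0f, 1.0f, 1.0f, 1.0f };
  //   cmd.setBlendConstants( blend );
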
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( const void* pCheckpointMarker, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetCheckpointNV( m_commandBuffer, pCheckpointMarker );
  }
#else
  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( const void* pCheckpointMarker, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetCheckpointNV( m_commandBuffer, pCheckpointMarker );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV* pCustomSampleOrders, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer, static_cast<VkCoarseSampleOrderTypeNV>( sampleOrderType ), customSampleOrderCount, reinterpret_cast<const VkCoarseSampleOrderCustomNV*>( pCustomSampleOrders ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, ArrayProxy<const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> customSampleOrders, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer, static_cast<VkCoarseSampleOrderTypeNV>( sampleOrderType ), customSampleOrders.size() , reinterpret_cast<const VkCoarseSampleOrderCustomNV*>( customSampleOrders.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetDepthBias( m_commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor );
  }
#else
  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetDepthBias( m_commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetDepthBounds( m_commandBuffer, minDepthBounds, maxDepthBounds );
  }
#else
  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetDepthBounds( m_commandBuffer, minDepthBounds, maxDepthBounds );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDeviceMask( uint32_t deviceMask, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetDeviceMask( m_commandBuffer, deviceMask );
  }
#else
  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDeviceMask( uint32_t deviceMask, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetDeviceMask( m_commandBuffer, deviceMask );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDeviceMaskKHR( uint32_t deviceMask, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetDeviceMaskKHR( m_commandBuffer, deviceMask );
  }
#else
  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDeviceMaskKHR( uint32_t deviceMask, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetDeviceMaskKHR( m_commandBuffer, deviceMask );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VULKAN_HPP_NAMESPACE::Rect2D* pDiscardRectangles, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetDiscardRectangleEXT( m_commandBuffer, firstDiscardRectangle, discardRectangleCount, reinterpret_cast<const VkRect2D*>( pDiscardRectangles ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> discardRectangles, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetDiscardRectangleEXT( m_commandBuffer, firstDiscardRectangle, discardRectangles.size() , reinterpret_cast<const VkRect2D*>( discardRectangles.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
  }
#else
  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pExclusiveScissors, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetExclusiveScissorNV( m_commandBuffer, firstExclusiveScissor, exclusiveScissorCount, reinterpret_cast<const VkRect2D*>( pExclusiveScissors ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor, ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> exclusiveScissors, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetExclusiveScissorNV( m_commandBuffer, firstExclusiveScissor, exclusiveScissors.size() , reinterpret_cast<const VkRect2D*>( exclusiveScissors.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetLineStippleEXT( m_commandBuffer, lineStippleFactor, lineStipplePattern );
  }
#else
  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetLineStippleEXT( m_commandBuffer, lineStippleFactor, lineStipplePattern );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setLineWidth( float lineWidth, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetLineWidth( m_commandBuffer, lineWidth );
  }
#else
  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setLineWidth( float lineWidth, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetLineWidth( m_commandBuffer, lineWidth );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template<typename Dispatch>
  VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL* pMarkerInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkCmdSetPerformanceMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceMarkerInfoINTEL*>( pMarkerInfo ) ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template<typename Dispatch>
  VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::setPerformanceMarkerINTEL( const PerformanceMarkerInfoINTEL & markerInfo, Dispatch const &d ) const
  {
    Result result = static_cast<Result>( d.vkCmdSetPerformanceMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceMarkerInfoINTEL*>( &markerInfo ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::CommandBuffer::setPerformanceMarkerINTEL" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template<typename Dispatch>
  VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL* pOverrideInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkCmdSetPerformanceOverrideINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceOverrideInfoINTEL*>( pOverrideInfo ) ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template<typename Dispatch>
  VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::setPerformanceOverrideINTEL( const PerformanceOverrideInfoINTEL & overrideInfo, Dispatch const &d ) const
  {
    Result result = static_cast<Result>( d.vkCmdSetPerformanceOverrideINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceOverrideInfoINTEL*>( &overrideInfo ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::CommandBuffer::setPerformanceOverrideINTEL" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template<typename Dispatch>
  VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL* pMarkerInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkCmdSetPerformanceStreamMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL*>( pMarkerInfo ) ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template<typename Dispatch>
  VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::setPerformanceStreamMarkerINTEL( const PerformanceStreamMarkerInfoINTEL & markerInfo, Dispatch const &d ) const
  {
    Result result = static_cast<Result>( d.vkCmdSetPerformanceStreamMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL*>( &markerInfo ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::CommandBuffer::setPerformanceStreamMarkerINTEL" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT* pSampleLocationsInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetSampleLocationsEXT( m_commandBuffer, reinterpret_cast<const VkSampleLocationsInfoEXT*>( pSampleLocationsInfo ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const SampleLocationsInfoEXT & sampleLocationsInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetSampleLocationsEXT( m_commandBuffer, reinterpret_cast<const VkSampleLocationsInfoEXT*>( &sampleLocationsInfo ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor, uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pScissors, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissorCount, reinterpret_cast<const VkRect2D*>( pScissors ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor, ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> scissors, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissors.size() , reinterpret_cast<const VkRect2D*>( scissors.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetStencilCompareMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), compareMask );
  }
#else
  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetStencilCompareMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), compareMask );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetStencilReference( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), reference );
  }
#else
  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetStencilReference( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), reference );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetStencilWriteMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), writeMask );
  }
#else
  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetStencilWriteMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), writeMask );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::Viewport* pViewports, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetViewport( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewport*>( pViewports ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport, ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> viewports, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetViewport( m_commandBuffer, firstViewport, viewports.size() , reinterpret_cast<const VkViewport*>( viewports.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

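  // Usage sketch (illustrative, assuming a command buffer `cmd` and a 1280x720 render
  // target): setViewport and setScissor (above) both take a single element or any
  // contiguous range through ArrayProxy.
  //
  //   cmd.setViewport( 0, vk::Viewport( 0.0f, 0.0f, 1280.0f, 720.0f, 0.0f, 1.0f ) );
  //   cmd.setScissor( 0, vk::Rect2D( vk::Offset2D( 0, 0 ), vk::Extent2D( 1280, 720 ) ) );
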
  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV* pShadingRatePalettes, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetViewportShadingRatePaletteNV( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkShadingRatePaletteNV*>( pShadingRatePalettes ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV( uint32_t firstViewport, ArrayProxy<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> shadingRatePalettes, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetViewportShadingRatePaletteNV( m_commandBuffer, firstViewport, shadingRatePalettes.size() , reinterpret_cast<const VkShadingRatePaletteNV*>( shadingRatePalettes.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::ViewportWScalingNV* pViewportWScalings, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetViewportWScalingNV( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewportWScalingNV*>( pViewportWScalings ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV( uint32_t firstViewport, ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> viewportWScalings, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetViewportWScalingNV( m_commandBuffer, firstViewport, viewportWScalings.size() , reinterpret_cast<const VkViewportWScalingNV*>( viewportWScalings.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdTraceRaysNV( m_commandBuffer, static_cast<VkBuffer>( raygenShaderBindingTableBuffer ), static_cast<VkDeviceSize>( raygenShaderBindingOffset ), static_cast<VkBuffer>( missShaderBindingTableBuffer ), static_cast<VkDeviceSize>( missShaderBindingOffset ), static_cast<VkDeviceSize>( missShaderBindingStride ), static_cast<VkBuffer>( hitShaderBindingTableBuffer ), static_cast<VkDeviceSize>( hitShaderBindingOffset ), static_cast<VkDeviceSize>( hitShaderBindingStride ), static_cast<VkBuffer>( callableShaderBindingTableBuffer ), static_cast<VkDeviceSize>( callableShaderBindingOffset ), static_cast<VkDeviceSize>( callableShaderBindingStride ), width, height, depth );
  }
#else
  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdTraceRaysNV( m_commandBuffer, static_cast<VkBuffer>( raygenShaderBindingTableBuffer ), static_cast<VkDeviceSize>( raygenShaderBindingOffset ), static_cast<VkBuffer>( missShaderBindingTableBuffer ), static_cast<VkDeviceSize>( missShaderBindingOffset ), static_cast<VkDeviceSize>( missShaderBindingStride ), static_cast<VkBuffer>( hitShaderBindingTableBuffer ), static_cast<VkDeviceSize>( hitShaderBindingOffset ), static_cast<VkDeviceSize>( hitShaderBindingStride ), static_cast<VkBuffer>( callableShaderBindingTableBuffer ), static_cast<VkDeviceSize>( callableShaderBindingOffset ), static_cast<VkDeviceSize>( callableShaderBindingStride ), width, height, depth );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize dataSize, const void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdUpdateBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), static_cast<VkDeviceSize>( dataSize ), pData );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template<typename T, typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, ArrayProxy<const T> data, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdUpdateBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), data.size() * sizeof( T ) , reinterpret_cast<const void*>( data.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

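  // Note (not part of the generated header): the Vulkan specification limits
  // vkCmdUpdateBuffer to at most 65536 bytes per call, with dstOffset and dataSize
  // multiples of 4, so the templated overload above suits small inline uploads only;
  // larger transfers should go through a staging-buffer copy. A sketch, assuming `cmd`
  // and a destination `buffer`:
  //
  //   std::array<uint32_t, 4> values = { { 0u, 1u, 2u, 3u } };
  //   cmd.updateBuffer<uint32_t>( buffer, 0, values );   // dataSize = values.size() * sizeof( uint32_t )
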
  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::waitEvents( uint32_t eventCount, const VULKAN_HPP_NAMESPACE::Event* pEvents, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VULKAN_HPP_NAMESPACE::MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdWaitEvents( m_commandBuffer, eventCount, reinterpret_cast<const VkEvent*>( pEvents ), static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), memoryBarrierCount, reinterpret_cast<const VkMemoryBarrier*>( pMemoryBarriers ), bufferMemoryBarrierCount, reinterpret_cast<const VkBufferMemoryBarrier*>( pBufferMemoryBarriers ), imageMemoryBarrierCount, reinterpret_cast<const VkImageMemoryBarrier*>( pImageMemoryBarriers ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::waitEvents( ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> events, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> memoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> bufferMemoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> imageMemoryBarriers, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdWaitEvents( m_commandBuffer, events.size() , reinterpret_cast<const VkEvent*>( events.data() ), static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), memoryBarriers.size() , reinterpret_cast<const VkMemoryBarrier*>( memoryBarriers.data() ), bufferMemoryBarriers.size() , reinterpret_cast<const VkBufferMemoryBarrier*>( bufferMemoryBarriers.data() ), imageMemoryBarriers.size() , reinterpret_cast<const VkImageMemoryBarrier*>( imageMemoryBarriers.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureNV* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdWriteAccelerationStructuresPropertiesNV( m_commandBuffer, accelerationStructureCount, reinterpret_cast<const VkAccelerationStructureNV*>( pAccelerationStructures ), static_cast<VkQueryType>( queryType ), static_cast<VkQueryPool>( queryPool ), firstQuery );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureNV> accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdWriteAccelerationStructuresPropertiesNV( m_commandBuffer, accelerationStructures.size() , reinterpret_cast<const VkAccelerationStructureNV*>( accelerationStructures.data() ), static_cast<VkQueryType>( queryType ), static_cast<VkQueryPool>( queryPool ), firstQuery );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, uint32_t marker, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdWriteBufferMarkerAMD( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), marker );
  }
#else
  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, uint32_t marker, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdWriteBufferMarkerAMD( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), marker );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdWriteTimestamp( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkQueryPool>( queryPool ), query );
  }
#else
  template<typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdWriteTimestamp( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkQueryPool>( queryPool ), query );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

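  // Usage sketch (illustrative, assuming `cmd` and a `queryPool` with at least two
  // timestamp queries on a queue that supports them): bracketing a workload to measure
  // GPU time.
  //
  //   cmd.resetQueryPool( queryPool, 0, 2 );
  //   cmd.writeTimestamp( vk::PipelineStageFlagBits::eTopOfPipe, queryPool, 0 );
  //   // ... record the work to be measured ...
  //   cmd.writeTimestamp( vk::PipelineStageFlagBits::eBottomOfPipe, queryPool, 1 );
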
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template<typename Dispatch>
  VULKAN_HPP_INLINE Result CommandBuffer::end(Dispatch const &d) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkEndCommandBuffer( m_commandBuffer ) );
  }
#else
  template<typename Dispatch>
  VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::end(Dispatch const &d ) const
  {
    Result result = static_cast<Result>( d.vkEndCommandBuffer( m_commandBuffer ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::CommandBuffer::end" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template<typename Dispatch>
  VULKAN_HPP_INLINE Result CommandBuffer::reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ) );
  }
#else
  template<typename Dispatch>
  VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const &d ) const
  {
    Result result = static_cast<Result>( d.vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::CommandBuffer::reset" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
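
  // Note (not part of the generated header): in enhanced mode, end() and reset() route
  // the VkResult through createResultValue, which throws a vk::SystemError subclass on
  // any failure code and collapses the return type to void on success; with
  // VULKAN_HPP_NO_EXCEPTIONS defined, the raw vk::Result is returned after a
  // VULKAN_HPP_ASSERT instead.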
} // namespace VULKAN_HPP_NAMESPACE