Program Listing for File command_buffer.hpp
(Return to the documentation for file inexor/vulkan-renderer/wrapper/command_buffer.hpp)
#pragma once

#include "inexor/vulkan-renderer/wrapper/fence.hpp"
#include "inexor/vulkan-renderer/wrapper/gpu_memory_buffer.hpp"

#include <cassert>
#include <cstdint>
#include <memory>
#include <span>
#include <string>
#include <vector>
namespace inexor::vulkan_renderer::wrapper {
// Forward declaration
class Device;
class CommandBuffer {
VkCommandBuffer m_command_buffer{VK_NULL_HANDLE};
const Device &m_device;
std::string m_name;
std::unique_ptr<Fence> m_wait_fence;
// The Device wrapper must be able to call begin_command_buffer and end_command_buffer
friend class Device;
mutable std::vector<GPUMemoryBuffer> m_staging_bufs;
friend class CommandPool;
const CommandBuffer & // NOLINT
begin_command_buffer(VkCommandBufferUsageFlags flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT) const;
[[nodiscard]] VkBuffer create_staging_buffer(const void *data, const VkDeviceSize data_size,
const std::string &name) const {
assert(data);
assert(data_size > 0);
assert(!name.empty());
// Create a staging buffer for the copy operation and keep it until the CommandBuffer exceeds its lifetime
m_staging_bufs.emplace_back(m_device, name, data_size, data, data_size, VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
VMA_MEMORY_USAGE_CPU_ONLY);
return m_staging_bufs.back().buffer();
}
template <typename DataType>
[[nodiscard]] VkBuffer create_staging_buffer(const std::span<const DataType> data, const std::string &name) const {
return create_staging_buffer(data.data(), static_cast<VkDeviceSize>(sizeof(data) * data.size()), name);
}
const CommandBuffer &end_command_buffer() const; // NOLINT
public:
CommandBuffer(const Device &device, VkCommandPool cmd_pool, std::string name);
CommandBuffer(const CommandBuffer &) = delete;
CommandBuffer(CommandBuffer &&) noexcept;
~CommandBuffer() = default;
CommandBuffer &operator=(const CommandBuffer &) = delete;
CommandBuffer &operator=(CommandBuffer &&) = delete;
const CommandBuffer &begin_render_pass(const VkRenderPassBeginInfo &render_pass_bi, // NOLINT
VkSubpassContents subpass_contents = VK_SUBPASS_CONTENTS_INLINE) const;
const CommandBuffer &bind_descriptor_sets(std::span<const VkDescriptorSet> desc_sets, // NOLINT
VkPipelineLayout layout,
VkPipelineBindPoint bind_point = VK_PIPELINE_BIND_POINT_GRAPHICS,
std::uint32_t first_set = 0,
std::span<const std::uint32_t> dyn_offsets = {}) const;
const CommandBuffer &bind_index_buffer(VkBuffer buf, VkIndexType index_type = VK_INDEX_TYPE_UINT32, // NOLINT
VkDeviceSize offset = 0) const;
const CommandBuffer &bind_pipeline(VkPipeline pipeline, // NOLINT
VkPipelineBindPoint bind_point = VK_PIPELINE_BIND_POINT_GRAPHICS) const;
const CommandBuffer &bind_vertex_buffers(std::span<const VkBuffer> bufs, // NOLINT
std::uint32_t first_binding = 0,
std::span<const VkDeviceSize> offsets = {}) const;
const CommandBuffer & // NOLINT
change_image_layout(VkImage image, VkImageLayout old_layout, VkImageLayout new_layout,
VkImageSubresourceRange subres_range,
VkPipelineStageFlags src_mask = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
VkPipelineStageFlags dst_mask = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT) const;
const CommandBuffer & // NOLINT
change_image_layout(VkImage image, VkImageLayout old_layout, VkImageLayout new_layout,
std::uint32_t mip_level_count = 1, std::uint32_t array_layer_count = 1,
std::uint32_t base_mip_level = 0, std::uint32_t base_array_layer = 0,
VkPipelineStageFlags src_mask = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
VkPipelineStageFlags dst_mask = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT) const;
const CommandBuffer ©_buffer(VkBuffer src_buf, VkBuffer dst_buf, // NOLINT
const VkBufferCopy ©_region) const;
const CommandBuffer ©_buffer(VkBuffer src_buf, VkBuffer dst_buf, // NOLINT
std::span<const VkBufferCopy> copy_regions) const;
const CommandBuffer ©_buffer(VkBuffer src_buf, VkBuffer dst_buf, // NOLINT
VkDeviceSize src_buf_size) const;
const CommandBuffer ©_buffer_to_image(VkBuffer src_buf, VkImage dst_img, // NOLINT
std::span<const VkBufferImageCopy> copy_regions) const;
const CommandBuffer ©_buffer_to_image(VkBuffer src_buf, VkImage dst_img, // NOLINT
const VkBufferImageCopy ©_region) const;
const CommandBuffer ©_buffer_to_image(const void *data, const VkDeviceSize data_size, // NOLINT
VkImage dst_img, const VkBufferImageCopy ©_region,
const std::string &name) const;
template <typename DataType>
const CommandBuffer ©_buffer_to_image(const std::span<const DataType> data, // NOLINT
VkImage dst_img, const VkBufferImageCopy ©_region,
const std::string &name) const {
return copy_buffer_to_image(create_staging_buffer<DataType>(data, name), dst_img,
static_cast<VkDeviceSize>(sizeof(data) * data.size()), copy_region, name);
}
const CommandBuffer &draw(std::uint32_t vert_count, std::uint32_t inst_count = 1, // NOLINT
std::uint32_t first_vert = 0, std::uint32_t first_inst = 0) const;
const CommandBuffer &draw_indexed(std::uint32_t index_count, std::uint32_t inst_count = 1, // NOLINT
std::uint32_t first_index = 0, std::int32_t vert_offset = 0,
std::uint32_t first_inst = 0) const;
const CommandBuffer &end_render_pass() const; // NOLINT
[[nodiscard]] VkResult fence_status() const {
return m_wait_fence->status();
}
const CommandBuffer &pipeline_barrier(VkPipelineStageFlags src_stage_flags, // NOLINT
VkPipelineStageFlags dst_stage_flags,
std::span<const VkImageMemoryBarrier> img_mem_barriers,
std::span<const VkMemoryBarrier> mem_barriers = {},
std::span<const VkBufferMemoryBarrier> buf_mem_barriers = {},
VkDependencyFlags dep_flags = 0) const;
const CommandBuffer &pipeline_image_memory_barrier(VkPipelineStageFlags src_stage_flags, // NOLINT
VkPipelineStageFlags dst_stage_flags,
const VkImageMemoryBarrier &barrier) const;
const CommandBuffer &pipeline_memory_barrier(VkPipelineStageFlags src_stage_flags, // NOLINT
VkPipelineStageFlags dst_stage_flags,
const VkMemoryBarrier &barrier) const;
const CommandBuffer &full_barrier() const;
const CommandBuffer &push_constants(VkPipelineLayout layout, VkShaderStageFlags stage, // NOLINT
std::uint32_t size, const void *data, VkDeviceSize offset = 0) const;
template <typename T>
const CommandBuffer &push_constant(const VkPipelineLayout layout, const T &data, // NOLINT
const VkShaderStageFlags stage, const VkDeviceSize offset = 0) const {
return push_constants(layout, stage, sizeof(data), &data, offset);
}
// Graphics commands
// TODO(): Switch to taking in OOP wrappers when we have them (e.g. bind_vertex_buffers takes in a VertexBuffer)
[[nodiscard]] VkCommandBuffer get() const {
return m_command_buffer;
}
[[nodiscard]] const Fence &get_wait_fence() const {
return *m_wait_fence;
}
[[nodiscard]] const VkCommandBuffer *ptr() const {
return &m_command_buffer;
}
const CommandBuffer &reset_fence() const;
const CommandBuffer &submit(std::span<const VkSubmitInfo> submit_infos) const; // NOLINT
const CommandBuffer &submit(VkSubmitInfo submit_infos) const; // NOLINT
const CommandBuffer &submit() const; // NOLINT
const CommandBuffer &submit_and_wait(std::span<const VkSubmitInfo> submit_infos) const; // NOLINT
const CommandBuffer &submit_and_wait(VkSubmitInfo submit_info) const; // NOLINT
const CommandBuffer &submit_and_wait() const; // NOLINT
};
} // namespace inexor::vulkan_renderer::wrapper