// Copyright 2020 yuzu Emulator Project
// Licensed under GPLv2 or any later version
// Refer to the license.txt file included.

#pragma once

#include <exception>
#include <iterator>
#include <limits>
#include <memory>
#include <optional>
#include <type_traits>
#include <utility>
#include <vector>

#define VK_NO_PROTOTYPES
#include <vulkan/vulkan.h>

#include "common/common_types.h"

namespace Vulkan::vk {

/**
 * Span for Vulkan arrays.
 * Based on std::span but optimized for array access instead of iterators.
 * Size returns uint32_t instead of size_t to ease interaction with Vulkan functions.
 */
template <typename T>
class Span {
public:
    using value_type = T;
    using size_type = u32;
    using difference_type = std::ptrdiff_t;
    using reference = const T&;
    using const_reference = const T&;
    using pointer = const T*;
    using const_pointer = const T*;
    using iterator = const T*;
    using const_iterator = const T*;

    /// Construct an empty span.
    constexpr Span() noexcept = default;

    /// Construct a span from a single element.
    constexpr Span(const T& value) noexcept : ptr{&value}, num{1} {}

    /// Construct a span from a range.
    template <typename Range>
    // requires std::data(const Range&)
    // requires std::size(const Range&)
    constexpr Span(const Range& range) : ptr{std::data(range)}, num{std::size(range)} {}

    /// Construct a span from a pointer and a size.
    /// This is intended for subranges.
    constexpr Span(const T* ptr, std::size_t num) noexcept : ptr{ptr}, num{num} {}

    /// Returns the data pointed to by the span.
    constexpr const T* data() const noexcept {
        return ptr;
    }

    /// Returns the number of elements in the span.
    /// @note Returns a 32-bit integer because most Vulkan functions expect this type.
    constexpr u32 size() const noexcept {
        return static_cast<u32>(num);
    }

    /// Returns true when the span is empty.
    constexpr bool empty() const noexcept {
        return num == 0;
    }

    /// Returns a reference to the element at the passed index.
    /// @pre index < size()
    constexpr const T& operator[](std::size_t index) const noexcept {
        return ptr[index];
    }

    /// Returns an iterator to the beginning of the span.
    constexpr const T* begin() const noexcept {
        return ptr;
    }

    /// Returns an iterator to the end of the span.
    constexpr const T* end() const noexcept {
        return ptr + num;
    }

    /// Returns a constant iterator to the beginning of the span.
    constexpr const T* cbegin() const noexcept {
        return ptr;
    }

    /// Returns a constant iterator to the end of the span.
    constexpr const T* cend() const noexcept {
        return ptr + num;
    }

private:
    const T* ptr = nullptr;
    std::size_t num = 0;
};
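
// Usage sketch for Span (illustrative only, not part of the interface). The
// "enabled_extensions" vector and the VkInstanceCreateInfo fields below are assumptions
// made up for the example; Span converts implicitly from ranges and from single values:
//
//     const std::vector<const char*> enabled_extensions{VK_EXT_DEBUG_UTILS_EXTENSION_NAME};
//     const Span<const char*> extension_span = enabled_extensions; // from a range
//     VkInstanceCreateInfo ci{};
//     ci.enabledExtensionCount = extension_span.size();   // u32, as Vulkan expects
//     ci.ppEnabledExtensionNames = extension_span.data();
//
// A single element also binds to a Span of size one, e.g. Span<VkSubmitInfo>{submit_info}.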

/// Vulkan exception generated from a VkResult.
class Exception final : public std::exception {
public:
    /// Construct the exception with a result.
    /// @pre result != VK_SUCCESS
    explicit Exception(VkResult result_) : result{result_} {}
    virtual ~Exception() = default;

    const char* what() const noexcept override;

private:
    VkResult result;
};

/// Converts a VkResult enum into a rodata string.
const char* ToString(VkResult) noexcept;

/// Throws a Vulkan exception if result is not VK_SUCCESS.
inline void Check(VkResult result) {
    if (result != VK_SUCCESS) {
        throw Exception(result);
    }
}

/// Throws a Vulkan exception if result is an error (negative value).
/// @return result
inline VkResult Filter(VkResult result) {
    if (result < 0) {
        throw Exception(result);
    }
    return result;
}
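
// Minimal sketch of the intended difference between Check and Filter (the "dld", "device",
// "fence" and "timeout" names are assumptions for the example). Check is for calls that must
// return VK_SUCCESS; Filter is for calls whose non-error result codes carry meaning:
//
//     Check(dld.vkResetFences(device, 1, &fence)); // throws on anything other than VK_SUCCESS
//     const VkResult wait = Filter(dld.vkWaitForFences(device, 1, &fence, VK_TRUE, timeout));
//     if (wait == VK_TIMEOUT) {
//         // Not an error: Filter only throws on negative (error) results.
//     }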

/// Table holding Vulkan instance function pointers.
struct InstanceDispatch {
    PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr;

    PFN_vkCreateInstance vkCreateInstance;
    PFN_vkDestroyInstance vkDestroyInstance;
    PFN_vkEnumerateInstanceExtensionProperties vkEnumerateInstanceExtensionProperties;
    PFN_vkEnumerateInstanceLayerProperties vkEnumerateInstanceLayerProperties;

    PFN_vkCreateDebugUtilsMessengerEXT vkCreateDebugUtilsMessengerEXT;
    PFN_vkCreateDevice vkCreateDevice;
    PFN_vkDestroyDebugUtilsMessengerEXT vkDestroyDebugUtilsMessengerEXT;
    PFN_vkDestroyDevice vkDestroyDevice;
    PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR;
    PFN_vkEnumerateDeviceExtensionProperties vkEnumerateDeviceExtensionProperties;
    PFN_vkEnumeratePhysicalDevices vkEnumeratePhysicalDevices;
    PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr;
    PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR;
    PFN_vkGetPhysicalDeviceFormatProperties vkGetPhysicalDeviceFormatProperties;
    PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties;
    PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties;
    PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR;
    PFN_vkGetPhysicalDeviceQueueFamilyProperties vkGetPhysicalDeviceQueueFamilyProperties;
    PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR vkGetPhysicalDeviceSurfaceCapabilitiesKHR;
    PFN_vkGetPhysicalDeviceSurfaceFormatsKHR vkGetPhysicalDeviceSurfaceFormatsKHR;
    PFN_vkGetPhysicalDeviceSurfacePresentModesKHR vkGetPhysicalDeviceSurfacePresentModesKHR;
    PFN_vkGetPhysicalDeviceSurfaceSupportKHR vkGetPhysicalDeviceSurfaceSupportKHR;
    PFN_vkGetSwapchainImagesKHR vkGetSwapchainImagesKHR;
    PFN_vkQueuePresentKHR vkQueuePresentKHR;
};

/// Table holding Vulkan device function pointers.
struct DeviceDispatch : public InstanceDispatch {
    PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR;
    PFN_vkAllocateCommandBuffers vkAllocateCommandBuffers;
    PFN_vkAllocateDescriptorSets vkAllocateDescriptorSets;
    PFN_vkAllocateMemory vkAllocateMemory;
    PFN_vkBeginCommandBuffer vkBeginCommandBuffer;
    PFN_vkBindBufferMemory vkBindBufferMemory;
    PFN_vkBindImageMemory vkBindImageMemory;
    PFN_vkCmdBeginQuery vkCmdBeginQuery;
    PFN_vkCmdBeginRenderPass vkCmdBeginRenderPass;
    PFN_vkCmdBeginTransformFeedbackEXT vkCmdBeginTransformFeedbackEXT;
    PFN_vkCmdBindDescriptorSets vkCmdBindDescriptorSets;
    PFN_vkCmdBindIndexBuffer vkCmdBindIndexBuffer;
    PFN_vkCmdBindPipeline vkCmdBindPipeline;
    PFN_vkCmdBindTransformFeedbackBuffersEXT vkCmdBindTransformFeedbackBuffersEXT;
    PFN_vkCmdBindVertexBuffers vkCmdBindVertexBuffers;
    PFN_vkCmdBlitImage vkCmdBlitImage;
    PFN_vkCmdClearAttachments vkCmdClearAttachments;
    PFN_vkCmdCopyBuffer vkCmdCopyBuffer;
    PFN_vkCmdCopyBufferToImage vkCmdCopyBufferToImage;
    PFN_vkCmdCopyImage vkCmdCopyImage;
    PFN_vkCmdCopyImageToBuffer vkCmdCopyImageToBuffer;
    PFN_vkCmdDispatch vkCmdDispatch;
    PFN_vkCmdDraw vkCmdDraw;
    PFN_vkCmdDrawIndexed vkCmdDrawIndexed;
    PFN_vkCmdEndQuery vkCmdEndQuery;
    PFN_vkCmdEndRenderPass vkCmdEndRenderPass;
    PFN_vkCmdEndTransformFeedbackEXT vkCmdEndTransformFeedbackEXT;
    PFN_vkCmdFillBuffer vkCmdFillBuffer;
    PFN_vkCmdPipelineBarrier vkCmdPipelineBarrier;
    PFN_vkCmdPushConstants vkCmdPushConstants;
    PFN_vkCmdSetBlendConstants vkCmdSetBlendConstants;
    PFN_vkCmdSetDepthBias vkCmdSetDepthBias;
    PFN_vkCmdSetDepthBounds vkCmdSetDepthBounds;
    PFN_vkCmdSetEvent vkCmdSetEvent;
    PFN_vkCmdSetScissor vkCmdSetScissor;
    PFN_vkCmdSetStencilCompareMask vkCmdSetStencilCompareMask;
    PFN_vkCmdSetStencilReference vkCmdSetStencilReference;
    PFN_vkCmdSetStencilWriteMask vkCmdSetStencilWriteMask;
    PFN_vkCmdSetViewport vkCmdSetViewport;
    PFN_vkCmdWaitEvents vkCmdWaitEvents;
    PFN_vkCmdBindVertexBuffers2EXT vkCmdBindVertexBuffers2EXT;
    PFN_vkCmdSetCullModeEXT vkCmdSetCullModeEXT;
    PFN_vkCmdSetDepthBoundsTestEnableEXT vkCmdSetDepthBoundsTestEnableEXT;
    PFN_vkCmdSetDepthCompareOpEXT vkCmdSetDepthCompareOpEXT;
    PFN_vkCmdSetDepthTestEnableEXT vkCmdSetDepthTestEnableEXT;
    PFN_vkCmdSetDepthWriteEnableEXT vkCmdSetDepthWriteEnableEXT;
    PFN_vkCmdSetFrontFaceEXT vkCmdSetFrontFaceEXT;
    PFN_vkCmdSetPrimitiveTopologyEXT vkCmdSetPrimitiveTopologyEXT;
    PFN_vkCmdSetStencilOpEXT vkCmdSetStencilOpEXT;
    PFN_vkCmdSetStencilTestEnableEXT vkCmdSetStencilTestEnableEXT;
    PFN_vkCreateBuffer vkCreateBuffer;
    PFN_vkCreateBufferView vkCreateBufferView;
    PFN_vkCreateCommandPool vkCreateCommandPool;
    PFN_vkCreateComputePipelines vkCreateComputePipelines;
    PFN_vkCreateDescriptorPool vkCreateDescriptorPool;
    PFN_vkCreateDescriptorSetLayout vkCreateDescriptorSetLayout;
    PFN_vkCreateDescriptorUpdateTemplateKHR vkCreateDescriptorUpdateTemplateKHR;
    PFN_vkCreateEvent vkCreateEvent;
    PFN_vkCreateFence vkCreateFence;
    PFN_vkCreateFramebuffer vkCreateFramebuffer;
    PFN_vkCreateGraphicsPipelines vkCreateGraphicsPipelines;
    PFN_vkCreateImage vkCreateImage;
    PFN_vkCreateImageView vkCreateImageView;
    PFN_vkCreatePipelineLayout vkCreatePipelineLayout;
    PFN_vkCreateQueryPool vkCreateQueryPool;
    PFN_vkCreateRenderPass vkCreateRenderPass;
    PFN_vkCreateSampler vkCreateSampler;
    PFN_vkCreateSemaphore vkCreateSemaphore;
    PFN_vkCreateShaderModule vkCreateShaderModule;
    PFN_vkCreateSwapchainKHR vkCreateSwapchainKHR;
    PFN_vkDestroyBuffer vkDestroyBuffer;
    PFN_vkDestroyBufferView vkDestroyBufferView;
    PFN_vkDestroyCommandPool vkDestroyCommandPool;
    PFN_vkDestroyDescriptorPool vkDestroyDescriptorPool;
    PFN_vkDestroyDescriptorSetLayout vkDestroyDescriptorSetLayout;
    PFN_vkDestroyDescriptorUpdateTemplateKHR vkDestroyDescriptorUpdateTemplateKHR;
    PFN_vkDestroyEvent vkDestroyEvent;
    PFN_vkDestroyFence vkDestroyFence;
    PFN_vkDestroyFramebuffer vkDestroyFramebuffer;
    PFN_vkDestroyImage vkDestroyImage;
    PFN_vkDestroyImageView vkDestroyImageView;
    PFN_vkDestroyPipeline vkDestroyPipeline;
    PFN_vkDestroyPipelineLayout vkDestroyPipelineLayout;
    PFN_vkDestroyQueryPool vkDestroyQueryPool;
    PFN_vkDestroyRenderPass vkDestroyRenderPass;
    PFN_vkDestroySampler vkDestroySampler;
    PFN_vkDestroySemaphore vkDestroySemaphore;
    PFN_vkDestroyShaderModule vkDestroyShaderModule;
    PFN_vkDestroySwapchainKHR vkDestroySwapchainKHR;
    PFN_vkDeviceWaitIdle vkDeviceWaitIdle;
    PFN_vkEndCommandBuffer vkEndCommandBuffer;
    PFN_vkFreeCommandBuffers vkFreeCommandBuffers;
    PFN_vkFreeDescriptorSets vkFreeDescriptorSets;
    PFN_vkFreeMemory vkFreeMemory;
    PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements;
    PFN_vkGetDeviceQueue vkGetDeviceQueue;
    PFN_vkGetEventStatus vkGetEventStatus;
    PFN_vkGetFenceStatus vkGetFenceStatus;
    PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements;
    PFN_vkGetQueryPoolResults vkGetQueryPoolResults;
    PFN_vkMapMemory vkMapMemory;
    PFN_vkQueueSubmit vkQueueSubmit;
    PFN_vkResetFences vkResetFences;
    PFN_vkResetQueryPoolEXT vkResetQueryPoolEXT;
    PFN_vkUnmapMemory vkUnmapMemory;
    PFN_vkUpdateDescriptorSetWithTemplateKHR vkUpdateDescriptorSetWithTemplateKHR;
    PFN_vkUpdateDescriptorSets vkUpdateDescriptorSets;
    PFN_vkWaitForFences vkWaitForFences;
};

/// Loads instance-agnostic function pointers.
/// @return True on success, false on error.
bool Load(InstanceDispatch&) noexcept;

/// Loads instance function pointers.
/// @return True on success, false on error.
bool Load(VkInstance, InstanceDispatch&) noexcept;
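
// Bootstrapping sketch (illustrative; the "extensions" list and error handling are assumptions
// for the example). Instance-agnostic pointers are loaded first, the remaining instance-level
// pointers once an instance exists:
//
//     InstanceDispatch dld{};
//     if (!Load(dld)) {
//         return false; // Vulkan loader or vkGetInstanceProcAddr unavailable
//     }
//     const std::vector<const char*> extensions{VK_EXT_DEBUG_UTILS_EXTENSION_NAME};
//     Instance instance = Instance::Create({}, extensions, dld);
//     if (!instance || !Load(*instance, dld)) {
//         return false; // instance creation or function pointer loading failed
//     }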

void Destroy(VkInstance, const InstanceDispatch&) noexcept;
void Destroy(VkDevice, const InstanceDispatch&) noexcept;

void Destroy(VkDevice, VkBuffer, const DeviceDispatch&) noexcept;
void Destroy(VkDevice, VkBufferView, const DeviceDispatch&) noexcept;
void Destroy(VkDevice, VkCommandPool, const DeviceDispatch&) noexcept;
void Destroy(VkDevice, VkDescriptorPool, const DeviceDispatch&) noexcept;
void Destroy(VkDevice, VkDescriptorSetLayout, const DeviceDispatch&) noexcept;
void Destroy(VkDevice, VkDescriptorUpdateTemplateKHR, const DeviceDispatch&) noexcept;
void Destroy(VkDevice, VkDeviceMemory, const DeviceDispatch&) noexcept;
void Destroy(VkDevice, VkEvent, const DeviceDispatch&) noexcept;
void Destroy(VkDevice, VkFence, const DeviceDispatch&) noexcept;
void Destroy(VkDevice, VkFramebuffer, const DeviceDispatch&) noexcept;
void Destroy(VkDevice, VkImage, const DeviceDispatch&) noexcept;
void Destroy(VkDevice, VkImageView, const DeviceDispatch&) noexcept;
void Destroy(VkDevice, VkPipeline, const DeviceDispatch&) noexcept;
void Destroy(VkDevice, VkPipelineLayout, const DeviceDispatch&) noexcept;
void Destroy(VkDevice, VkQueryPool, const DeviceDispatch&) noexcept;
void Destroy(VkDevice, VkRenderPass, const DeviceDispatch&) noexcept;
void Destroy(VkDevice, VkSampler, const DeviceDispatch&) noexcept;
void Destroy(VkDevice, VkSwapchainKHR, const DeviceDispatch&) noexcept;
void Destroy(VkDevice, VkSemaphore, const DeviceDispatch&) noexcept;
void Destroy(VkDevice, VkShaderModule, const DeviceDispatch&) noexcept;
void Destroy(VkInstance, VkDebugUtilsMessengerEXT, const InstanceDispatch&) noexcept;
void Destroy(VkInstance, VkSurfaceKHR, const InstanceDispatch&) noexcept;

VkResult Free(VkDevice, VkDescriptorPool, Span<VkDescriptorSet>, const DeviceDispatch&) noexcept;
VkResult Free(VkDevice, VkCommandPool, Span<VkCommandBuffer>, const DeviceDispatch&) noexcept;

template <typename Type, typename OwnerType, typename Dispatch>
class Handle;

/// Handle with an owning type.
/// Analogue to std::unique_ptr.
template <typename Type, typename OwnerType, typename Dispatch>
class Handle {
public:
    /// Construct a handle and hold its ownership.
    explicit Handle(Type handle_, OwnerType owner_, const Dispatch& dld_) noexcept
        : handle{handle_}, owner{owner_}, dld{&dld_} {}

    /// Construct an empty handle.
    Handle() = default;

    /// Copying Vulkan objects is not supported and will never be.
    Handle(const Handle&) = delete;
    Handle& operator=(const Handle&) = delete;

    /// Construct a handle transferring the ownership from another handle.
    Handle(Handle&& rhs) noexcept
        : handle{std::exchange(rhs.handle, nullptr)}, owner{rhs.owner}, dld{rhs.dld} {}

    /// Assign the current handle transferring the ownership from another handle.
    /// Destroys any previously held object.
    Handle& operator=(Handle&& rhs) noexcept {
        Release();
        handle = std::exchange(rhs.handle, nullptr);
        owner = rhs.owner;
        dld = rhs.dld;
        return *this;
    }

    /// Destroys the current handle if it existed.
    ~Handle() noexcept {
        Release();
    }

    /// Destroys any held object.
    void reset() noexcept {
        Release();
        handle = nullptr;
    }

    /// Returns the address of the held object.
    /// Intended for Vulkan structures that expect a pointer to an array.
    const Type* address() const noexcept {
        return std::addressof(handle);
    }

    /// Returns the held Vulkan handle.
    Type operator*() const noexcept {
        return handle;
    }

    /// Returns true when there's a held object.
    explicit operator bool() const noexcept {
        return handle != nullptr;
    }

protected:
    Type handle = nullptr;
    OwnerType owner = nullptr;
    const Dispatch* dld = nullptr;

private:
    /// Destroys the held object if it exists.
    void Release() noexcept {
        if (handle) {
            Destroy(owner, handle, *dld);
        }
    }
};
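
// Ownership sketch for Handle (illustrative; "device", "image_view_ci" and "framebuffer_ci"
// are assumptions for the example). Handles are move-only and release the Vulkan object on
// destruction; address() feeds structures that expect a pointer to an array of handles:
//
//     ImageView image_view = device.CreateImageView(image_view_ci);
//     VkFramebufferCreateInfo framebuffer_ci{};
//     framebuffer_ci.attachmentCount = 1;
//     framebuffer_ci.pAttachments = image_view.address(); // pointer to one VkImageView
//     Framebuffer framebuffer = device.CreateFramebuffer(framebuffer_ci);
//     ImageView moved = std::move(image_view); // ownership transferred, image_view is now empty
//     moved.reset();                           // explicit early destruction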

/// Dummy type used to specify a handle has no owner.
struct NoOwner {};

/// Handle without an owning type.
/// Analogue to std::unique_ptr.
template <typename Type, typename Dispatch>
class Handle<Type, NoOwner, Dispatch> {
public:
    /// Construct a handle and hold its ownership.
    explicit Handle(Type handle_, const Dispatch& dld_) noexcept : handle{handle_}, dld{&dld_} {}

    /// Construct an empty handle.
    Handle() noexcept = default;

    /// Copying Vulkan objects is not supported and will never be.
    Handle(const Handle&) = delete;
    Handle& operator=(const Handle&) = delete;

    /// Construct a handle transferring ownership from another handle.
    Handle(Handle&& rhs) noexcept : handle{std::exchange(rhs.handle, nullptr)}, dld{rhs.dld} {}

    /// Assign the current handle transferring the ownership from another handle.
    /// Destroys any previously held object.
    Handle& operator=(Handle&& rhs) noexcept {
        Release();
        handle = std::exchange(rhs.handle, nullptr);
        dld = rhs.dld;
        return *this;
    }

    /// Destroys the current handle if it existed.
    ~Handle() noexcept {
        Release();
    }

    /// Destroys any held object.
    void reset() noexcept {
        Release();
        handle = nullptr;
    }

    /// Returns the address of the held object.
    /// Intended for Vulkan structures that expect a pointer to an array.
    const Type* address() const noexcept {
        return std::addressof(handle);
    }

    /// Returns the held Vulkan handle.
    Type operator*() const noexcept {
        return handle;
    }

    /// Returns true when there's a held object.
    operator bool() const noexcept {
        return handle != nullptr;
    }

protected:
    Type handle = nullptr;
    const Dispatch* dld = nullptr;

private:
    /// Destroys the held object if it exists.
    void Release() noexcept {
        if (handle) {
            Destroy(handle, *dld);
        }
    }
};

/// Array of a pool allocation.
/// Analogue to std::vector.
template <typename AllocationType, typename PoolType>
class PoolAllocations {
public:
    /// Construct an empty allocation.
    PoolAllocations() = default;

    /// Construct an allocation. Errors are reported through IsOutOfPoolMemory().
    explicit PoolAllocations(std::unique_ptr<AllocationType[]> allocations, std::size_t num,
                             VkDevice device, PoolType pool, const DeviceDispatch& dld) noexcept
        : allocations{std::move(allocations)}, num{num}, device{device}, pool{pool}, dld{&dld} {}

    /// Copying Vulkan allocations is not supported and will never be.
    PoolAllocations(const PoolAllocations&) = delete;
    PoolAllocations& operator=(const PoolAllocations&) = delete;

    /// Construct an allocation transferring ownership from another allocation.
    PoolAllocations(PoolAllocations&& rhs) noexcept
        : allocations{std::move(rhs.allocations)}, num{rhs.num}, device{rhs.device}, pool{rhs.pool},
          dld{rhs.dld} {}

    /// Assign an allocation transferring ownership from another allocation.
    /// Releases any previously held allocation.
    PoolAllocations& operator=(PoolAllocations&& rhs) noexcept {
        Release();
        allocations = std::move(rhs.allocations);
        num = rhs.num;
        device = rhs.device;
        pool = rhs.pool;
        dld = rhs.dld;
        return *this;
    }

    /// Destroys any held allocation.
    ~PoolAllocations() {
        Release();
    }

    /// Returns the number of allocations.
    std::size_t size() const noexcept {
        return num;
    }

    /// Returns a pointer to the array of allocations.
    AllocationType const* data() const noexcept {
        return allocations.get();
    }

    /// Returns the allocation at the specified index.
    /// @pre index < size()
    AllocationType operator[](std::size_t index) const noexcept {
        return allocations[index];
    }

    /// True when a pool fails to construct.
    bool IsOutOfPoolMemory() const noexcept {
        return !device;
    }

private:
    /// Destroys the held allocations if they exist.
    void Release() noexcept {
        if (!allocations) {
            return;
        }
        const Span<AllocationType> span(allocations.get(), num);
        const VkResult result = Free(device, pool, span, *dld);
        // There's no way to report errors from a destructor.
        if (result != VK_SUCCESS) {
            std::terminate();
        }
    }

    std::unique_ptr<AllocationType[]> allocations;
    std::size_t num = 0;
    VkDevice device = nullptr;
    PoolType pool = nullptr;
    const DeviceDispatch* dld = nullptr;
};
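
// Allocation sketch (illustrative; "command_pool" and the DeviceDispatch "dld" are assumptions
// for the example). CommandPool::Allocate, declared below, returns a PoolAllocations of
// VkCommandBuffer; failure is reported through IsOutOfPoolMemory() rather than an exception:
//
//     CommandBuffers cmdbufs = command_pool.Allocate(4);
//     if (cmdbufs.IsOutOfPoolMemory()) {
//         // Grow or recycle the pool and retry.
//     }
//     for (std::size_t i = 0; i < cmdbufs.size(); ++i) {
//         CommandBuffer cmdbuf(cmdbufs[i], dld); // wrap each VkCommandBuffer for recording
//     }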

using BufferView = Handle<VkBufferView, VkDevice, DeviceDispatch>;
using DebugCallback = Handle<VkDebugUtilsMessengerEXT, VkInstance, InstanceDispatch>;
using DescriptorSetLayout = Handle<VkDescriptorSetLayout, VkDevice, DeviceDispatch>;
using DescriptorUpdateTemplateKHR = Handle<VkDescriptorUpdateTemplateKHR, VkDevice, DeviceDispatch>;
using Framebuffer = Handle<VkFramebuffer, VkDevice, DeviceDispatch>;
using ImageView = Handle<VkImageView, VkDevice, DeviceDispatch>;
using Pipeline = Handle<VkPipeline, VkDevice, DeviceDispatch>;
using PipelineLayout = Handle<VkPipelineLayout, VkDevice, DeviceDispatch>;
using QueryPool = Handle<VkQueryPool, VkDevice, DeviceDispatch>;
using RenderPass = Handle<VkRenderPass, VkDevice, DeviceDispatch>;
using Sampler = Handle<VkSampler, VkDevice, DeviceDispatch>;
using Semaphore = Handle<VkSemaphore, VkDevice, DeviceDispatch>;
using ShaderModule = Handle<VkShaderModule, VkDevice, DeviceDispatch>;
using SurfaceKHR = Handle<VkSurfaceKHR, VkInstance, InstanceDispatch>;

using DescriptorSets = PoolAllocations<VkDescriptorSet, VkDescriptorPool>;
using CommandBuffers = PoolAllocations<VkCommandBuffer, VkCommandPool>;

/// Vulkan instance owning handle.
class Instance : public Handle<VkInstance, NoOwner, InstanceDispatch> {
    using Handle<VkInstance, NoOwner, InstanceDispatch>::Handle;

public:
    /// Creates a Vulkan instance. Use "operator bool" for error handling.
    static Instance Create(Span<const char*> layers, Span<const char*> extensions,
                           InstanceDispatch& dld) noexcept;

    /// Enumerates physical devices.
    /// @return Physical devices on success, an empty optional on failure.
    std::optional<std::vector<VkPhysicalDevice>> EnumeratePhysicalDevices();

    /// Tries to create a debug callback messenger. Returns an empty handle on failure.
    DebugCallback TryCreateDebugCallback(PFN_vkDebugUtilsMessengerCallbackEXT callback) noexcept;
};
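
// Device selection sketch (illustrative; "DebugMessengerCallback" is an assumption for the
// example, and "instance" and "dld" are the handle and dispatch table created earlier):
//
//     DebugCallback callback = instance.TryCreateDebugCallback(DebugMessengerCallback);
//     const auto physical_devices = instance.EnumeratePhysicalDevices();
//     if (!physical_devices) {
//         return false; // enumeration failed
//     }
//     const PhysicalDevice physical_device((*physical_devices).front(), dld);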

class Queue {
public:
    /// Construct an empty queue handle.
    constexpr Queue() noexcept = default;

    /// Construct a queue handle.
    constexpr Queue(VkQueue queue, const DeviceDispatch& dld) noexcept : queue{queue}, dld{&dld} {}

    VkResult Submit(Span<VkSubmitInfo> submit_infos, VkFence fence) const noexcept {
        return dld->vkQueueSubmit(queue, submit_infos.size(), submit_infos.data(), fence);
    }

    VkResult Present(const VkPresentInfoKHR& present_info) const noexcept {
        return dld->vkQueuePresentKHR(queue, &present_info);
    }

private:
    VkQueue queue = nullptr;
    const DeviceDispatch* dld = nullptr;
};
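
// Submission sketch (illustrative; "queue", "cmdbuf", "render_semaphore" and "fence" are
// assumptions for the example). Span fills in the array counts, and address() provides the
// single-element array pointers:
//
//     VkSubmitInfo submit_info{};
//     submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
//     submit_info.commandBufferCount = 1;
//     submit_info.pCommandBuffers = cmdbuf.address();
//     submit_info.signalSemaphoreCount = 1;
//     submit_info.pSignalSemaphores = render_semaphore.address();
//     const VkResult result = queue.Submit(submit_info, *fence); // single-element Span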

class Buffer : public Handle<VkBuffer, VkDevice, DeviceDispatch> {
    using Handle<VkBuffer, VkDevice, DeviceDispatch>::Handle;

public:
    /// Attaches a memory allocation.
    void BindMemory(VkDeviceMemory memory, VkDeviceSize offset) const;
};

class Image : public Handle<VkImage, VkDevice, DeviceDispatch> {
    using Handle<VkImage, VkDevice, DeviceDispatch>::Handle;

public:
    /// Attaches a memory allocation.
    void BindMemory(VkDeviceMemory memory, VkDeviceSize offset) const;
};

class DeviceMemory : public Handle<VkDeviceMemory, VkDevice, DeviceDispatch> {
    using Handle<VkDeviceMemory, VkDevice, DeviceDispatch>::Handle;

public:
    u8* Map(VkDeviceSize offset, VkDeviceSize size) const {
        void* data;
        Check(dld->vkMapMemory(owner, handle, offset, size, 0, &data));
        return static_cast<u8*>(data);
    }

    void Unmap() const noexcept {
        dld->vkUnmapMemory(owner, handle);
    }
};
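
// Mapping sketch (illustrative; "staging_memory" and "upload_data" are assumptions for the
// example). Map throws through Check() on failure and returns a byte pointer into the mapping:
//
//     u8* const mapped = staging_memory.Map(0, upload_data.size());
//     std::memcpy(mapped, upload_data.data(), upload_data.size());
//     staging_memory.Unmap();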

class Fence : public Handle<VkFence, VkDevice, DeviceDispatch> {
    using Handle<VkFence, VkDevice, DeviceDispatch>::Handle;

public:
    VkResult Wait(u64 timeout = std::numeric_limits<u64>::max()) const noexcept {
        return dld->vkWaitForFences(owner, 1, &handle, true, timeout);
    }

    VkResult GetStatus() const noexcept {
        return dld->vkGetFenceStatus(owner, handle);
    }

    void Reset() const {
        Check(dld->vkResetFences(owner, 1, &handle));
    }
};
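
// Synchronization sketch (illustrative; "device", "fence_ci", "queue" and "submit_info" are
// assumptions for the example):
//
//     Fence fence = device.CreateFence(fence_ci);
//     queue.Submit(submit_info, *fence);
//     if (fence.Wait() != VK_SUCCESS) { // blocks with an effectively infinite default timeout
//         return false;
//     }
//     fence.Reset(); // make the fence reusable for the next submission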

class DescriptorPool : public Handle<VkDescriptorPool, VkDevice, DeviceDispatch> {
    using Handle<VkDescriptorPool, VkDevice, DeviceDispatch>::Handle;

public:
    DescriptorSets Allocate(const VkDescriptorSetAllocateInfo& ai) const;
};

class CommandPool : public Handle<VkCommandPool, VkDevice, DeviceDispatch> {
    using Handle<VkCommandPool, VkDevice, DeviceDispatch>::Handle;

public:
    CommandBuffers Allocate(std::size_t num_buffers,
                            VkCommandBufferLevel level = VK_COMMAND_BUFFER_LEVEL_PRIMARY) const;
};

class SwapchainKHR : public Handle<VkSwapchainKHR, VkDevice, DeviceDispatch> {
    using Handle<VkSwapchainKHR, VkDevice, DeviceDispatch>::Handle;

public:
    std::vector<VkImage> GetImages() const;
};

class Event : public Handle<VkEvent, VkDevice, DeviceDispatch> {
    using Handle<VkEvent, VkDevice, DeviceDispatch>::Handle;

public:
    VkResult GetStatus() const noexcept {
        return dld->vkGetEventStatus(owner, handle);
    }
};

class Device : public Handle<VkDevice, NoOwner, DeviceDispatch> {
    using Handle<VkDevice, NoOwner, DeviceDispatch>::Handle;

public:
    static Device Create(VkPhysicalDevice physical_device, Span<VkDeviceQueueCreateInfo> queues_ci,
                         Span<const char*> enabled_extensions, const void* next,
                         DeviceDispatch& dld) noexcept;

    Queue GetQueue(u32 family_index) const noexcept;

    Buffer CreateBuffer(const VkBufferCreateInfo& ci) const;

    BufferView CreateBufferView(const VkBufferViewCreateInfo& ci) const;

    Image CreateImage(const VkImageCreateInfo& ci) const;

    ImageView CreateImageView(const VkImageViewCreateInfo& ci) const;

    Semaphore CreateSemaphore() const;

    Fence CreateFence(const VkFenceCreateInfo& ci) const;

    DescriptorPool CreateDescriptorPool(const VkDescriptorPoolCreateInfo& ci) const;

    RenderPass CreateRenderPass(const VkRenderPassCreateInfo& ci) const;

    DescriptorSetLayout CreateDescriptorSetLayout(const VkDescriptorSetLayoutCreateInfo& ci) const;

    PipelineLayout CreatePipelineLayout(const VkPipelineLayoutCreateInfo& ci) const;

    Pipeline CreateGraphicsPipeline(const VkGraphicsPipelineCreateInfo& ci) const;

    Pipeline CreateComputePipeline(const VkComputePipelineCreateInfo& ci) const;

    Sampler CreateSampler(const VkSamplerCreateInfo& ci) const;

    Framebuffer CreateFramebuffer(const VkFramebufferCreateInfo& ci) const;

    CommandPool CreateCommandPool(const VkCommandPoolCreateInfo& ci) const;

    DescriptorUpdateTemplateKHR CreateDescriptorUpdateTemplateKHR(
        const VkDescriptorUpdateTemplateCreateInfoKHR& ci) const;

    QueryPool CreateQueryPool(const VkQueryPoolCreateInfo& ci) const;

    ShaderModule CreateShaderModule(const VkShaderModuleCreateInfo& ci) const;

    Event CreateEvent() const;

    SwapchainKHR CreateSwapchainKHR(const VkSwapchainCreateInfoKHR& ci) const;

    DeviceMemory TryAllocateMemory(const VkMemoryAllocateInfo& ai) const noexcept;

    DeviceMemory AllocateMemory(const VkMemoryAllocateInfo& ai) const;

    VkMemoryRequirements GetBufferMemoryRequirements(VkBuffer buffer) const noexcept;

    VkMemoryRequirements GetImageMemoryRequirements(VkImage image) const noexcept;

    void UpdateDescriptorSets(Span<VkWriteDescriptorSet> writes,
                              Span<VkCopyDescriptorSet> copies) const noexcept;

    void UpdateDescriptorSet(VkDescriptorSet set, VkDescriptorUpdateTemplateKHR update_template,
                             const void* data) const noexcept {
        dld->vkUpdateDescriptorSetWithTemplateKHR(handle, set, update_template, data);
    }

    VkResult AcquireNextImageKHR(VkSwapchainKHR swapchain, u64 timeout, VkSemaphore semaphore,
                                 VkFence fence, u32* image_index) const noexcept {
        return dld->vkAcquireNextImageKHR(handle, swapchain, timeout, semaphore, fence,
                                          image_index);
    }

    VkResult WaitIdle() const noexcept {
        return dld->vkDeviceWaitIdle(handle);
    }

    void ResetQueryPoolEXT(VkQueryPool query_pool, u32 first, u32 count) const noexcept {
        dld->vkResetQueryPoolEXT(handle, query_pool, first, count);
    }

    VkResult GetQueryResults(VkQueryPool query_pool, u32 first, u32 count, std::size_t data_size,
                             void* data, VkDeviceSize stride,
                             VkQueryResultFlags flags) const noexcept {
        return dld->vkGetQueryPoolResults(handle, query_pool, first, count, data_size, data,
                                          stride, flags);
    }
};
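
// Resource creation sketch (illustrative; "buffer_ci" and "FindMemoryType" are assumptions for
// the example). The creation helpers are expected to report failure through vk::Exception, with
// TryAllocateMemory as the noexcept alternative that returns an empty handle instead:
//
//     Buffer buffer = device.CreateBuffer(buffer_ci);
//     const VkMemoryRequirements requirements = device.GetBufferMemoryRequirements(*buffer);
//     VkMemoryAllocateInfo ai{};
//     ai.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
//     ai.allocationSize = requirements.size;
//     ai.memoryTypeIndex = FindMemoryType(requirements.memoryTypeBits);
//     DeviceMemory memory = device.AllocateMemory(ai);
//     buffer.BindMemory(*memory, 0);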

class PhysicalDevice {
public:
    constexpr PhysicalDevice() noexcept = default;

    constexpr PhysicalDevice(VkPhysicalDevice physical_device, const InstanceDispatch& dld) noexcept
        : physical_device{physical_device}, dld{&dld} {}

    constexpr operator VkPhysicalDevice() const noexcept {
        return physical_device;
    }

    VkPhysicalDeviceProperties GetProperties() const noexcept;

    void GetProperties2KHR(VkPhysicalDeviceProperties2KHR&) const noexcept;

    VkPhysicalDeviceFeatures GetFeatures() const noexcept;

    void GetFeatures2KHR(VkPhysicalDeviceFeatures2KHR&) const noexcept;

    VkFormatProperties GetFormatProperties(VkFormat) const noexcept;

    std::vector<VkExtensionProperties> EnumerateDeviceExtensionProperties() const;

    std::vector<VkQueueFamilyProperties> GetQueueFamilyProperties() const;

    bool GetSurfaceSupportKHR(u32 queue_family_index, VkSurfaceKHR) const;

    VkSurfaceCapabilitiesKHR GetSurfaceCapabilitiesKHR(VkSurfaceKHR) const;

    std::vector<VkSurfaceFormatKHR> GetSurfaceFormatsKHR(VkSurfaceKHR) const;

    std::vector<VkPresentModeKHR> GetSurfacePresentModesKHR(VkSurfaceKHR) const;

    VkPhysicalDeviceMemoryProperties GetMemoryProperties() const noexcept;

private:
    VkPhysicalDevice physical_device = nullptr;
    const InstanceDispatch* dld = nullptr;
};

class CommandBuffer {
public:
    CommandBuffer() noexcept = default;

    explicit CommandBuffer(VkCommandBuffer handle, const DeviceDispatch& dld) noexcept
        : handle{handle}, dld{&dld} {}

    const VkCommandBuffer* address() const noexcept {
        return &handle;
    }

    void Begin(const VkCommandBufferBeginInfo& begin_info) const {
        Check(dld->vkBeginCommandBuffer(handle, &begin_info));
    }

    void End() const {
        Check(dld->vkEndCommandBuffer(handle));
    }

    void BeginRenderPass(const VkRenderPassBeginInfo& renderpass_bi,
                         VkSubpassContents contents) const noexcept {
        dld->vkCmdBeginRenderPass(handle, &renderpass_bi, contents);
    }

    void EndRenderPass() const noexcept {
        dld->vkCmdEndRenderPass(handle);
    }

    void BeginQuery(VkQueryPool query_pool, u32 query, VkQueryControlFlags flags) const noexcept {
        dld->vkCmdBeginQuery(handle, query_pool, query, flags);
    }

    void EndQuery(VkQueryPool query_pool, u32 query) const noexcept {
        dld->vkCmdEndQuery(handle, query_pool, query);
    }

    void BindDescriptorSets(VkPipelineBindPoint bind_point, VkPipelineLayout layout, u32 first,
                            Span<VkDescriptorSet> sets, Span<u32> dynamic_offsets) const noexcept {
        dld->vkCmdBindDescriptorSets(handle, bind_point, layout, first, sets.size(), sets.data(),
                                     dynamic_offsets.size(), dynamic_offsets.data());
    }

    void BindPipeline(VkPipelineBindPoint bind_point, VkPipeline pipeline) const noexcept {
        dld->vkCmdBindPipeline(handle, bind_point, pipeline);
    }

    void BindIndexBuffer(VkBuffer buffer, VkDeviceSize offset,
                         VkIndexType index_type) const noexcept {
        dld->vkCmdBindIndexBuffer(handle, buffer, offset, index_type);
    }

    void BindVertexBuffers(u32 first, u32 count, const VkBuffer* buffers,
                           const VkDeviceSize* offsets) const noexcept {
        dld->vkCmdBindVertexBuffers(handle, first, count, buffers, offsets);
    }

    void BindVertexBuffer(u32 binding, VkBuffer buffer, VkDeviceSize offset) const noexcept {
        BindVertexBuffers(binding, 1, &buffer, &offset);
    }

    void Draw(u32 vertex_count, u32 instance_count, u32 first_vertex,
              u32 first_instance) const noexcept {
        dld->vkCmdDraw(handle, vertex_count, instance_count, first_vertex, first_instance);
    }

    void DrawIndexed(u32 index_count, u32 instance_count, u32 first_index, u32 vertex_offset,
                     u32 first_instance) const noexcept {
        dld->vkCmdDrawIndexed(handle, index_count, instance_count, first_index, vertex_offset,
                              first_instance);
    }

    void ClearAttachments(Span<VkClearAttachment> attachments,
                          Span<VkClearRect> rects) const noexcept {
        dld->vkCmdClearAttachments(handle, attachments.size(), attachments.data(), rects.size(),
                                   rects.data());
    }

    void BlitImage(VkImage src_image, VkImageLayout src_layout, VkImage dst_image,
                   VkImageLayout dst_layout, Span<VkImageBlit> regions,
                   VkFilter filter) const noexcept {
        dld->vkCmdBlitImage(handle, src_image, src_layout, dst_image, dst_layout, regions.size(),
                            regions.data(), filter);
    }

    void Dispatch(u32 x, u32 y, u32 z) const noexcept {
        dld->vkCmdDispatch(handle, x, y, z);
    }

    void PipelineBarrier(VkPipelineStageFlags src_stage_mask, VkPipelineStageFlags dst_stage_mask,
                         VkDependencyFlags dependency_flags, Span<VkMemoryBarrier> memory_barriers,
                         Span<VkBufferMemoryBarrier> buffer_barriers,
                         Span<VkImageMemoryBarrier> image_barriers) const noexcept {
        dld->vkCmdPipelineBarrier(handle, src_stage_mask, dst_stage_mask, dependency_flags,
                                  memory_barriers.size(), memory_barriers.data(),
                                  buffer_barriers.size(), buffer_barriers.data(),
                                  image_barriers.size(), image_barriers.data());
    }

    void CopyBufferToImage(VkBuffer src_buffer, VkImage dst_image, VkImageLayout dst_image_layout,
                           Span<VkBufferImageCopy> regions) const noexcept {
        dld->vkCmdCopyBufferToImage(handle, src_buffer, dst_image, dst_image_layout, regions.size(),
                                    regions.data());
    }

    void CopyBuffer(VkBuffer src_buffer, VkBuffer dst_buffer,
                    Span<VkBufferCopy> regions) const noexcept {
        dld->vkCmdCopyBuffer(handle, src_buffer, dst_buffer, regions.size(), regions.data());
    }

    void CopyImage(VkImage src_image, VkImageLayout src_layout, VkImage dst_image,
                   VkImageLayout dst_layout, Span<VkImageCopy> regions) const noexcept {
        dld->vkCmdCopyImage(handle, src_image, src_layout, dst_image, dst_layout, regions.size(),
                            regions.data());
    }

    void CopyImageToBuffer(VkImage src_image, VkImageLayout src_layout, VkBuffer dst_buffer,
                           Span<VkBufferImageCopy> regions) const noexcept {
        dld->vkCmdCopyImageToBuffer(handle, src_image, src_layout, dst_buffer, regions.size(),
                                    regions.data());
    }

    void FillBuffer(VkBuffer dst_buffer, VkDeviceSize dst_offset, VkDeviceSize size,
                    u32 data) const noexcept {
        dld->vkCmdFillBuffer(handle, dst_buffer, dst_offset, size, data);
    }

    void PushConstants(VkPipelineLayout layout, VkShaderStageFlags flags, u32 offset, u32 size,
                       const void* values) const noexcept {
        dld->vkCmdPushConstants(handle, layout, flags, offset, size, values);
    }

    void SetViewport(u32 first, Span<VkViewport> viewports) const noexcept {
        dld->vkCmdSetViewport(handle, first, viewports.size(), viewports.data());
    }

    void SetScissor(u32 first, Span<VkRect2D> scissors) const noexcept {
        dld->vkCmdSetScissor(handle, first, scissors.size(), scissors.data());
    }

    void SetBlendConstants(const float blend_constants[4]) const noexcept {
        dld->vkCmdSetBlendConstants(handle, blend_constants);
    }

    void SetStencilCompareMask(VkStencilFaceFlags face_mask, u32 compare_mask) const noexcept {
        dld->vkCmdSetStencilCompareMask(handle, face_mask, compare_mask);
    }

    void SetStencilReference(VkStencilFaceFlags face_mask, u32 reference) const noexcept {
        dld->vkCmdSetStencilReference(handle, face_mask, reference);
    }

    void SetStencilWriteMask(VkStencilFaceFlags face_mask, u32 write_mask) const noexcept {
        dld->vkCmdSetStencilWriteMask(handle, face_mask, write_mask);
    }

    void SetDepthBias(float constant_factor, float clamp, float slope_factor) const noexcept {
        dld->vkCmdSetDepthBias(handle, constant_factor, clamp, slope_factor);
    }

    void SetDepthBounds(float min_depth_bounds, float max_depth_bounds) const noexcept {
        dld->vkCmdSetDepthBounds(handle, min_depth_bounds, max_depth_bounds);
    }

    void SetEvent(VkEvent event, VkPipelineStageFlags stage_flags) const noexcept {
        dld->vkCmdSetEvent(handle, event, stage_flags);
    }

    void WaitEvents(Span<VkEvent> events, VkPipelineStageFlags src_stage_mask,
                    VkPipelineStageFlags dst_stage_mask, Span<VkMemoryBarrier> memory_barriers,
                    Span<VkBufferMemoryBarrier> buffer_barriers,
                    Span<VkImageMemoryBarrier> image_barriers) const noexcept {
        dld->vkCmdWaitEvents(handle, events.size(), events.data(), src_stage_mask, dst_stage_mask,
                             memory_barriers.size(), memory_barriers.data(), buffer_barriers.size(),
                             buffer_barriers.data(), image_barriers.size(), image_barriers.data());
    }

    void BindVertexBuffers2EXT(u32 first_binding, u32 binding_count, const VkBuffer* buffers,
                               const VkDeviceSize* offsets, const VkDeviceSize* sizes,
                               const VkDeviceSize* strides) const noexcept {
        dld->vkCmdBindVertexBuffers2EXT(handle, first_binding, binding_count, buffers, offsets,
                                        sizes, strides);
    }

    void SetCullModeEXT(VkCullModeFlags cull_mode) const noexcept {
        dld->vkCmdSetCullModeEXT(handle, cull_mode);
    }

    void SetDepthBoundsTestEnableEXT(bool enable) const noexcept {
        dld->vkCmdSetDepthBoundsTestEnableEXT(handle, enable ? VK_TRUE : VK_FALSE);
    }

    void SetDepthCompareOpEXT(VkCompareOp compare_op) const noexcept {
        dld->vkCmdSetDepthCompareOpEXT(handle, compare_op);
    }

    void SetDepthTestEnableEXT(bool enable) const noexcept {
        dld->vkCmdSetDepthTestEnableEXT(handle, enable ? VK_TRUE : VK_FALSE);
    }

    void SetDepthWriteEnableEXT(bool enable) const noexcept {
        dld->vkCmdSetDepthWriteEnableEXT(handle, enable ? VK_TRUE : VK_FALSE);
    }

    void SetFrontFaceEXT(VkFrontFace front_face) const noexcept {
        dld->vkCmdSetFrontFaceEXT(handle, front_face);
    }

    void SetPrimitiveTopologyEXT(VkPrimitiveTopology primitive_topology) const noexcept {
        dld->vkCmdSetPrimitiveTopologyEXT(handle, primitive_topology);
    }

    void SetStencilOpEXT(VkStencilFaceFlags face_mask, VkStencilOp fail_op, VkStencilOp pass_op,
                         VkStencilOp depth_fail_op, VkCompareOp compare_op) const noexcept {
        dld->vkCmdSetStencilOpEXT(handle, face_mask, fail_op, pass_op, depth_fail_op, compare_op);
    }

    void SetStencilTestEnableEXT(bool enable) const noexcept {
        dld->vkCmdSetStencilTestEnableEXT(handle, enable ? VK_TRUE : VK_FALSE);
    }

    void BindTransformFeedbackBuffersEXT(u32 first, u32 count, const VkBuffer* buffers,
                                         const VkDeviceSize* offsets,
                                         const VkDeviceSize* sizes) const noexcept {
        dld->vkCmdBindTransformFeedbackBuffersEXT(handle, first, count, buffers, offsets, sizes);
    }

    void BeginTransformFeedbackEXT(u32 first_counter_buffer, u32 counter_buffers_count,
                                   const VkBuffer* counter_buffers,
                                   const VkDeviceSize* counter_buffer_offsets) const noexcept {
        dld->vkCmdBeginTransformFeedbackEXT(handle, first_counter_buffer, counter_buffers_count,
                                            counter_buffers, counter_buffer_offsets);
    }

    void EndTransformFeedbackEXT(u32 first_counter_buffer, u32 counter_buffers_count,
                                 const VkBuffer* counter_buffers,
                                 const VkDeviceSize* counter_buffer_offsets) const noexcept {
        dld->vkCmdEndTransformFeedbackEXT(handle, first_counter_buffer, counter_buffers_count,
                                          counter_buffers, counter_buffer_offsets);
    }

private:
    VkCommandBuffer handle;
    const DeviceDispatch* dld;
};
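
// Recording sketch (illustrative; "cmdbuf", "begin_info", "renderpass_bi", "pipeline",
// "viewport" and "scissor" are assumptions for the example):
//
//     cmdbuf.Begin(begin_info);
//     cmdbuf.BeginRenderPass(renderpass_bi, VK_SUBPASS_CONTENTS_INLINE);
//     cmdbuf.BindPipeline(VK_PIPELINE_BIND_POINT_GRAPHICS, *pipeline);
//     cmdbuf.SetViewport(0, viewport); // single-element Span
//     cmdbuf.SetScissor(0, scissor);
//     cmdbuf.Draw(3, 1, 0, 0);
//     cmdbuf.EndRenderPass();
//     cmdbuf.End();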

std::optional<std::vector<VkExtensionProperties>> EnumerateInstanceExtensionProperties(
    const InstanceDispatch& dld);

std::optional<std::vector<VkLayerProperties>> EnumerateInstanceLayerProperties(
    const InstanceDispatch& dld);

} // namespace Vulkan::vk