Repo created
This commit is contained in: parent 4af19165ec, commit 68073add76
12458 changed files with 12350765 additions and 2 deletions
1145 libs/drape/vulkan/vulkan_base_context.cpp Normal file
File diff suppressed because it is too large
226 libs/drape/vulkan/vulkan_base_context.hpp Normal file
@@ -0,0 +1,226 @@
#pragma once

#include "drape/graphics_context.hpp"
#include "drape/pointers.hpp"
#include "drape/vulkan/vulkan_gpu_program.hpp"
#include "drape/vulkan/vulkan_object_manager.hpp"
#include "drape/vulkan/vulkan_param_descriptor.hpp"
#include "drape/vulkan/vulkan_pipeline.hpp"
#include "drape/vulkan/vulkan_staging_buffer.hpp"
#include "drape/vulkan/vulkan_texture.hpp"
#include "drape/vulkan/vulkan_utils.hpp"

#include "geometry/point2d.hpp"

#include <array>
#include <atomic>
#include <cstdint>
#include <functional>
#include <map>
#include <optional>
#include <vector>

namespace dp
{
namespace vulkan
{
class VulkanBaseContext : public dp::GraphicsContext
{
public:
  VulkanBaseContext(VkInstance vulkanInstance, VkPhysicalDevice gpu, VkPhysicalDeviceProperties const & gpuProperties,
                    VkDevice device, uint32_t renderingQueueFamilyIndex, ref_ptr<VulkanObjectManager> objectManager,
                    drape_ptr<VulkanPipeline> && pipeline, bool hasPartialTextureUpdates);
  ~VulkanBaseContext() override;

  using ContextHandler = std::function<void(uint32_t inflightFrameIndex)>;

  bool BeginRendering() override;
  void EndRendering() override;
  void Present() override;
  void CollectMemory() override;
  void DoneCurrent() override {}
  bool Validate() override { return true; }
  void Resize(uint32_t w, uint32_t h) override;
  void SetFramebuffer(ref_ptr<dp::BaseFramebuffer> framebuffer) override;
  void ForgetFramebuffer(ref_ptr<dp::BaseFramebuffer> framebuffer) override;
  void ApplyFramebuffer(std::string const & framebufferLabel) override;
  void Init(ApiVersion apiVersion) override;
  void SetPresentAvailable(bool available) override;
  ApiVersion GetApiVersion() const override { return dp::ApiVersion::Vulkan; }
  std::string GetRendererName() const override;
  std::string GetRendererVersion() const override;
  bool HasPartialTextureUpdates() const override;

  void DebugSynchronizeWithCPU() override {}
  void PushDebugLabel(std::string const & label) override {}
  void PopDebugLabel() override {}

  void SetClearColor(Color const & color) override;
  void Clear(uint32_t clearBits, uint32_t storeBits) override;
  void Flush() override {}
  void SetViewport(uint32_t x, uint32_t y, uint32_t w, uint32_t h) override;
  void SetScissor(uint32_t x, uint32_t y, uint32_t w, uint32_t h) override;
  void SetDepthTestEnabled(bool enabled) override;
  void SetDepthTestFunction(TestFunction depthFunction) override;
  void SetStencilTestEnabled(bool enabled) override;
  void SetStencilFunction(StencilFace face, TestFunction stencilFunction) override;
  void SetStencilActions(StencilFace face, StencilAction stencilFailAction, StencilAction depthFailAction,
                         StencilAction passAction) override;
  void SetStencilReferenceValue(uint32_t stencilReferenceValue) override;
  void SetCullingEnabled(bool enabled) override;

  void SetPrimitiveTopology(VkPrimitiveTopology topology);
  void SetBindingInfo(BindingInfoArray const & bindingInfo, uint8_t bindingInfoCount);
  void SetProgram(ref_ptr<VulkanGpuProgram> program);
  void SetBlendingEnabled(bool blendingEnabled);

  void ApplyParamDescriptor(ParamDescriptor && descriptor);
  void ClearParamDescriptors();

  void SetSurface(VkSurfaceKHR surface, VkSurfaceFormatKHR surfaceFormat,
                  VkSurfaceCapabilitiesKHR const & surfaceCapabilities);
  void ResetSurface(bool allowPipelineDump);

  VkPhysicalDevice GetPhysicalDevice() const { return m_gpu; }
  VkDevice GetDevice() const { return m_device; }
  VkQueue GetQueue() const { return m_queue; }

  VkPhysicalDeviceProperties const & GetGpuProperties() const { return m_gpuProperties; }
  uint32_t GetRenderingQueueFamilyIndex() { return m_renderingQueueFamilyIndex; }

  ref_ptr<VulkanObjectManager> GetObjectManager() const { return m_objectManager; }

  // The following methods return short-lived objects. Typically they must not be stored.
  VkCommandBuffer GetCurrentMemoryCommandBuffer() const;
  VkCommandBuffer GetCurrentRenderingCommandBuffer() const;
  ref_ptr<VulkanStagingBuffer> GetDefaultStagingBuffer() const;

  uint32_t GetCurrentInflightFrameIndex() const { return m_inflightFrameIndex; }

  VkPipeline GetCurrentPipeline();
  VkPipelineLayout GetCurrentPipelineLayout() const;
  uint32_t GetCurrentDynamicBufferOffset() const;
  std::vector<ParamDescriptor> const & GetCurrentParamDescriptors() const;
  ref_ptr<VulkanGpuProgram> GetCurrentProgram() const { return m_pipelineKey.m_program; }
  uint32_t GetCurrentFrameIndex() const { return m_frameCounter; }

  VkSampler GetSampler(SamplerKey const & key);

  enum class HandlerType : uint8_t
  {
    PrePresent = 0,
    PostPresent,
    UpdateInflightFrame,

    Count
  };
  uint32_t RegisterHandler(HandlerType handlerType, ContextHandler && handler);
  void UnregisterHandler(uint32_t id);

  void ResetPipelineCache();

  struct AttachmentOp
  {
    VkAttachmentLoadOp m_loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
    VkAttachmentStoreOp m_storeOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
  };

  struct AttachmentsOperations
  {
    AttachmentOp m_color;
    AttachmentOp m_depth;
    AttachmentOp m_stencil;
  };

protected:
  void SetRenderingQueue(VkQueue queue);

  void RecreateSwapchain();
  void DestroySwapchain();

  void CreateCommandPool();
  void DestroyCommandPool();

  void CreateCommandBuffers();
  void DestroyCommandBuffers();

  void CreateSyncPrimitives();
  void DestroySyncPrimitives();

  void DestroyRenderPassAndFramebuffers();
  void DestroyRenderPassAndFramebuffer(ref_ptr<BaseFramebuffer> framebuffer);

  void RecreateDepthTexture();

  void RecreateSwapchainAndDependencies();
  void ResetSwapchainAndDependencies();

  AttachmentsOperations GetAttachmensOperations();

  VkRenderPass CreateRenderPass(uint32_t attachmentsCount, AttachmentsOperations const & attachmentsOp,
                                VkFormat colorFormat, VkImageLayout initLayout, VkImageLayout finalLayout,
                                VkFormat depthFormat, VkImageLayout depthInitLayout, VkImageLayout depthFinalLayout);

  VkInstance const m_vulkanInstance;
  VkPhysicalDevice const m_gpu;
  VkPhysicalDeviceProperties const m_gpuProperties;
  VkDevice const m_device;
  uint32_t const m_renderingQueueFamilyIndex;
  bool const m_hasPartialTextureUpdates;

  VkQueue m_queue = {};
  VkCommandPool m_commandPool = {};
  bool m_isActiveRenderPass = false;

  std::array<VkCommandBuffer, kMaxInflightFrames> m_renderingCommandBuffers = {};
  std::array<VkCommandBuffer, kMaxInflightFrames> m_memoryCommandBuffers = {};

  // Swap chain image acquiring.
  std::array<VkSemaphore, kMaxInflightFrames> m_acquireSemaphores = {};
  // Command buffers submission and execution.
  std::array<VkSemaphore, kMaxInflightFrames> m_renderSemaphores = {};
  // All rendering tasks completion.
  std::array<VkFence, kMaxInflightFrames> m_fences = {};

  ref_ptr<VulkanObjectManager> m_objectManager;
  drape_ptr<VulkanPipeline> m_pipeline;
  std::optional<VkSurfaceKHR> m_surface;

  VkSurfaceCapabilitiesKHR m_surfaceCapabilities;
  std::optional<VkSurfaceFormatKHR> m_surfaceFormat;

  VkSwapchainKHR m_swapchain = {};
  std::vector<VkImageView> m_swapchainImageViews;
  std::vector<VkImage> m_swapchainImages;
  uint32_t m_imageIndex = 0;

  drape_ptr<VulkanTexture> m_depthTexture;

  uint32_t m_clearBits = 0;
  uint32_t m_storeBits = 0;
  Color m_clearColor;
  uint32_t m_stencilReferenceValue = 1;

  ref_ptr<BaseFramebuffer> m_currentFramebuffer;

  struct FramebufferData
  {
    VkRenderPass m_renderPass = {};
    uint16_t m_packedAttachmentOperations = 0;
    std::vector<VkFramebuffer> m_framebuffers = {};
  };
  std::map<ref_ptr<BaseFramebuffer>, FramebufferData> m_framebuffersData;

  std::array<std::vector<std::pair<uint32_t, ContextHandler>>, static_cast<size_t>(HandlerType::Count)> m_handlers;

  VulkanPipeline::PipelineKey m_pipelineKey;
  std::vector<ParamDescriptor> m_paramDescriptors;

  std::array<drape_ptr<VulkanStagingBuffer>, kMaxInflightFrames> m_defaultStagingBuffers = {};
  std::atomic<bool> m_presentAvailable;
  uint32_t m_frameCounter = 0;
  bool m_needPresent = true;
  uint32_t m_inflightFrameIndex = 0;
};
}  // namespace vulkan
}  // namespace dp
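
A minimal usage sketch for the handler API declared above: a caller registers a per-frame callback for one of the HandlerType slots and later unregisters it by id. The FrameWatcher class and the lambda body are illustrative assumptions, not part of this commit.

// Sketch only: assumes a valid ref_ptr<dp::vulkan::VulkanBaseContext> is available.
#include "drape/vulkan/vulkan_base_context.hpp"

class FrameWatcher  // Hypothetical helper, not part of this commit.
{
public:
  explicit FrameWatcher(ref_ptr<dp::vulkan::VulkanBaseContext> context) : m_context(context)
  {
    // The handler runs with the in-flight frame index each time the context reaches
    // the PrePresent point; it must not store the short-lived Vulkan objects noted above.
    m_handlerId = m_context->RegisterHandler(dp::vulkan::VulkanBaseContext::HandlerType::PrePresent,
                                             [](uint32_t inflightFrameIndex)
    {
      // Per-frame work keyed by the in-flight slot (0..kMaxInflightFrames-1).
    });
  }

  ~FrameWatcher() { m_context->UnregisterHandler(m_handlerId); }

private:
  ref_ptr<dp::vulkan::VulkanBaseContext> m_context;
  uint32_t m_handlerId = 0;
};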
363 libs/drape/vulkan/vulkan_context_factory.cpp Normal file
@@ -0,0 +1,363 @@
#include "drape/vulkan/vulkan_context_factory.hpp"

#include "drape/support_manager.hpp"
#include "drape/vulkan/vulkan_pipeline.hpp"
#include "drape/vulkan/vulkan_utils.hpp"

#include "base/assert.hpp"
#include "base/logging.hpp"
#include "base/macros.hpp"

#include <vector>

namespace dp
{
namespace vulkan
{
namespace
{
class DrawVulkanContext : public dp::vulkan::VulkanBaseContext
{
public:
  DrawVulkanContext(VkInstance vulkanInstance, VkPhysicalDevice gpu, VkPhysicalDeviceProperties const & gpuProperties,
                    VkDevice device, uint32_t renderingQueueFamilyIndex,
                    ref_ptr<dp::vulkan::VulkanObjectManager> objectManager, uint32_t appVersionCode,
                    bool hasPartialTextureUpdates)
    : dp::vulkan::VulkanBaseContext(vulkanInstance, gpu, gpuProperties, device, renderingQueueFamilyIndex,
                                    objectManager, make_unique_dp<dp::vulkan::VulkanPipeline>(device, appVersionCode),
                                    hasPartialTextureUpdates)
  {
    VkQueue queue;
    vkGetDeviceQueue(device, renderingQueueFamilyIndex, 0, &queue);
    SetRenderingQueue(queue);
    CreateCommandPool();
  }

  void MakeCurrent() override { m_objectManager->RegisterThread(dp::vulkan::VulkanObjectManager::Frontend); }
};

class UploadVulkanContext : public dp::vulkan::VulkanBaseContext
{
public:
  UploadVulkanContext(VkInstance vulkanInstance, VkPhysicalDevice gpu, VkPhysicalDeviceProperties const & gpuProperties,
                      VkDevice device, uint32_t renderingQueueFamilyIndex,
                      ref_ptr<dp::vulkan::VulkanObjectManager> objectManager, bool hasPartialTextureUpdates)
    : dp::vulkan::VulkanBaseContext(vulkanInstance, gpu, gpuProperties, device, renderingQueueFamilyIndex,
                                    objectManager, nullptr /* pipeline */, hasPartialTextureUpdates)
  {}

  void MakeCurrent() override { m_objectManager->RegisterThread(dp::vulkan::VulkanObjectManager::Backend); }

  void Present() override {}

  void Resize(uint32_t w, uint32_t h) override {}
  void SetFramebuffer(ref_ptr<dp::BaseFramebuffer> framebuffer) override {}
  void Init(dp::ApiVersion apiVersion) override { CHECK_EQUAL(apiVersion, dp::ApiVersion::Vulkan, ()); }

  void SetClearColor(dp::Color const & color) override {}
  void Clear(uint32_t clearBits, uint32_t storeBits) override {}
  void Flush() override {}
  void SetDepthTestEnabled(bool enabled) override {}
  void SetDepthTestFunction(dp::TestFunction depthFunction) override {}
  void SetStencilTestEnabled(bool enabled) override {}
  void SetStencilFunction(dp::StencilFace face, dp::TestFunction stencilFunction) override {}
  void SetStencilActions(dp::StencilFace face, dp::StencilAction stencilFailAction, dp::StencilAction depthFailAction,
                         dp::StencilAction passAction) override
  {}
};
}  // namespace

VulkanContextFactory::VulkanContextFactory(uint32_t appVersionCode, int sdkVersion, bool isCustomROM)
{
  if (InitVulkan() == 0)
  {
    LOG_ERROR_VK("Could not initialize Vulkan library.");
    return;
  }

  VkApplicationInfo appInfo = {};
  appInfo.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO;
  appInfo.pNext = nullptr;
  appInfo.apiVersion = VK_MAKE_VERSION(1, 0, 0);
  appInfo.applicationVersion = appVersionCode;
  appInfo.engineVersion = appVersionCode;
  appInfo.pApplicationName = "CoMaps";
  appInfo.pEngineName = "Drape Engine";

  bool enableDiagnostics = false;
#ifdef ENABLE_VULKAN_DIAGNOSTICS
  enableDiagnostics = true;
#endif
  m_layers = make_unique_dp<dp::vulkan::Layers>(enableDiagnostics);

  VkInstanceCreateInfo instanceCreateInfo = {};
  instanceCreateInfo.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
  instanceCreateInfo.pNext = nullptr;
  instanceCreateInfo.pApplicationInfo = &appInfo;
  instanceCreateInfo.enabledExtensionCount = m_layers->GetInstanceExtensionsCount();
  instanceCreateInfo.ppEnabledExtensionNames = m_layers->GetInstanceExtensions();
  instanceCreateInfo.enabledLayerCount = m_layers->GetInstanceLayersCount();
  instanceCreateInfo.ppEnabledLayerNames = m_layers->GetInstanceLayers();
#if defined(OMIM_OS_MAC)
  instanceCreateInfo.flags = VK_INSTANCE_CREATE_ENUMERATE_PORTABILITY_BIT_KHR;
#endif

  // Enable extra validation features.
  VkValidationFeaturesEXT validationFeatures = {};
  VkValidationFeatureEnableEXT const validationFeaturesEnabled[] = {
      VK_VALIDATION_FEATURE_ENABLE_SYNCHRONIZATION_VALIDATION_EXT};
  if (m_layers->IsValidationFeaturesEnabled())
  {
    validationFeatures.sType = VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT;
    validationFeatures.pNext = nullptr;
    validationFeatures.enabledValidationFeatureCount = ARRAY_SIZE(validationFeaturesEnabled);
    validationFeatures.pEnabledValidationFeatures = validationFeaturesEnabled;

    instanceCreateInfo.pNext = &validationFeatures;
  }

  VkResult statusCode;
  statusCode = vkCreateInstance(&instanceCreateInfo, nullptr, &m_vulkanInstance);
  if (statusCode != VK_SUCCESS)
  {
    LOG_ERROR_VK_CALL(vkCreateInstance, statusCode);
    return;
  }

  uint32_t gpuCount = 0;
  statusCode = vkEnumeratePhysicalDevices(m_vulkanInstance, &gpuCount, nullptr);
  if (statusCode != VK_SUCCESS || gpuCount == 0)
  {
    LOG_ERROR_VK_CALL(vkEnumeratePhysicalDevices, statusCode);
    return;
  }

  std::vector<VkPhysicalDevice> tmpGpus(gpuCount);
  statusCode = vkEnumeratePhysicalDevices(m_vulkanInstance, &gpuCount, tmpGpus.data());
  if (statusCode != VK_SUCCESS)
  {
    LOG_ERROR_VK_CALL(vkEnumeratePhysicalDevices, statusCode);
    return;
  }
  m_gpu = tmpGpus[0];

  VkPhysicalDeviceProperties gpuProperties;
  vkGetPhysicalDeviceProperties(m_gpu, &gpuProperties);
  dp::SupportManager::Version apiVersion{VK_VERSION_MAJOR(gpuProperties.apiVersion),
                                         VK_VERSION_MINOR(gpuProperties.apiVersion),
                                         VK_VERSION_PATCH(gpuProperties.apiVersion)};
  dp::SupportManager::Version driverVersion{VK_VERSION_MAJOR(gpuProperties.driverVersion),
                                            VK_VERSION_MINOR(gpuProperties.driverVersion),
                                            VK_VERSION_PATCH(gpuProperties.driverVersion)};
  if (dp::SupportManager::Instance().IsVulkanForbidden(gpuProperties.deviceName, apiVersion, driverVersion, isCustomROM,
                                                       sdkVersion))
  {
    LOG_ERROR_VK("GPU/Driver configuration is not supported.");
    return;
  }

  uint32_t queueFamilyCount;
  vkGetPhysicalDeviceQueueFamilyProperties(m_gpu, &queueFamilyCount, nullptr);
  if (queueFamilyCount == 0)
  {
    LOG_ERROR_VK("No queue family was found.");
    return;
  }

  std::vector<VkQueueFamilyProperties> queueFamilyProperties(queueFamilyCount);
  vkGetPhysicalDeviceQueueFamilyProperties(m_gpu, &queueFamilyCount, queueFamilyProperties.data());

  uint32_t renderingQueueFamilyIndex = 0;
  for (; renderingQueueFamilyIndex < queueFamilyCount; ++renderingQueueFamilyIndex)
    if (queueFamilyProperties[renderingQueueFamilyIndex].queueFlags & VK_QUEUE_GRAPHICS_BIT)
      break;
  if (renderingQueueFamilyIndex == queueFamilyCount)
  {
    LOG_ERROR_VK("No queue family with VK_QUEUE_GRAPHICS_BIT was found.");
    return;
  }

  if (!dp::vulkan::VulkanFormatUnpacker::Init(m_gpu))
    return;

  if (!m_layers->Initialize(m_vulkanInstance, m_gpu))
    return;

  float priorities[] = {1.0f};
  VkDeviceQueueCreateInfo queueCreateInfo = {};
  queueCreateInfo.sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
  queueCreateInfo.pNext = nullptr;
  queueCreateInfo.flags = 0;
  queueCreateInfo.queueCount = 1;
  queueCreateInfo.queueFamilyIndex = renderingQueueFamilyIndex;
  queueCreateInfo.pQueuePriorities = priorities;

  VkPhysicalDeviceFeatures availableFeatures;
  vkGetPhysicalDeviceFeatures(m_gpu, &availableFeatures);
  if (!availableFeatures.wideLines)
    LOG(LWARNING, ("wideLines Vulkan feature is not supported."));

  VkPhysicalDeviceFeatures enabledFeatures = {};
  enabledFeatures.wideLines = availableFeatures.wideLines;

  VkDeviceCreateInfo deviceCreateInfo = {};
  deviceCreateInfo.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
  deviceCreateInfo.pNext = nullptr;
  deviceCreateInfo.queueCreateInfoCount = 1;
  deviceCreateInfo.pQueueCreateInfos = &queueCreateInfo;
  deviceCreateInfo.enabledLayerCount = m_layers->GetDeviceLayersCount();
  deviceCreateInfo.ppEnabledLayerNames = m_layers->GetDeviceLayers();
  deviceCreateInfo.enabledExtensionCount = m_layers->GetDeviceExtensionsCount();
  deviceCreateInfo.ppEnabledExtensionNames = m_layers->GetDeviceExtensions();
  deviceCreateInfo.pEnabledFeatures = nullptr;
  if (enableDiagnostics)
  {
    enabledFeatures.robustBufferAccess = VK_TRUE;
    deviceCreateInfo.pEnabledFeatures = &enabledFeatures;
  }

  statusCode = vkCreateDevice(m_gpu, &deviceCreateInfo, nullptr, &m_device);
  if (statusCode != VK_SUCCESS)
  {
    LOG_ERROR_VK_CALL(vkCreateDevice, statusCode);
    return;
  }

  INIT_DEBUG_NAME_VK(m_vulkanInstance, m_device);

  VkPhysicalDeviceMemoryProperties memoryProperties;
  vkGetPhysicalDeviceMemoryProperties(m_gpu, &memoryProperties);
  m_objectManager = make_unique_dp<dp::vulkan::VulkanObjectManager>(m_device, gpuProperties.limits, memoryProperties,
                                                                    renderingQueueFamilyIndex);

  bool const hasPartialTextureUpdates = !dp::SupportManager::Instance().IsVulkanTexturePartialUpdateBuggy(
      sdkVersion, gpuProperties.deviceName, apiVersion, driverVersion);

  m_drawContext =
      make_unique_dp<DrawVulkanContext>(m_vulkanInstance, m_gpu, gpuProperties, m_device, renderingQueueFamilyIndex,
                                        make_ref(m_objectManager), appVersionCode, hasPartialTextureUpdates);
  m_uploadContext =
      make_unique_dp<UploadVulkanContext>(m_vulkanInstance, m_gpu, gpuProperties, m_device, renderingQueueFamilyIndex,
                                          make_ref(m_objectManager), hasPartialTextureUpdates);
}

VulkanContextFactory::~VulkanContextFactory()
{
  m_drawContext.reset();
  m_uploadContext.reset();
  m_objectManager.reset();

  if (m_device != nullptr)
  {
    vkDeviceWaitIdle(m_device);
    vkDestroyDevice(m_device, nullptr);
  }

  if (m_vulkanInstance != nullptr)
  {
    m_layers->Uninitialize(m_vulkanInstance);
    vkDestroyInstance(m_vulkanInstance, nullptr);
  }
}

bool VulkanContextFactory::IsVulkanSupported() const
{
  return m_vulkanInstance != nullptr && m_gpu != nullptr && m_device != nullptr;
}

dp::GraphicsContext * VulkanContextFactory::GetDrawContext()
{
  return m_drawContext.get();
}

dp::GraphicsContext * VulkanContextFactory::GetResourcesUploadContext()
{
  return m_uploadContext.get();
}

bool VulkanContextFactory::IsDrawContextCreated() const
{
  return m_drawContext != nullptr;
}

bool VulkanContextFactory::IsUploadContextCreated() const
{
  return m_uploadContext != nullptr;
}

void VulkanContextFactory::SetPresentAvailable(bool available)
{
  if (m_drawContext)
    m_drawContext->SetPresentAvailable(available);
}

bool VulkanContextFactory::QuerySurfaceSize()
{
  auto statusCode = vkGetPhysicalDeviceSurfaceCapabilitiesKHR(m_gpu, m_surface, &m_surfaceCapabilities);
  if (statusCode != VK_SUCCESS)
  {
    LOG_ERROR_VK_CALL(vkGetPhysicalDeviceSurfaceCapabilitiesKHR, statusCode);
    return false;
  }

  uint32_t formatCount = 0;
  statusCode = vkGetPhysicalDeviceSurfaceFormatsKHR(m_gpu, m_surface, &formatCount, nullptr);
  if (statusCode != VK_SUCCESS)
  {
    LOG_ERROR_VK_CALL(vkGetPhysicalDeviceSurfaceFormatsKHR, statusCode);
    return false;
  }

  std::vector<VkSurfaceFormatKHR> formats(formatCount);
  statusCode = vkGetPhysicalDeviceSurfaceFormatsKHR(m_gpu, m_surface, &formatCount, formats.data());
  if (statusCode != VK_SUCCESS)
  {
    LOG_ERROR_VK_CALL(vkGetPhysicalDeviceSurfaceFormatsKHR, statusCode);
    return false;
  }

  uint32_t chosenFormat;
  for (chosenFormat = 0; chosenFormat < formatCount; chosenFormat++)
  {
#if defined(OMIM_OS_MAC) || defined(OMIM_OS_LINUX)
    if (formats[chosenFormat].format == VK_FORMAT_B8G8R8A8_UNORM)
      break;
#else
    if (formats[chosenFormat].format == VK_FORMAT_R8G8B8A8_UNORM)
      break;
#endif
  }
  if (chosenFormat == formatCount)
  {
    LOG_ERROR_VK("No supported surface format was found.");
    return false;
  }

  if (!(m_surfaceCapabilities.supportedCompositeAlpha & VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR))
  {
    LOG_ERROR_VK("Alpha channel is not supported.");
    return false;
  }

  m_surfaceFormat = formats[chosenFormat];
  m_surfaceWidth = static_cast<int>(m_surfaceCapabilities.currentExtent.width);
  m_surfaceHeight = static_cast<int>(m_surfaceCapabilities.currentExtent.height);
  return true;
}

int VulkanContextFactory::GetWidth() const
{
  return m_surfaceWidth;
}

int VulkanContextFactory::GetHeight() const
{
  return m_surfaceHeight;
}

VkInstance VulkanContextFactory::GetVulkanInstance() const
{
  return m_vulkanInstance;
}
}  // namespace vulkan
}  // namespace dp
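
A hedged sketch of the intended call order for this factory, based only on the code above: construct it, verify initialization succeeded (every early return in the constructor leaves IsVulkanSupported() false), then hand the two contexts to the rendering threads. The argument values below are placeholders.

#include "drape/vulkan/vulkan_context_factory.hpp"

// Sketch only; appVersionCode/sdkVersion/isCustomROM values are placeholders.
dp::vulkan::VulkanContextFactory factory(1 /* appVersionCode */, 30 /* sdkVersion */, false /* isCustomROM */);
if (!factory.IsVulkanSupported())
{
  // Instance, GPU or device creation failed; fall back to another graphics API.
}
else
{
  // The draw context serves the front-end thread, the upload context the back-end thread.
  dp::GraphicsContext * draw = factory.GetDrawContext();
  dp::GraphicsContext * upload = factory.GetResourcesUploadContext();
  draw->MakeCurrent();    // Registers the frontend thread in the object manager.
  upload->MakeCurrent();  // In practice called on the backend thread.
}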
51 libs/drape/vulkan/vulkan_context_factory.hpp Normal file
@@ -0,0 +1,51 @@
#pragma once

#include "drape/graphics_context_factory.hpp"
#include "drape/pointers.hpp"
#include "drape/vulkan/vulkan_base_context.hpp"
#include "drape/vulkan/vulkan_layers.hpp"
#include "drape/vulkan/vulkan_object_manager.hpp"

namespace dp
{
namespace vulkan
{
class VulkanContextFactory : public dp::GraphicsContextFactory
{
public:
  VulkanContextFactory(uint32_t appVersionCode, int sdkVersion, bool isCustomROM);
  ~VulkanContextFactory() override;

  bool IsVulkanSupported() const;

  dp::GraphicsContext * GetDrawContext() override;
  dp::GraphicsContext * GetResourcesUploadContext() override;
  bool IsDrawContextCreated() const override;
  bool IsUploadContextCreated() const override;
  void SetPresentAvailable(bool available) override;

  int GetWidth() const;
  int GetHeight() const;

  VkInstance GetVulkanInstance() const;

protected:
  bool QuerySurfaceSize();

  VkInstance m_vulkanInstance = nullptr;
  drape_ptr<dp::vulkan::Layers> m_layers;
  VkPhysicalDevice m_gpu = nullptr;
  VkDevice m_device = nullptr;
  drape_ptr<dp::vulkan::VulkanObjectManager> m_objectManager;
  drape_ptr<dp::vulkan::VulkanBaseContext> m_drawContext;
  drape_ptr<dp::vulkan::VulkanBaseContext> m_uploadContext;

  VkSurfaceKHR m_surface = 0;
  VkSurfaceFormatKHR m_surfaceFormat;
  VkSurfaceCapabilitiesKHR m_surfaceCapabilities;

  int m_surfaceWidth = 0;
  int m_surfaceHeight = 0;
};
}  // namespace vulkan
}  // namespace dp
143 libs/drape/vulkan/vulkan_gpu_buffer_impl.cpp Normal file
@@ -0,0 +1,143 @@
#include "drape/vulkan/vulkan_gpu_buffer_impl.hpp"
#include "drape/vulkan/vulkan_staging_buffer.hpp"
#include "drape/vulkan/vulkan_utils.hpp"

#include "base/macros.hpp"

#include <algorithm>
#include <cstring>
#include <limits>

namespace dp
{
namespace vulkan
{
VulkanGPUBuffer::VulkanGPUBuffer(ref_ptr<VulkanBaseContext> context, void const * data, uint8_t elementSize,
                                 uint32_t capacity, uint64_t batcherHash)
  : BufferBase(elementSize, capacity)
  , m_batcherHash(batcherHash)
{
  m_regionsToCopy.reserve(5);
  Resize(context, data, capacity);
}

VulkanGPUBuffer::~VulkanGPUBuffer()
{
  CHECK(m_objectManager != nullptr, ());
  m_objectManager->DestroyObject(m_geometryBuffer);
}

void * VulkanGPUBuffer::Map(ref_ptr<VulkanBaseContext> context, uint32_t elementOffset, uint32_t elementCount)
{
  CHECK(m_objectManager != nullptr, ());

  uint32_t const elementSize = GetElementSize();
  uint32_t const mappingSizeInBytes = elementCount * elementSize;
  m_mappingByteOffset = elementOffset * elementSize;
  m_mappingByteOffsetMin = std::numeric_limits<uint32_t>::max();
  m_mappingByteOffsetMax = std::numeric_limits<uint32_t>::min();

  VkCommandBuffer commandBuffer = context->GetCurrentMemoryCommandBuffer();
  CHECK(commandBuffer != nullptr, ());

  // Copy to the default staging buffer, or to a temporary one if the default lacks space.
  m_stagingBufferRef = context->GetDefaultStagingBuffer();
  if (!m_stagingBufferRef->HasEnoughSpace(mappingSizeInBytes))
  {
    m_ownStagingBuffer = make_unique_dp<VulkanStagingBuffer>(m_objectManager, mappingSizeInBytes);
    ASSERT(m_ownStagingBuffer->HasEnoughSpace(mappingSizeInBytes), ());
    m_stagingBufferRef = make_ref(m_ownStagingBuffer);
  }

  VulkanStagingBuffer::StagingData data;
  m_reservationId = m_stagingBufferRef->ReserveWithId(mappingSizeInBytes, data);
  return data.m_pointer;
}

void VulkanGPUBuffer::UpdateData(void * gpuPtr, void const * data, uint32_t elementOffset, uint32_t elementCount)
{
  CHECK(gpuPtr != nullptr, ());
  CHECK(m_stagingBufferRef != nullptr, ());
  uint32_t const elementSize = GetElementSize();
  uint32_t const byteOffset = elementOffset * elementSize;
  uint32_t const byteCount = elementCount * elementSize;
  memcpy(static_cast<uint8_t *>(gpuPtr) + byteOffset, data, byteCount);

  uint32_t const baseSrcOffset = m_stagingBufferRef->GetReservationById(m_reservationId).m_offset;
  VkBufferCopy copyRegion = {};
  copyRegion.dstOffset = m_mappingByteOffset + byteOffset;
  copyRegion.srcOffset = baseSrcOffset + byteOffset;
  copyRegion.size = byteCount;

  m_mappingByteOffsetMin = std::min(m_mappingByteOffsetMin, static_cast<uint32_t>(copyRegion.dstOffset));
  m_mappingByteOffsetMax =
      std::max(m_mappingByteOffsetMax, static_cast<uint32_t>(copyRegion.dstOffset + copyRegion.size));

  m_regionsToCopy.push_back(std::move(copyRegion));
}

void VulkanGPUBuffer::Unmap(ref_ptr<VulkanBaseContext> context)
{
  VkCommandBuffer commandBuffer = context->GetCurrentMemoryCommandBuffer();
  CHECK(commandBuffer != nullptr, ());

  VkBuffer stagingBuffer = m_stagingBufferRef->GetReservationById(m_reservationId).m_stagingBuffer;
  if (m_ownStagingBuffer != nullptr)
  {
    m_ownStagingBuffer->Flush();
    m_ownStagingBuffer.reset();
  }
  m_stagingBufferRef = nullptr;

  CHECK_LESS(m_mappingByteOffsetMin, m_mappingByteOffsetMax, ());

  // Set up a barrier to prevent data collisions (write-after-write, write-after-read).
  VkBufferMemoryBarrier barrier = {};
  barrier.sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER;
  barrier.pNext = nullptr;
  barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT | VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT | VK_ACCESS_INDEX_READ_BIT;
  barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
  barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  barrier.buffer = m_geometryBuffer.m_buffer;
  barrier.offset = m_mappingByteOffsetMin;
  barrier.size = m_mappingByteOffsetMax - m_mappingByteOffsetMin;
  vkCmdPipelineBarrier(commandBuffer, VK_PIPELINE_STAGE_VERTEX_INPUT_BIT | VK_PIPELINE_STAGE_TRANSFER_BIT,
                       VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 1, &barrier, 0, nullptr);

  // Schedule a command to copy from the staging buffer to the geometry buffer.
  vkCmdCopyBuffer(commandBuffer, stagingBuffer, m_geometryBuffer.m_buffer,
                  static_cast<uint32_t>(m_regionsToCopy.size()), m_regionsToCopy.data());

  // Set up barriers to prevent data collisions (read-after-write).
  barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
  barrier.dstAccessMask = VK_ACCESS_INDEX_READ_BIT | VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT;
  vkCmdPipelineBarrier(commandBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, 0, 0, nullptr,
                       1, &barrier, 0, nullptr);

  m_mappingByteOffset = 0;
  m_regionsToCopy.clear();
}

void VulkanGPUBuffer::Resize(ref_ptr<VulkanBaseContext> context, void const * data, uint32_t elementCount)
{
  BufferBase::Resize(elementCount);

  m_objectManager = context->GetObjectManager();
  uint32_t const sizeInBytes = GetCapacity() * GetElementSize();

  m_geometryBuffer =
      m_objectManager->CreateBuffer(VulkanMemoryManager::ResourceType::Geometry, sizeInBytes, m_batcherHash);
  m_objectManager->Fill(m_geometryBuffer, data, sizeInBytes);

  // If the data has already been set up, we have to call SetDataSize.
  if (data != nullptr)
    SetDataSize(elementCount);
}
}  // namespace vulkan

drape_ptr<DataBufferBase> DataBuffer::CreateImplForVulkan(ref_ptr<GraphicsContext> context, void const * data,
                                                          uint8_t elementSize, uint32_t capacity, uint64_t batcherHash)
{
  return make_unique_dp<vulkan::VulkanGpuBufferImpl>(context, data, elementSize, capacity, batcherHash);
}
}  // namespace dp
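
The Map/UpdateData/Unmap trio above forms a single upload transaction: Map reserves staging space, UpdateData performs a memcpy and records copy regions, and Unmap emits the barriers plus the vkCmdCopyBuffer call. A minimal sketch of that contract; the function name and its arguments are illustrative, not part of this commit.

// Sketch only: context must be a frontend context with an open memory command buffer.
void UploadVertices(ref_ptr<dp::vulkan::VulkanBaseContext> context,
                    dp::vulkan::VulkanGPUBuffer & buffer,
                    void const * vertices, uint32_t count)
{
  void * staging = buffer.Map(context, 0 /* elementOffset */, count);
  CHECK(staging != nullptr, ());  // CHECK comes from base/assert.hpp.
  // Offsets passed to UpdateData are relative to the mapped range, not to the whole buffer.
  buffer.UpdateData(staging, vertices, 0 /* elementOffset */, count);
  // Records the barriers and the staging-to-geometry copy into the current memory command buffer.
  buffer.Unmap(context);
}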
99 libs/drape/vulkan/vulkan_gpu_buffer_impl.hpp Normal file
@@ -0,0 +1,99 @@
#pragma once

#include "drape/data_buffer.hpp"
#include "drape/data_buffer_impl.hpp"
#include "drape/pointers.hpp"
#include "drape/vulkan/vulkan_base_context.hpp"
#include "drape/vulkan/vulkan_object_manager.hpp"

#include "base/assert.hpp"

#include <cstdint>
#include <limits>
#include <utility>
#include <vector>

namespace dp
{
namespace vulkan
{
class VulkanStagingBuffer;

class VulkanGPUBuffer : public BufferBase
{
public:
  VulkanGPUBuffer(ref_ptr<VulkanBaseContext> context, void const * data, uint8_t elementSize, uint32_t capacity,
                  uint64_t batcherHash);
  ~VulkanGPUBuffer() override;

  void * Map(ref_ptr<VulkanBaseContext> context, uint32_t elementOffset, uint32_t elementCount);
  void UpdateData(void * gpuPtr, void const * data, uint32_t elementOffset, uint32_t elementCount);
  void Unmap(ref_ptr<VulkanBaseContext> context);

  void Advance(uint32_t elementCount) { BufferBase::UploadData(elementCount); }

  VkBuffer GetVulkanBuffer() const { return m_geometryBuffer.m_buffer; }

protected:
  void Resize(ref_ptr<VulkanBaseContext> context, void const * data, uint32_t elementCount);

  ref_ptr<VulkanObjectManager> m_objectManager;
  VulkanObject m_geometryBuffer;
  uint64_t m_batcherHash;

  ref_ptr<VulkanStagingBuffer> m_stagingBufferRef;
  drape_ptr<VulkanStagingBuffer> m_ownStagingBuffer;
  uint32_t m_reservationId = 0;
  uint32_t m_mappingByteOffset = 0;
  uint32_t m_mappingByteOffsetMin = std::numeric_limits<uint32_t>::max();
  uint32_t m_mappingByteOffsetMax = std::numeric_limits<uint32_t>::min();
  std::vector<VkBufferCopy> m_regionsToCopy;
};

class VulkanGpuBufferImpl : public DataBufferImpl<VulkanGPUBuffer>
{
public:
  template <typename... Args>
  VulkanGpuBufferImpl(Args &&... params) : DataBufferImpl(std::forward<Args>(params)...)
  {}

  void const * Data() const override
  {
    ASSERT(false, ("Retrieving raw data is unavailable for a GPU buffer"));
    return nullptr;
  }

  void UploadData(ref_ptr<GraphicsContext> context, void const * data, uint32_t elementCount) override
  {
    // In Vulkan, uploading must be performed only on the front-end renderer (FR) thread.
    ref_ptr<VulkanBaseContext> vulkanContext = context;
    CHECK(vulkanContext->GetCurrentMemoryCommandBuffer() != nullptr, ());

    uint32_t const currentSize = m_buffer->GetCurrentSize();
    ASSERT(m_buffer->GetCapacity() >= elementCount + currentSize,
           ("Not enough memory to upload ", elementCount, " elements"));
    auto ptr = m_buffer->Map(context, currentSize, elementCount);
    CHECK(ptr != nullptr, ());
    m_buffer->UpdateData(ptr, data, 0, elementCount);
    m_buffer->Unmap(context);
    m_buffer->Advance(elementCount);
  }

  void UpdateData(void * destPtr, void const * srcPtr, uint32_t elementOffset, uint32_t elementCount) override
  {
    m_buffer->UpdateData(destPtr, srcPtr, elementOffset, elementCount);
  }

  void * Map(ref_ptr<GraphicsContext> context, uint32_t elementOffset, uint32_t elementCount) override
  {
    return m_buffer->Map(context, elementOffset, elementCount);
  }

  void Unmap(ref_ptr<GraphicsContext> context) override { m_buffer->Unmap(context); }

  void Bind() override {}

  VkBuffer GetVulkanBuffer() const { return m_buffer->GetVulkanBuffer(); }
};
}  // namespace vulkan
}  // namespace dp
53 libs/drape/vulkan/vulkan_gpu_program.hpp Normal file
@@ -0,0 +1,53 @@
#pragma once

#include "drape/gpu_program.hpp"

#include "base/visitor.hpp"

#include <vulkan_wrapper.h>

#include <array>
#include <string>
#include <unordered_map>

namespace dp
{
namespace vulkan
{
class VulkanGpuProgram : public GpuProgram
{
public:
  using TextureBindings = std::unordered_map<std::string, int8_t>;

  VulkanGpuProgram(std::string const & programName, VkPipelineShaderStageCreateInfo const & vertexShader,
                   VkPipelineShaderStageCreateInfo const & fragmentShader, VkDescriptorSetLayout descriptorSetLayout,
                   VkPipelineLayout pipelineLayout, TextureBindings const & textureBindings)
    : GpuProgram(programName)
    , m_vertexShader(vertexShader)
    , m_fragmentShader(fragmentShader)
    , m_descriptorSetLayout(descriptorSetLayout)
    , m_pipelineLayout(pipelineLayout)
    , m_textureBindings(textureBindings)
  {}

  void Bind() override {}
  void Unbind() override {}

  std::array<VkPipelineShaderStageCreateInfo, 2> GetShaders() const { return {{m_vertexShader, m_fragmentShader}}; }

  VkDescriptorSetLayout GetDescriptorSetLayout() const { return m_descriptorSetLayout; }

  VkPipelineLayout GetPipelineLayout() const { return m_pipelineLayout; }

  TextureBindings const & GetTextureBindings() const { return m_textureBindings; }

private:
  // These objects aren't owned by this class, so they must not be destroyed here.
  VkPipelineShaderStageCreateInfo m_vertexShader;
  VkPipelineShaderStageCreateInfo m_fragmentShader;
  VkDescriptorSetLayout m_descriptorSetLayout;
  VkPipelineLayout m_pipelineLayout;

  TextureBindings m_textureBindings;
};
}  // namespace vulkan
}  // namespace dp
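
TextureBindings maps a sampler name to its descriptor binding slot. A small illustrative lookup; the sampler name "u_colorTex" is an assumption for demonstration, not taken from this commit.

// Sketch only: program is an initialized ref_ptr<dp::vulkan::VulkanGpuProgram>.
auto const & bindings = program->GetTextureBindings();
auto const it = bindings.find("u_colorTex");  // Hypothetical sampler name.
if (it != bindings.end())
{
  int8_t const bindingSlot = it->second;  // Descriptor binding index for this texture.
  // Use bindingSlot when writing the VkDescriptorSet entries for draw calls.
}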
444 libs/drape/vulkan/vulkan_layers.cpp Normal file
@@ -0,0 +1,444 @@
#include "drape/vulkan/vulkan_layers.hpp"

#include "drape/vulkan/vulkan_utils.hpp"

#include "base/macros.hpp"

#include <algorithm>
#include <cstring>

namespace dp
{
namespace vulkan
{
namespace
{
char const * kDebugReportExtension = "VK_EXT_debug_report";
char const * kValidationFeaturesExtension = "VK_EXT_validation_features";

char const * const kInstanceExtensions[] = {
    "VK_KHR_surface",
    "VK_KHR_android_surface",
    kDebugReportExtension,
    kValidationFeaturesExtension,
#if defined(OMIM_OS_MAC) || defined(OMIM_OS_LINUX)
    "VK_EXT_debug_utils",
#endif
#if defined(OMIM_OS_MAC)
    "VK_KHR_portability_enumeration",
    "VK_MVK_macos_surface",
    "VK_KHR_get_physical_device_properties2",
#endif
#if defined(OMIM_OS_LINUX)
    "VK_KHR_xlib_surface",
#endif
};

char const * const kDeviceExtensions[] = {
    "VK_KHR_swapchain",
#if defined(OMIM_OS_MAC)
    "VK_KHR_portability_subset",
#endif
};

char const * const kValidationLayers[] = {
    "VK_LAYER_KHRONOS_validation",
};

std::vector<char const *> CheckLayers(std::vector<VkLayerProperties> const & props)
{
  std::vector<char const *> result;
  result.reserve(props.size());
  for (uint32_t i = 0; i < ARRAY_SIZE(kValidationLayers); ++i)
  {
    auto const it = std::find_if(props.begin(), props.end(), [i](VkLayerProperties const & p)
    { return strcmp(kValidationLayers[i], p.layerName) == 0; });
    if (it != props.end())
      result.push_back(kValidationLayers[i]);
  }
  return result;
}

std::vector<char const *> CheckExtensions(std::vector<VkExtensionProperties> const & props, bool enableDiagnostics,
                                          char const * const * extensions, uint32_t extensionsCount)
{
  std::vector<char const *> result;
  result.reserve(props.size());
  for (uint32_t i = 0; i < extensionsCount; ++i)
  {
    if (!enableDiagnostics)
    {
      if (strcmp(extensions[i], kDebugReportExtension) == 0)
        continue;

      if (strcmp(extensions[i], kValidationFeaturesExtension) == 0)
        continue;
    }

    auto const it = std::find_if(props.begin(), props.end(), [i, extensions](VkExtensionProperties const & p)
    { return strcmp(extensions[i], p.extensionName) == 0; });
    if (it != props.end())
      result.push_back(extensions[i]);
  }
  return result;
}

std::string GetReportObjectTypeString(VkDebugReportObjectTypeEXT objectType)
{
  switch (objectType)
  {
  case VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT: return "UNKNOWN";
  case VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT: return "INSTANCE";
  case VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT: return "PHYSICAL_DEVICE";
  case VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT: return "DEVICE";
  case VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT: return "QUEUE";
  case VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT: return "SEMAPHORE";
  case VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT: return "COMMAND_BUFFER";
  case VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT: return "FENCE";
  case VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT: return "DEVICE_MEMORY";
  case VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT: return "BUFFER";
  case VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT: return "IMAGE";
  case VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT: return "EVENT";
  case VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT: return "QUERY";
  case VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT: return "BUFFER_VIEW";
  case VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT: return "IMAGE_VIEW";
  case VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT: return "SHADER_MODULE";
  case VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT: return "PIPELINE_CACHE";
  case VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT: return "PIPELINE_LAYOUT";
  case VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT: return "RENDER_PASS";
  case VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT: return "PIPELINE";
  case VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT: return "DESCRIPTOR_SET_LAYOUT";
  case VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT: return "SAMPLER";
  case VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT: return "DESCRIPTOR_POOL";
  case VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT: return "DESCRIPTOR_SET";
  case VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT: return "FRAMEBUFFER";
  case VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT: return "COMMAND_POOL";
  case VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT: return "SURFACE_KHR";
  case VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT: return "SWAPCHAIN_KHR";
  case VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT: return "DEBUG_REPORT_CALLBACK_EXT";
  case VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT: return "DISPLAY_KHR";
  case VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT: return "DISPLAY_MODE_KHR";
  case VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT: return "VALIDATION_CACHE_EXT";
  case VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT: return "SAMPLER_YCBCR_CONVERSION";
  case VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT: return "DESCRIPTOR_UPDATE_TEMPLATE";
  case VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT: return "ACCELERATION_STRUCTURE_NV";
  case VK_DEBUG_REPORT_OBJECT_TYPE_MAX_ENUM_EXT: return "MAX_ENUM";
  case VK_DEBUG_REPORT_OBJECT_TYPE_CU_MODULE_NVX_EXT: return "CU_MODULE_NVX";
  case VK_DEBUG_REPORT_OBJECT_TYPE_CU_FUNCTION_NVX_EXT: return "CU_FUNCTION_NVX";
  case VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR_EXT: return "ACCELERATION_STRUCTURE_KHR";
  case VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_COLLECTION_FUCHSIA_EXT: return "BUFFER_COLLECTION_FUCHSIA";
  case VK_DEBUG_REPORT_OBJECT_TYPE_CUDA_MODULE_NV_EXT: return "CUDA_MODULE_NV";
  case VK_DEBUG_REPORT_OBJECT_TYPE_CUDA_FUNCTION_NV_EXT: return "CUDA_FUNCTION_NV";
  }
  UNREACHABLE();
  return {};
}

bool IsContained(char const * name, std::vector<char const *> const & collection)
{
  return collection.end() !=
         std::find_if(collection.begin(), collection.end(), [name](char const * v) { return strcmp(name, v) == 0; });
}
}  // namespace

static VkBool32 VKAPI_PTR DebugReportCallbackImpl(VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType,
                                                  uint64_t object, size_t location, int32_t /*messageCode*/,
                                                  char const * pLayerPrefix, char const * pMessage,
                                                  void * /*pUserData*/)
{
  auto logLevel = base::LogLevel::LINFO;
  if ((flags & VK_DEBUG_REPORT_WARNING_BIT_EXT) || (flags & VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT))
    logLevel = base::LogLevel::LWARNING;
  else if (flags & VK_DEBUG_REPORT_ERROR_BIT_EXT)
    logLevel = base::LogLevel::LERROR;
#ifdef ENABLE_VULKAN_DEBUG_DIAGNOSTICS_MESSAGES
  else if (flags & VK_DEBUG_REPORT_DEBUG_BIT_EXT)
    logLevel = base::LogLevel::LDEBUG;
#else
  else
    return VK_FALSE;
#endif

  LOG(logLevel, ("Vulkan Diagnostics [", pLayerPrefix, "] [", GetReportObjectTypeString(objectType), "] [OBJ:", object,
                 "LOC:", location, "]:", pMessage));
  return VK_FALSE;
}

Layers::Layers(bool enableDiagnostics)
  : m_enableDiagnostics(enableDiagnostics)
  , m_vkCreateDebugReportCallbackEXT(vkCreateDebugReportCallbackEXT)
  , m_vkDestroyDebugReportCallbackEXT(vkDestroyDebugReportCallbackEXT)
  , m_vkDebugReportMessageEXT(vkDebugReportMessageEXT)
{
  if (m_enableDiagnostics)
  {
    // Get the instance layers count.
    uint32_t instLayerCount = 0;
    auto statusCode = vkEnumerateInstanceLayerProperties(&instLayerCount, nullptr);
    if (statusCode != VK_SUCCESS)
    {
      LOG_ERROR_VK_CALL(vkEnumerateInstanceLayerProperties, statusCode);
      return;
    }

    // Enumerate instance layers.
    std::vector<VkLayerProperties> layerProperties;
    if (instLayerCount != 0)
    {
      layerProperties.resize(instLayerCount);
      statusCode = vkEnumerateInstanceLayerProperties(&instLayerCount, layerProperties.data());
      if (statusCode != VK_SUCCESS)
      {
        LOG_ERROR_VK_CALL(vkEnumerateInstanceLayerProperties, statusCode);
        return;
      }
      m_instanceLayers = CheckLayers(layerProperties);

      for (auto layer : m_instanceLayers)
        LOG(LDEBUG, ("Vulkan instance layer prepared", layer));
    }
  }

  // Get the instance extensions count.
  uint32_t instExtensionsCount = 0;
  auto statusCode = vkEnumerateInstanceExtensionProperties(nullptr, &instExtensionsCount, nullptr);
  if (statusCode != VK_SUCCESS)
  {
    LOG_ERROR_VK_CALL(vkEnumerateInstanceExtensionProperties, statusCode);
    return;
  }

  // Enumerate instance extensions.
  std::vector<VkExtensionProperties> extensionsProperties;
  if (instExtensionsCount != 0)
  {
    extensionsProperties.resize(instExtensionsCount);
    statusCode = vkEnumerateInstanceExtensionProperties(nullptr, &instExtensionsCount, extensionsProperties.data());
    if (statusCode != VK_SUCCESS)
    {
      LOG_ERROR_VK_CALL(vkEnumerateInstanceExtensionProperties, statusCode);
      return;
    }
  }

  // Enumerate instance extensions for each layer.
  for (auto layerName : m_instanceLayers)
  {
    uint32_t cnt = 0;
    statusCode = vkEnumerateInstanceExtensionProperties(layerName, &cnt, nullptr);
    if (statusCode != VK_SUCCESS)
    {
      LOG_ERROR_VK_CALL(vkEnumerateInstanceExtensionProperties, statusCode);
      return;
    }
    if (cnt == 0)
      continue;

    std::vector<VkExtensionProperties> props(cnt);
    statusCode = vkEnumerateInstanceExtensionProperties(layerName, &cnt, props.data());
    if (statusCode != VK_SUCCESS)
    {
      LOG_ERROR_VK_CALL(vkEnumerateInstanceExtensionProperties, statusCode);
      return;
    }
    extensionsProperties.insert(extensionsProperties.end(), props.begin(), props.end());
  }

  m_instanceExtensions =
      CheckExtensions(extensionsProperties, m_enableDiagnostics, kInstanceExtensions, ARRAY_SIZE(kInstanceExtensions));

  for (auto ext : m_instanceExtensions)
  {
    if (strcmp(ext, kValidationFeaturesExtension) == 0)
      m_validationFeaturesEnabled = true;

    LOG(LINFO, ("Vulkan instance extension prepared", ext));
  }

  if (m_enableDiagnostics && !IsContained(kDebugReportExtension, m_instanceExtensions))
    LOG(LWARNING, ("Vulkan diagnostics is not available on this device."));
}

uint32_t Layers::GetInstanceLayersCount() const
{
  if (!m_enableDiagnostics)
    return 0;

  return static_cast<uint32_t>(m_instanceLayers.size());
}

char const * const * Layers::GetInstanceLayers() const
{
  if (!m_enableDiagnostics)
    return nullptr;

  return m_instanceLayers.data();
}

uint32_t Layers::GetInstanceExtensionsCount() const
{
  return static_cast<uint32_t>(m_instanceExtensions.size());
}

char const * const * Layers::GetInstanceExtensions() const
{
  return m_instanceExtensions.data();
}

bool Layers::Initialize(VkInstance instance, VkPhysicalDevice physicalDevice)
{
  if (m_enableDiagnostics)
  {
    // Get the device layers count.
    uint32_t devLayerCount = 0;
    auto statusCode = vkEnumerateDeviceLayerProperties(physicalDevice, &devLayerCount, nullptr);
    if (statusCode != VK_SUCCESS)
    {
      LOG_ERROR_VK_CALL(vkEnumerateDeviceLayerProperties, statusCode);
      return false;
    }

    // Enumerate device layers.
    std::vector<VkLayerProperties> layerProperties;
    if (devLayerCount != 0)
    {
      layerProperties.resize(devLayerCount);
      statusCode = vkEnumerateDeviceLayerProperties(physicalDevice, &devLayerCount, layerProperties.data());
      if (statusCode != VK_SUCCESS)
      {
        LOG_ERROR_VK_CALL(vkEnumerateDeviceLayerProperties, statusCode);
        return false;
      }
      m_deviceLayers = CheckLayers(layerProperties);

      for (auto layer : m_deviceLayers)
        LOG(LDEBUG, ("Vulkan device layer prepared", layer));
    }
  }

  // Get the device extensions count.
  uint32_t devExtensionsCount = 0;
  auto statusCode = vkEnumerateDeviceExtensionProperties(physicalDevice, nullptr, &devExtensionsCount, nullptr);
  if (statusCode != VK_SUCCESS)
  {
    LOG_ERROR_VK_CALL(vkEnumerateDeviceExtensionProperties, statusCode);
    return false;
  }

  // Enumerate device extensions.
  std::vector<VkExtensionProperties> extensionsProperties;
  if (devExtensionsCount != 0)
  {
    extensionsProperties.resize(devExtensionsCount);
    statusCode =
        vkEnumerateDeviceExtensionProperties(physicalDevice, nullptr, &devExtensionsCount, extensionsProperties.data());
    if (statusCode != VK_SUCCESS)
    {
      LOG_ERROR_VK_CALL(vkEnumerateDeviceExtensionProperties, statusCode);
      return false;
    }
  }

  // Enumerate device extensions for each layer.
  for (auto layerName : m_deviceLayers)
  {
    uint32_t cnt = 0;
    statusCode = vkEnumerateDeviceExtensionProperties(physicalDevice, layerName, &cnt, nullptr);
    if (statusCode != VK_SUCCESS)
    {
      LOG_ERROR_VK_CALL(vkEnumerateDeviceExtensionProperties, statusCode);
      return false;
    }
    if (cnt == 0)
      continue;

    std::vector<VkExtensionProperties> props(cnt);
    statusCode = vkEnumerateDeviceExtensionProperties(physicalDevice, layerName, &cnt, props.data());
    if (statusCode != VK_SUCCESS)
    {
      LOG_ERROR_VK_CALL(vkEnumerateDeviceExtensionProperties, statusCode);
      return false;
    }
    extensionsProperties.insert(extensionsProperties.end(), props.begin(), props.end());
  }

  m_deviceExtensions =
      CheckExtensions(extensionsProperties, m_enableDiagnostics, kDeviceExtensions, ARRAY_SIZE(kDeviceExtensions));
  for (auto ext : m_deviceExtensions)
    LOG(LINFO, ("Vulkan device extension prepared", ext));

  if (m_enableDiagnostics && IsContained(kDebugReportExtension, m_instanceExtensions))
  {
    if (m_vkCreateDebugReportCallbackEXT == nullptr)
    {
      m_vkCreateDebugReportCallbackEXT =
          (PFN_vkCreateDebugReportCallbackEXT)vkGetInstanceProcAddr(instance, "vkCreateDebugReportCallbackEXT");
      m_vkDestroyDebugReportCallbackEXT =
          (PFN_vkDestroyDebugReportCallbackEXT)vkGetInstanceProcAddr(instance, "vkDestroyDebugReportCallbackEXT");
      m_vkDebugReportMessageEXT =
          (PFN_vkDebugReportMessageEXT)vkGetInstanceProcAddr(instance, "vkDebugReportMessageEXT");
    }

    if (m_vkCreateDebugReportCallbackEXT == nullptr)
    {
      LOG_ERROR_VK("Function vkCreateDebugReportCallbackEXT is not found.");
      return false;
    }

    VkDebugReportCallbackCreateInfoEXT dbgInfo = {};
    dbgInfo.sType = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT;
    dbgInfo.pNext = nullptr;
    dbgInfo.flags = VK_DEBUG_REPORT_INFORMATION_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT |
                    VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT | VK_DEBUG_REPORT_ERROR_BIT_EXT |
                    VK_DEBUG_REPORT_DEBUG_BIT_EXT;
    dbgInfo.pfnCallback = DebugReportCallbackImpl;
    dbgInfo.pUserData = nullptr;
    statusCode = m_vkCreateDebugReportCallbackEXT(instance, &dbgInfo, nullptr, &m_reportCallback);
    if (statusCode != VK_SUCCESS)
    {
      LOG_ERROR_VK_CALL(vkCreateDebugReportCallbackEXT, statusCode);
      return false;
    }
  }

  return true;
}

void Layers::Uninitialize(VkInstance instance)
{
  if (m_reportCallback != 0 && m_vkDestroyDebugReportCallbackEXT != nullptr)
    m_vkDestroyDebugReportCallbackEXT(instance, m_reportCallback, nullptr);
}

uint32_t Layers::GetDeviceLayersCount() const
{
  if (!m_enableDiagnostics)
    return 0;

  return static_cast<uint32_t>(m_deviceLayers.size());
}

char const * const * Layers::GetDeviceLayers() const
{
  if (!m_enableDiagnostics)
    return nullptr;

  return m_deviceLayers.data();
}

uint32_t Layers::GetDeviceExtensionsCount() const
{
  return static_cast<uint32_t>(m_deviceExtensions.size());
}

char const * const * Layers::GetDeviceExtensions() const
{
  return m_deviceExtensions.data();
}

bool Layers::IsValidationFeaturesEnabled() const
{
  return m_validationFeaturesEnabled;
}
}  // namespace vulkan
}  // namespace dp
52 libs/drape/vulkan/vulkan_layers.hpp Normal file
@@ -0,0 +1,52 @@
#pragma once

#include <vulkan_wrapper.h>

#include <cstdint>
#include <vector>

namespace dp
{
namespace vulkan
{
class Layers
{
public:
  explicit Layers(bool enableDiagnostics);

  // These methods are available before initialization.
  uint32_t GetInstanceLayersCount() const;
  char const * const * GetInstanceLayers() const;
  uint32_t GetInstanceExtensionsCount() const;
  char const * const * GetInstanceExtensions() const;

  bool Initialize(VkInstance instance, VkPhysicalDevice physicalDevice);
  void Uninitialize(VkInstance instance);

  // These methods are available after initialization.
  uint32_t GetDeviceLayersCount() const;
  char const * const * GetDeviceLayers() const;
  uint32_t GetDeviceExtensionsCount() const;
  char const * const * GetDeviceExtensions() const;

  bool IsValidationFeaturesEnabled() const;

private:
  bool const m_enableDiagnostics;

  std::vector<char const *> m_instanceLayers;
  std::vector<char const *> m_instanceExtensions;

  std::vector<char const *> m_deviceLayers;
  std::vector<char const *> m_deviceExtensions;

  VkDebugReportCallbackEXT m_reportCallback{0};

  PFN_vkCreateDebugReportCallbackEXT m_vkCreateDebugReportCallbackEXT = nullptr;
  PFN_vkDestroyDebugReportCallbackEXT m_vkDestroyDebugReportCallbackEXT = nullptr;
  PFN_vkDebugReportMessageEXT m_vkDebugReportMessageEXT = nullptr;

  bool m_validationFeaturesEnabled = false;
};
} // namespace vulkan
} // namespace dp
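For context, a typical way to wire this class into instance and device creation might look as follows (a minimal sketch; `instance` and `physicalDevice` are assumed to be created and picked by the caller, and error handling is elided):

// Sketch: feeding Layers into Vulkan object creation (illustrative only).
dp::vulkan::Layers layers(true /* enableDiagnostics */);

VkInstanceCreateInfo instanceInfo = {};
instanceInfo.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
instanceInfo.enabledLayerCount = layers.GetInstanceLayersCount();
instanceInfo.ppEnabledLayerNames = layers.GetInstanceLayers();
instanceInfo.enabledExtensionCount = layers.GetInstanceExtensionsCount();
instanceInfo.ppEnabledExtensionNames = layers.GetInstanceExtensions();
// ... vkCreateInstance, pick a VkPhysicalDevice ...

if (layers.Initialize(instance, physicalDevice))
{
  VkDeviceCreateInfo deviceInfo = {};
  deviceInfo.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
  deviceInfo.enabledLayerCount = layers.GetDeviceLayersCount();
  deviceInfo.ppEnabledLayerNames = layers.GetDeviceLayers();
  deviceInfo.enabledExtensionCount = layers.GetDeviceExtensionsCount();
  deviceInfo.ppEnabledExtensionNames = layers.GetDeviceExtensions();
  // ... vkCreateDevice ...
}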
350
libs/drape/vulkan/vulkan_memory_manager.cpp
Normal file
@@ -0,0 +1,350 @@
#include "drape/vulkan/vulkan_memory_manager.hpp"
|
||||
#include "drape/vulkan/vulkan_utils.hpp"
|
||||
|
||||
#include "base/assert.hpp"
|
||||
#include "base/math.hpp"
|
||||
|
||||
#include <algorithm>
|
||||
#include <limits>
|
||||
|
||||
namespace dp
|
||||
{
|
||||
namespace vulkan
|
||||
{
|
||||
namespace
|
||||
{
|
||||
std::array<uint32_t, VulkanMemoryManager::kResourcesCount> constexpr kMinBlockSizeInBytes = {{
|
||||
1024 * 1024, // Geometry
|
||||
128 * 1024, // Uniform
|
||||
0, // Staging (no minimal size)
|
||||
0, // Image (no minimal size)
|
||||
}};
|
||||
|
||||
std::array<uint32_t, VulkanMemoryManager::kResourcesCount> constexpr kDesiredSizeInBytes = {{
|
||||
80 * 1024 * 1024, // Geometry
|
||||
std::numeric_limits<uint32_t>::max(), // Uniform (unlimited)
|
||||
20 * 1024 * 1024, // Staging
|
||||
100 * 1024 * 1024, // Image
|
||||
}};
|
||||
|
||||
VkMemoryPropertyFlags GetMemoryPropertyFlags(VulkanMemoryManager::ResourceType resourceType,
|
||||
std::optional<VkMemoryPropertyFlags> & fallbackTypeBits)
|
||||
{
|
||||
switch (resourceType)
|
||||
{
|
||||
case VulkanMemoryManager::ResourceType::Geometry:
|
||||
fallbackTypeBits = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
|
||||
return VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
|
||||
|
||||
case VulkanMemoryManager::ResourceType::Staging:
|
||||
// No fallback.
|
||||
return VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
|
||||
|
||||
case VulkanMemoryManager::ResourceType::Uniform:
|
||||
// No fallback.
|
||||
return VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
|
||||
|
||||
case VulkanMemoryManager::ResourceType::Image:
|
||||
// No fallback.
|
||||
return VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
|
||||
|
||||
case VulkanMemoryManager::ResourceType::Count: CHECK(false, ());
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
struct LessBlockSize
|
||||
{
|
||||
using BlockPtrT = drape_ptr<VulkanMemoryManager::MemoryBlock>;
|
||||
|
||||
bool operator()(BlockPtrT const & b1, BlockPtrT const & b2) const { return b1->m_blockSize < b2->m_blockSize; }
|
||||
bool operator()(BlockPtrT const & b1, uint32_t b2) const { return b1->m_blockSize < b2; }
|
||||
bool operator()(uint32_t b1, BlockPtrT const & b2) const { return b1 < b2->m_blockSize; }
|
||||
};
|
||||
} // namespace
|
||||
|
||||
VulkanMemoryManager::VulkanMemoryManager(VkDevice device, VkPhysicalDeviceLimits const & deviceLimits,
|
||||
VkPhysicalDeviceMemoryProperties const & memoryProperties)
|
||||
: m_device(device)
|
||||
, m_deviceLimits(deviceLimits)
|
||||
, m_memoryProperties(memoryProperties)
|
||||
{}
|
||||
|
||||
VulkanMemoryManager::~VulkanMemoryManager()
|
||||
{
|
||||
for (size_t i = 0; i < kResourcesCount; ++i)
|
||||
{
|
||||
for (auto const & b : m_freeBlocks[i])
|
||||
{
|
||||
DecrementTotalAllocationsCount();
|
||||
vkFreeMemory(m_device, b->m_memory, nullptr);
|
||||
}
|
||||
|
||||
for (auto const & p : m_memory[i])
|
||||
{
|
||||
for (auto const & b : p.second)
|
||||
{
|
||||
DecrementTotalAllocationsCount();
|
||||
vkFreeMemory(m_device, b->m_memory, nullptr);
|
||||
}
|
||||
}
|
||||
}
|
||||
ASSERT_EQUAL(m_totalAllocationCounter, 0, ());
|
||||
}
|
||||
|
||||
std::optional<uint32_t> VulkanMemoryManager::GetMemoryTypeIndex(uint32_t typeBits,
|
||||
VkMemoryPropertyFlags properties) const
|
||||
{
|
||||
for (uint32_t i = 0; i < m_memoryProperties.memoryTypeCount; i++)
|
||||
{
|
||||
if ((typeBits & 1) == 1)
|
||||
{
|
||||
if ((m_memoryProperties.memoryTypes[i].propertyFlags & properties) == properties)
|
||||
return i;
|
||||
}
|
||||
typeBits >>= 1;
|
||||
}
|
||||
return {};
|
||||
}
|
||||
|
||||
uint32_t VulkanMemoryManager::GetOffsetAlignment(ResourceType resourceType) const
|
||||
{
|
||||
if (resourceType == ResourceType::Uniform)
|
||||
{
|
||||
static uint32_t const kUniformAlignment =
|
||||
math::LCM(static_cast<uint32_t>(m_deviceLimits.minUniformBufferOffsetAlignment),
|
||||
static_cast<uint32_t>(m_deviceLimits.nonCoherentAtomSize));
|
||||
return kUniformAlignment;
|
||||
}
|
||||
|
||||
static uint32_t const kAlignment = math::LCM(static_cast<uint32_t>(m_deviceLimits.minMemoryMapAlignment),
|
||||
static_cast<uint32_t>(m_deviceLimits.nonCoherentAtomSize));
|
||||
return kAlignment;
|
||||
}
|
||||
|
||||
uint32_t VulkanMemoryManager::GetSizeAlignment(VkMemoryRequirements const & memReqs) const
|
||||
{
|
||||
return math::LCM(static_cast<uint32_t>(memReqs.alignment), static_cast<uint32_t>(m_deviceLimits.nonCoherentAtomSize));
|
||||
}
|
||||
|
||||
// static
|
||||
uint32_t VulkanMemoryManager::GetAligned(uint32_t value, uint32_t alignment)
|
||||
{
|
||||
if (alignment == 0)
|
||||
return value;
|
||||
return (value + alignment - 1) & ~(alignment - 1);
|
||||
}
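GetAligned rounds value up to the next multiple of alignment; the bitwise form is valid because Vulkan alignments are powers of two. A quick worked example:

// Example: aligning 100 bytes to a 64-byte boundary.
// (100 + 63) & ~63  ->  163 & 0xFFFFFFC0  ->  128.
static_assert((100 + 64 - 1) / 64 * 64 == 128, "round-up check");
// GetAligned(100, 64) == 128; GetAligned(128, 64) == 128.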
VulkanMemoryManager::AllocationPtr VulkanMemoryManager::Allocate(ResourceType resourceType,
                                                                 VkMemoryRequirements memReqs, uint64_t blockHash)
{
  size_t const intResType = static_cast<size_t>(resourceType);
  auto const alignedSize = GetAligned(static_cast<uint32_t>(memReqs.size), GetSizeAlignment(memReqs));
  // Looking for an existing block.
  {
    auto & m = m_memory[intResType];
    auto const it = m.find(blockHash);
    if (it != m.end())
    {
      CHECK(!it->second.empty(), ());
      auto & block = it->second.back();
      auto const alignedOffset = GetAligned(block->m_freeOffset, GetOffsetAlignment(resourceType));

      // There is space in the current block.
      if (!block->m_isBlocked && (block->m_blockSize >= alignedOffset + alignedSize))
      {
        block->m_freeOffset = alignedOffset + alignedSize;
        block->m_allocationCounter++;
        return std::make_shared<Allocation>(resourceType, blockHash, alignedOffset, alignedSize, make_ref(block));
      }
    }

    // Looking for a suitable free block.
    auto & fm = m_freeBlocks[intResType];
    // The free blocks array must be sorted by size.
    auto const freeBlockIt = std::lower_bound(fm.begin(), fm.end(), alignedSize, LessBlockSize());
    if (freeBlockIt != fm.end())
    {
      drape_ptr<MemoryBlock> freeBlock = std::move(*freeBlockIt);
      CHECK_EQUAL(freeBlock->m_allocationCounter, 0, ());
      CHECK_EQUAL(freeBlock->m_freeOffset, 0, ());
      CHECK_LESS_OR_EQUAL(alignedSize, freeBlock->m_blockSize, ());
      CHECK(!freeBlock->m_isBlocked, ());
      fm.erase(freeBlockIt);

      freeBlock->m_freeOffset = alignedSize;
      freeBlock->m_allocationCounter++;
      auto p = std::make_shared<Allocation>(resourceType, blockHash, 0, alignedSize, make_ref(freeBlock));

      m[blockHash].push_back(std::move(freeBlock));
      return p;
    }
  }

  // Looking for a memory type index by memory properties.
  std::optional<VkMemoryPropertyFlags> fallbackFlags;
  auto flags = GetMemoryPropertyFlags(resourceType, fallbackFlags);
  auto memoryTypeIndex = GetMemoryTypeIndex(memReqs.memoryTypeBits, flags);
  if (!memoryTypeIndex && fallbackFlags)
  {
    flags = *fallbackFlags;
    memoryTypeIndex = GetMemoryTypeIndex(memReqs.memoryTypeBits, flags);
  }

  CHECK(memoryTypeIndex, ("Unsupported memory allocation configuration."));

  // Create a new memory block.
  auto const blockSize = std::max(kMinBlockSizeInBytes[intResType], alignedSize);
  VkDeviceMemory memory = {};
  VkMemoryAllocateInfo memAllocInfo = {};
  memAllocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
  memAllocInfo.pNext = nullptr;
  memAllocInfo.allocationSize = blockSize;
  memAllocInfo.memoryTypeIndex = *memoryTypeIndex;
  IncrementTotalAllocationsCount();

  CHECK_VK_CALL_EX(vkAllocateMemory(m_device, &memAllocInfo, nullptr, &memory),
                   ("Requested size =", blockSize, "Allocated sizes =", m_sizes, "Total allocs =",
                    m_totalAllocationCounter, m_memory[intResType].size(), m_freeBlocks[intResType].size()));

  m_sizes[intResType] += blockSize;

  // Attach the block.
  auto & m = m_memory[intResType];

  auto newBlock = make_unique_dp<MemoryBlock>();
  newBlock->m_memory = memory;
  newBlock->m_blockSize = blockSize;
  newBlock->m_freeOffset = alignedSize;
  newBlock->m_allocationCounter++;
  newBlock->m_isCoherent = ((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0);

  auto p = std::make_shared<Allocation>(resourceType, blockHash, 0, alignedSize, make_ref(newBlock));
  m[blockHash].push_back(std::move(newBlock));
  return p;
}
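To make the contract concrete, here is a caller-side sketch (this is essentially what VulkanObjectManager::CreateBuffer later in this commit does; `device`, `buffer` and `memoryManager` are assumed to exist):

// Sketch: sub-allocating buffer memory from the manager (illustrative only).
VkMemoryRequirements memReqs = {};
vkGetBufferMemoryRequirements(device, buffer, &memReqs);

auto allocation = memoryManager.Allocate(dp::vulkan::VulkanMemoryManager::ResourceType::Geometry,
                                         memReqs, 0 /* blockHash */);
// The allocation carries a block-relative offset/size, both already aligned.
CHECK_VK_CALL(vkBindBufferMemory(device, buffer, allocation->m_memoryBlock->m_memory,
                                 allocation->m_alignedOffset));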
void VulkanMemoryManager::BeginDeallocationSession()
{
  m_isInDeallocationSession = true;
  m_deallocationSessionMask = 0;
}

void VulkanMemoryManager::Deallocate(AllocationPtr ptr)
{
  CHECK(ptr, ());
  CHECK(!ptr->m_memoryBlock->m_isBlocked, ());
  auto const resourceIndex = static_cast<size_t>(ptr->m_resourceType);
  auto & m = m_memory[resourceIndex];
  auto const it = m.find(ptr->m_blockHash);
  CHECK(it != m.end(), ());
  auto blockIt = std::find_if(it->second.begin(), it->second.end(), [&ptr](drape_ptr<MemoryBlock> const & b)
  {
    ASSERT(ptr->m_memoryBlock != nullptr, ());
    return b->m_memory == ptr->m_memoryBlock->m_memory;
  });
  CHECK(blockIt != it->second.end(), ());
  CHECK_GREATER((*blockIt)->m_allocationCounter, 0, ());
  (*blockIt)->m_allocationCounter--;

  if ((*blockIt)->m_allocationCounter == 0)
  {
    if (m_isInDeallocationSession)
    {
      // Here we set a bit in the deallocation mask to skip the processing of untouched
      // resource collections.
      m_deallocationSessionMask |= (1 << resourceIndex);
    }
    else
    {
      drape_ptr<MemoryBlock> memoryBlock = std::move(*blockIt);
      it->second.erase(blockIt);
      if (m_sizes[resourceIndex] > kDesiredSizeInBytes[resourceIndex])
      {
        CHECK_LESS_OR_EQUAL(memoryBlock->m_blockSize, m_sizes[resourceIndex], ());
        m_sizes[resourceIndex] -= memoryBlock->m_blockSize;
        DecrementTotalAllocationsCount();
        vkFreeMemory(m_device, memoryBlock->m_memory, nullptr);
      }
      else
      {
        memoryBlock->m_freeOffset = 0;
        auto & fm = m_freeBlocks[resourceIndex];
        fm.push_back(std::move(memoryBlock));
        std::sort(fm.begin(), fm.end(), LessBlockSize());
      }
    }
  }
}

void VulkanMemoryManager::EndDeallocationSession()
{
  if (!m_isInDeallocationSession)
    return;

  m_isInDeallocationSession = false;

  for (size_t i = 0; i < kResourcesCount; ++i)
  {
    if (((m_deallocationSessionMask >> i) & 1) == 0)
      continue;

    auto & fm = m_freeBlocks[i];

    static std::vector<uint64_t> hashesToDelete;
    for (auto & p : m_memory[i])
    {
      auto & m = p.second;
      m.erase(std::remove_if(m.begin(), m.end(),
                             [this, &fm, i](drape_ptr<MemoryBlock> & b)
                             {
                               if (b->m_allocationCounter != 0)
                                 return false;

                               if (m_sizes[i] > kDesiredSizeInBytes[i])
                               {
                                 CHECK_LESS_OR_EQUAL(b->m_blockSize, m_sizes[i], ());
                                 m_sizes[i] -= b->m_blockSize;
                                 DecrementTotalAllocationsCount();
                                 vkFreeMemory(m_device, b->m_memory, nullptr);
                               }
                               else
                               {
                                 auto block = std::move(b);
                                 block->m_freeOffset = 0;
                                 fm.push_back(std::move(block));
                               }
                               return true;
                             }),
              m.end());

      if (m.empty())
        hashesToDelete.push_back(p.first);
    }

    for (auto hash : hashesToDelete)
      m_memory[i].erase(hash);
    hashesToDelete.clear();

    std::sort(fm.begin(), fm.end(), LessBlockSize());
  }
}
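A deallocation session batches the free-block housekeeping: the bit set in Deallocate above defers recycling and sorting to EndDeallocationSession. A caller-side sketch of the intended pattern (CollectObjectsImpl later in this commit uses exactly this shape):

// Sketch: batching many frees so blocks are recycled/sorted once per resource type.
memoryManager.BeginDeallocationSession();
for (auto const & allocation : allocationsToRelease)  // illustrative container
  memoryManager.Deallocate(allocation);
memoryManager.EndDeallocationSession();  // Recycles or frees emptied blocks.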
void VulkanMemoryManager::IncrementTotalAllocationsCount()
{
  ++m_totalAllocationCounter;
  CHECK_LESS_OR_EQUAL(m_totalAllocationCounter, m_deviceLimits.maxMemoryAllocationCount, ());
}

void VulkanMemoryManager::DecrementTotalAllocationsCount()
{
  CHECK_GREATER(m_totalAllocationCounter, 0, ());
  --m_totalAllocationCounter;
}

VkPhysicalDeviceLimits const & VulkanMemoryManager::GetDeviceLimits() const
{
  return m_deviceLimits;
}
} // namespace vulkan
} // namespace dp
97
libs/drape/vulkan/vulkan_memory_manager.hpp
Normal file
@@ -0,0 +1,97 @@
#pragma once

#include "drape/pointers.hpp"

#include <vulkan_wrapper.h>

#include <array>
#include <cstdint>
#include <memory>
#include <mutex>
#include <optional>
#include <unordered_map>
#include <vector>

namespace dp
{
namespace vulkan
{
// NOTE: The class is not thread safe and must be externally synchronized.
class VulkanMemoryManager
{
public:
  VulkanMemoryManager(VkDevice device, VkPhysicalDeviceLimits const & deviceLimits,
                      VkPhysicalDeviceMemoryProperties const & memoryProperties);
  ~VulkanMemoryManager();

  enum class ResourceType : uint8_t
  {
    Geometry = 0,
    Uniform,
    Staging,
    Image,

    Count
  };
  static size_t constexpr kResourcesCount = static_cast<uint32_t>(VulkanMemoryManager::ResourceType::Count);

  struct MemoryBlock
  {
    VkDeviceMemory m_memory = {};
    uint32_t m_blockSize = 0;
    uint32_t m_freeOffset = 0;
    uint32_t m_allocationCounter = 0;
    bool m_isCoherent = false;
    bool m_isBlocked = false;
  };

  struct Allocation
  {
    uint64_t const m_blockHash;
    uint32_t const m_alignedOffset;
    uint32_t const m_alignedSize;
    ResourceType const m_resourceType;
    ref_ptr<MemoryBlock> m_memoryBlock;

    Allocation(ResourceType resourceType, uint64_t blockHash, uint32_t offset, uint32_t size,
               ref_ptr<MemoryBlock> memoryBlock)
      : m_blockHash(blockHash)
      , m_alignedOffset(offset)
      , m_alignedSize(size)
      , m_resourceType(resourceType)
      , m_memoryBlock(memoryBlock)
    {}
  };

  using AllocationPtr = std::shared_ptr<Allocation>;

  AllocationPtr Allocate(ResourceType resourceType, VkMemoryRequirements memReqs, uint64_t blockHash);
  void BeginDeallocationSession();
  void Deallocate(AllocationPtr ptr);
  void EndDeallocationSession();

  uint32_t GetOffsetAlignment(ResourceType resourceType) const;
  uint32_t GetSizeAlignment(VkMemoryRequirements const & memReqs) const;
  static uint32_t GetAligned(uint32_t value, uint32_t alignment);

  VkPhysicalDeviceLimits const & GetDeviceLimits() const;

private:
  std::optional<uint32_t> GetMemoryTypeIndex(uint32_t typeBits, VkMemoryPropertyFlags properties) const;
  void IncrementTotalAllocationsCount();
  void DecrementTotalAllocationsCount();

  VkDevice const m_device;
  VkPhysicalDeviceLimits const m_deviceLimits;
  VkPhysicalDeviceMemoryProperties const m_memoryProperties;
  bool m_isInDeallocationSession = false;
  uint32_t m_deallocationSessionMask = 0;
  uint32_t m_totalAllocationCounter = 0;

  using MemoryBlocks = std::vector<drape_ptr<MemoryBlock>>;
  std::array<std::unordered_map<uint64_t, MemoryBlocks>, kResourcesCount> m_memory;
  std::array<MemoryBlocks, kResourcesCount> m_freeBlocks;
  std::array<uint32_t, kResourcesCount> m_sizes = {};
};
} // namespace vulkan
} // namespace dp
251
libs/drape/vulkan/vulkan_mesh_object_impl.cpp
Normal file
@@ -0,0 +1,251 @@
#include "drape/mesh_object.hpp"
|
||||
#include "drape/pointers.hpp"
|
||||
#include "drape/vulkan/vulkan_base_context.hpp"
|
||||
#include "drape/vulkan/vulkan_param_descriptor.hpp"
|
||||
#include "drape/vulkan/vulkan_staging_buffer.hpp"
|
||||
#include "drape/vulkan/vulkan_utils.hpp"
|
||||
|
||||
#include "base/assert.hpp"
|
||||
#include "base/buffer_vector.hpp"
|
||||
|
||||
#include <cstdint>
|
||||
#include <limits>
|
||||
#include <vector>
|
||||
|
||||
namespace dp
|
||||
{
|
||||
namespace vulkan
|
||||
{
|
||||
namespace
|
||||
{
|
||||
VkPrimitiveTopology GetPrimitiveType(MeshObject::DrawPrimitive primitive)
|
||||
{
|
||||
switch (primitive)
|
||||
{
|
||||
case MeshObject::DrawPrimitive::Triangles: return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;
|
||||
case MeshObject::DrawPrimitive::TriangleStrip: return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP;
|
||||
case MeshObject::DrawPrimitive::LineStrip: return VK_PRIMITIVE_TOPOLOGY_LINE_STRIP;
|
||||
}
|
||||
UNREACHABLE();
|
||||
}
|
||||
} // namespace
|
||||
|
||||
class VulkanMeshObjectImpl : public MeshObjectImpl
|
||||
{
|
||||
public:
|
||||
VulkanMeshObjectImpl(ref_ptr<VulkanObjectManager> objectManager, ref_ptr<dp::MeshObject> mesh)
|
||||
: m_mesh(std::move(mesh))
|
||||
, m_objectManager(objectManager)
|
||||
, m_descriptorUpdater(objectManager)
|
||||
{}
|
||||
|
||||
void Build(ref_ptr<dp::GraphicsContext> context, ref_ptr<dp::GpuProgram> program) override
|
||||
{
|
||||
m_geometryBuffers.resize(m_mesh->m_buffers.size());
|
||||
m_bindingInfoCount = static_cast<uint8_t>(m_mesh->m_buffers.size());
|
||||
CHECK_LESS_OR_EQUAL(m_bindingInfoCount, kMaxBindingInfo, ());
|
||||
for (size_t i = 0; i < m_mesh->m_buffers.size(); i++)
|
||||
{
|
||||
auto const sizeInBytes = m_mesh->m_buffers[i]->GetSizeInBytes();
|
||||
if (sizeInBytes == 0)
|
||||
continue;
|
||||
|
||||
m_geometryBuffers[i] =
|
||||
m_objectManager->CreateBuffer(VulkanMemoryManager::ResourceType::Geometry, sizeInBytes, 0 /* batcherHash */);
|
||||
SET_DEBUG_NAME_VK(VK_OBJECT_TYPE_BUFFER, m_geometryBuffers[i].m_buffer,
|
||||
("VB: Mesh (" + m_mesh->m_debugName + ") " + std::to_string(i)).c_str());
|
||||
|
||||
m_objectManager->Fill(m_geometryBuffers[i], m_mesh->m_buffers[i]->GetData(), sizeInBytes);
|
||||
|
||||
m_bindingInfo[i] =
|
||||
dp::BindingInfo(static_cast<uint8_t>(m_mesh->m_buffers[i]->m_attributes.size()), static_cast<uint8_t>(i));
|
||||
for (size_t j = 0; j < m_mesh->m_buffers[i]->m_attributes.size(); ++j)
|
||||
{
|
||||
auto const & attr = m_mesh->m_buffers[i]->m_attributes[j];
|
||||
auto & binding = m_bindingInfo[i].GetBindingDecl(static_cast<uint16_t>(j));
|
||||
binding.m_attributeName = attr.m_attributeName;
|
||||
binding.m_componentCount = static_cast<uint8_t>(attr.m_componentsCount);
|
||||
binding.m_componentType = gl_const::GLFloatType;
|
||||
binding.m_offset = static_cast<uint8_t>(attr.m_offset);
|
||||
CHECK_LESS_OR_EQUAL(m_mesh->m_buffers[i]->GetStrideInBytes(),
|
||||
static_cast<uint32_t>(std::numeric_limits<uint8_t>::max()), ());
|
||||
binding.m_stride = static_cast<uint8_t>(m_mesh->m_buffers[i]->GetStrideInBytes());
|
||||
}
|
||||
}
|
||||
|
||||
if (!m_mesh->m_indices.empty())
|
||||
{
|
||||
auto const sizeInBytes = static_cast<uint32_t>(m_mesh->m_indices.size() * sizeof(uint16_t));
|
||||
m_indexBuffer =
|
||||
m_objectManager->CreateBuffer(VulkanMemoryManager::ResourceType::Geometry, sizeInBytes, 0 /* batcherHash */);
|
||||
SET_DEBUG_NAME_VK(VK_OBJECT_TYPE_BUFFER, m_indexBuffer.m_buffer,
|
||||
("IB: Mesh (" + m_mesh->m_debugName + ")").c_str());
|
||||
|
||||
m_objectManager->Fill(m_indexBuffer, m_mesh->m_indices.data(), sizeInBytes);
|
||||
}
|
||||
}
|
||||
|
||||
void Reset() override
|
||||
{
|
||||
m_descriptorUpdater.Destroy();
|
||||
for (auto const & b : m_geometryBuffers)
|
||||
m_objectManager->DestroyObject(b);
|
||||
m_geometryBuffers.clear();
|
||||
|
||||
if (m_indexBuffer.m_buffer != VK_NULL_HANDLE)
|
||||
m_objectManager->DestroyObject(m_indexBuffer);
|
||||
}
|
||||
|
||||
void UpdateBuffer(ref_ptr<dp::GraphicsContext> context, uint32_t bufferInd) override
|
||||
{
|
||||
CHECK_LESS(bufferInd, static_cast<uint32_t>(m_geometryBuffers.size()), ());
|
||||
auto & buffer = m_mesh->m_buffers[bufferInd];
|
||||
auto const sizeInBytes = buffer->GetSizeInBytes();
|
||||
CHECK(sizeInBytes != 0, ());
|
||||
|
||||
UpdateBufferInternal(context, m_geometryBuffers[bufferInd].m_buffer, VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT,
|
||||
buffer->GetData(), sizeInBytes);
|
||||
}
|
||||
|
||||
void UpdateIndexBuffer(ref_ptr<dp::GraphicsContext> context) override
|
||||
{
|
||||
CHECK(!m_mesh->m_indices.empty(), ());
|
||||
auto const sizeInBytes = static_cast<uint32_t>(m_mesh->m_indices.size() * sizeof(uint16_t));
|
||||
CHECK(m_indexBuffer.m_buffer != VK_NULL_HANDLE, ());
|
||||
|
||||
UpdateBufferInternal(context, m_indexBuffer.m_buffer, VK_ACCESS_INDEX_READ_BIT, m_mesh->m_indices.data(),
|
||||
sizeInBytes);
|
||||
}
|
||||
|
||||
void DrawPrimitives(ref_ptr<dp::GraphicsContext> context, uint32_t vertexCount, uint32_t startVertex) override
|
||||
{
|
||||
ref_ptr<dp::vulkan::VulkanBaseContext> vulkanContext = context;
|
||||
VkCommandBuffer commandBuffer = vulkanContext->GetCurrentRenderingCommandBuffer();
|
||||
CHECK(commandBuffer != nullptr, ());
|
||||
|
||||
BindVertexBuffers(context, commandBuffer);
|
||||
|
||||
vkCmdDraw(commandBuffer, vertexCount, 1, startVertex, 0);
|
||||
}
|
||||
|
||||
void DrawPrimitivesIndexed(ref_ptr<dp::GraphicsContext> context, uint32_t indexCount, uint32_t startIndex) override
|
||||
{
|
||||
ref_ptr<dp::vulkan::VulkanBaseContext> vulkanContext = context;
|
||||
VkCommandBuffer commandBuffer = vulkanContext->GetCurrentRenderingCommandBuffer();
|
||||
CHECK(commandBuffer != nullptr, ());
|
||||
|
||||
BindVertexBuffers(context, commandBuffer);
|
||||
|
||||
CHECK(m_indexBuffer.m_buffer != VK_NULL_HANDLE, ());
|
||||
vkCmdBindIndexBuffer(commandBuffer, m_indexBuffer.m_buffer, 0, VK_INDEX_TYPE_UINT16);
|
||||
|
||||
vkCmdDrawIndexed(commandBuffer, indexCount, 1, startIndex, 0, 0);
|
||||
}
|
||||
|
||||
void Bind(ref_ptr<dp::GpuProgram> program) override {}
|
||||
void Unbind() override {}
|
||||
|
||||
private:
|
||||
void BindVertexBuffers(ref_ptr<dp::GraphicsContext> context, VkCommandBuffer commandBuffer)
|
||||
{
|
||||
ref_ptr<dp::vulkan::VulkanBaseContext> vulkanContext = context;
|
||||
|
||||
vulkanContext->SetPrimitiveTopology(GetPrimitiveType(m_mesh->m_drawPrimitive));
|
||||
vulkanContext->SetBindingInfo(m_bindingInfo, m_bindingInfoCount);
|
||||
|
||||
m_descriptorUpdater.Update(context);
|
||||
auto descriptorSet = m_descriptorUpdater.GetDescriptorSet();
|
||||
|
||||
uint32_t dynamicOffset = vulkanContext->GetCurrentDynamicBufferOffset();
|
||||
vkCmdBindDescriptorSets(commandBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, vulkanContext->GetCurrentPipelineLayout(),
|
||||
0, 1, &descriptorSet, 1, &dynamicOffset);
|
||||
|
||||
vkCmdBindPipeline(commandBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, vulkanContext->GetCurrentPipeline());
|
||||
|
||||
buffer_vector<VkBuffer, 8> buffers;
|
||||
buffer_vector<VkDeviceSize, 8> offsets;
|
||||
for (uint32_t i = 0; i < static_cast<uint32_t>(m_geometryBuffers.size()); ++i)
|
||||
{
|
||||
buffers.emplace_back(m_geometryBuffers[i].m_buffer);
|
||||
offsets.emplace_back(0);
|
||||
}
|
||||
vkCmdBindVertexBuffers(commandBuffer, 0, static_cast<uint32_t>(m_geometryBuffers.size()), buffers.data(),
|
||||
offsets.data());
|
||||
}
  void UpdateBufferInternal(ref_ptr<dp::GraphicsContext> context, VkBuffer buffer, VkAccessFlagBits bufferAccessMask,
                            void const * data, uint32_t sizeInBytes)
  {
    ref_ptr<dp::vulkan::VulkanBaseContext> vulkanContext = context;
    VkCommandBuffer commandBuffer = vulkanContext->GetCurrentMemoryCommandBuffer();
    CHECK(commandBuffer != nullptr, ());

    // Set up a barrier to prevent data collisions (write-after-write, write-after-read).
    VkBufferMemoryBarrier barrier = {};
    barrier.sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER;
    barrier.pNext = nullptr;
    barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT | bufferAccessMask;
    barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
    barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    barrier.buffer = buffer;
    barrier.offset = 0;
    barrier.size = sizeInBytes;
    vkCmdPipelineBarrier(commandBuffer, VK_PIPELINE_STAGE_VERTEX_INPUT_BIT | VK_PIPELINE_STAGE_TRANSFER_BIT,
                         VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 1, &barrier, 0, nullptr);

    // Copy to the default or to a temporary staging buffer.
    auto stagingBuffer = vulkanContext->GetDefaultStagingBuffer();
    if (stagingBuffer->HasEnoughSpace(sizeInBytes))
    {
      auto staging = stagingBuffer->Reserve(sizeInBytes);
      memcpy(staging.m_pointer, data, sizeInBytes);

      // Schedule a command to copy from the staging buffer to our geometry buffer.
      VkBufferCopy copyRegion = {};
      copyRegion.dstOffset = 0;
      copyRegion.srcOffset = staging.m_offset;
      copyRegion.size = sizeInBytes;
      vkCmdCopyBuffer(commandBuffer, staging.m_stagingBuffer, buffer, 1, &copyRegion);
    }
    else
    {
      // Here we use a temporary staging object, which will be destroyed after the nearest
      // command queue submission.
      VulkanStagingBuffer tempStagingBuffer(m_objectManager, sizeInBytes);
      CHECK(tempStagingBuffer.HasEnoughSpace(sizeInBytes), ());
      auto staging = tempStagingBuffer.Reserve(sizeInBytes);
      memcpy(staging.m_pointer, data, sizeInBytes);
      tempStagingBuffer.Flush();

      // Schedule a command to copy from the staging buffer to our geometry buffer.
      VkBufferCopy copyRegion = {};
      copyRegion.dstOffset = 0;
      copyRegion.srcOffset = staging.m_offset;
      copyRegion.size = sizeInBytes;
      vkCmdCopyBuffer(commandBuffer, staging.m_stagingBuffer, buffer, 1, &copyRegion);
    }

    // Set up a barrier to prevent data collisions (read-after-write).
    barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
    barrier.dstAccessMask = bufferAccessMask;
    vkCmdPipelineBarrier(commandBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, 0, 0,
                         nullptr, 1, &barrier, 0, nullptr);
  }

  ref_ptr<dp::MeshObject> m_mesh;
  ref_ptr<VulkanObjectManager> m_objectManager;
  std::vector<VulkanObject> m_geometryBuffers;
  VulkanObject m_indexBuffer;
  BindingInfoArray m_bindingInfo;
  uint8_t m_bindingInfoCount = 0;
  ParamDescriptorUpdater m_descriptorUpdater;
};
} // namespace vulkan

void MeshObject::InitForVulkan(ref_ptr<dp::GraphicsContext> context)
{
  ref_ptr<dp::vulkan::VulkanBaseContext> vulkanContext = context;
  m_impl = make_unique_dp<vulkan::VulkanMeshObjectImpl>(vulkanContext->GetObjectManager(), make_ref(this));
}
} // namespace dp
478
libs/drape/vulkan/vulkan_object_manager.cpp
Normal file
@@ -0,0 +1,478 @@
#include "drape/vulkan/vulkan_object_manager.hpp"
|
||||
|
||||
#include "drape/drape_routine.hpp"
|
||||
|
||||
#include "platform/trace.hpp"
|
||||
|
||||
#include <algorithm>
|
||||
#include <cstring>
|
||||
|
||||
namespace dp
|
||||
{
|
||||
namespace vulkan
|
||||
{
|
||||
namespace
|
||||
{
|
||||
size_t constexpr kBackendQueueIndex = 0;
|
||||
size_t constexpr kOtherQueueIndex = 0;
|
||||
|
||||
// Maximum descriptors sets count in the pool.
|
||||
uint32_t constexpr kMaxDescriptorsSetCount = 256 * kMaxInflightFrames;
|
||||
|
||||
VkSamplerAddressMode GetVulkanSamplerAddressMode(TextureWrapping wrapping)
|
||||
{
|
||||
switch (wrapping)
|
||||
{
|
||||
case TextureWrapping::ClampToEdge: return VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
|
||||
case TextureWrapping::Repeat: return VK_SAMPLER_ADDRESS_MODE_REPEAT;
|
||||
}
|
||||
UNREACHABLE();
|
||||
}
|
||||
|
||||
VkFilter GetVulkanFilter(TextureFilter filter)
|
||||
{
|
||||
switch (filter)
|
||||
{
|
||||
case TextureFilter::Linear: return VK_FILTER_LINEAR;
|
||||
case TextureFilter::Nearest: return VK_FILTER_NEAREST;
|
||||
}
|
||||
UNREACHABLE();
|
||||
}
|
||||
} // namespace
|
||||
|
||||
VulkanObjectManager::VulkanObjectManager(VkDevice device, VkPhysicalDeviceLimits const & deviceLimits,
|
||||
VkPhysicalDeviceMemoryProperties const & memoryProperties,
|
||||
uint32_t queueFamilyIndex)
|
||||
: m_device(device)
|
||||
, m_queueFamilyIndex(queueFamilyIndex)
|
||||
, m_memoryManager(device, deviceLimits, memoryProperties)
|
||||
{
|
||||
size_t constexpr kAvgDestroyCount = 50;
|
||||
|
||||
for (auto & q : m_queuesToDestroy[ThreadType::Frontend])
|
||||
q.reserve(kAvgDestroyCount);
|
||||
|
||||
for (auto & descriptorsToDestroy : m_descriptorsToDestroy)
|
||||
descriptorsToDestroy.reserve(kAvgDestroyCount);
|
||||
}
|
||||
|
||||
VulkanObjectManager::~VulkanObjectManager()
|
||||
{
|
||||
for (auto & descriptorsToDestroy : m_descriptorsToDestroy)
|
||||
CollectDescriptorSetGroupsUnsafe(descriptorsToDestroy);
|
||||
|
||||
for (size_t i = 0; i < ThreadType::Count; ++i)
|
||||
for (auto & q : m_queuesToDestroy[i])
|
||||
CollectObjectsImpl(q);
|
||||
|
||||
for (auto const & s : m_samplers)
|
||||
vkDestroySampler(m_device, s.second, nullptr);
|
||||
m_samplers.clear();
|
||||
|
||||
DestroyDescriptorPools();
|
||||
}
|
||||
|
||||
void VulkanObjectManager::RegisterThread(ThreadType type)
|
||||
{
|
||||
m_renderers[type] = std::this_thread::get_id();
|
||||
}
|
||||
|
||||
void VulkanObjectManager::SetCurrentInflightFrameIndex(uint32_t index)
|
||||
{
|
||||
CHECK(std::this_thread::get_id() == m_renderers[ThreadType::Frontend], ());
|
||||
CHECK_LESS(m_currentInflightFrameIndex, kMaxInflightFrames, ());
|
||||
m_currentInflightFrameIndex = index;
|
||||
}
|
||||
|
||||
VulkanObject VulkanObjectManager::CreateBuffer(VulkanMemoryManager::ResourceType resourceType, uint32_t sizeInBytes,
|
||||
uint64_t batcherHash)
|
||||
{
|
||||
VulkanObject result;
|
||||
VkBufferCreateInfo info = {};
|
||||
info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
|
||||
info.pNext = nullptr;
|
||||
info.flags = 0;
|
||||
info.size = sizeInBytes;
|
||||
if (resourceType == VulkanMemoryManager::ResourceType::Geometry)
|
||||
{
|
||||
info.usage =
|
||||
VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
|
||||
}
|
||||
else if (resourceType == VulkanMemoryManager::ResourceType::Uniform)
|
||||
{
|
||||
info.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
|
||||
}
|
||||
else if (resourceType == VulkanMemoryManager::ResourceType::Staging)
|
||||
{
|
||||
info.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
|
||||
}
|
||||
else
|
||||
{
|
||||
CHECK(false, ("Unsupported resource type."));
|
||||
}
|
||||
|
||||
info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
|
||||
info.queueFamilyIndexCount = 1;
|
||||
info.pQueueFamilyIndices = &m_queueFamilyIndex;
|
||||
CHECK_VK_CALL(vkCreateBuffer(m_device, &info, nullptr, &result.m_buffer));
|
||||
|
||||
SET_DEBUG_NAME_VK(
|
||||
VK_OBJECT_TYPE_BUFFER, result.m_buffer,
|
||||
((resourceType == VulkanMemoryManager::ResourceType::Geometry
|
||||
? "B: Geometry ("
|
||||
: (resourceType == VulkanMemoryManager::ResourceType::Uniform ? "B: Uniform (" : "B: Staging (")) +
|
||||
std::to_string(sizeInBytes) + " bytes)")
|
||||
.c_str());
|
||||
|
||||
VkMemoryRequirements memReqs = {};
|
||||
vkGetBufferMemoryRequirements(m_device, result.m_buffer, &memReqs);
|
||||
|
||||
{
|
||||
std::lock_guard<std::mutex> lock(m_mutex);
|
||||
result.m_allocation = m_memoryManager.Allocate(resourceType, memReqs, batcherHash);
|
||||
CHECK_VK_CALL(vkBindBufferMemory(m_device, result.m_buffer, result.GetMemory(), result.GetAlignedOffset()));
|
||||
}
|
||||
|
||||
#ifdef ENABLE_TRACE
|
||||
m_buffersCount++;
|
||||
TRACE_COUNTER("[drape][vulkan] Buffers", m_buffersCount);
|
||||
#endif
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
VulkanObject VulkanObjectManager::CreateImage(VkImageUsageFlags usageFlags, VkFormat format, VkImageTiling tiling,
|
||||
VkImageAspectFlags aspectFlags, uint32_t width, uint32_t height)
|
||||
{
|
||||
VulkanObject result;
|
||||
VkImageCreateInfo imageCreateInfo = {};
|
||||
imageCreateInfo.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
|
||||
imageCreateInfo.pNext = nullptr;
|
||||
imageCreateInfo.imageType = VK_IMAGE_TYPE_2D;
|
||||
imageCreateInfo.format = format;
|
||||
imageCreateInfo.mipLevels = 1;
|
||||
imageCreateInfo.arrayLayers = 1;
|
||||
imageCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
|
||||
imageCreateInfo.tiling = tiling;
|
||||
imageCreateInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
|
||||
imageCreateInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
|
||||
imageCreateInfo.extent = {width, height, 1};
|
||||
imageCreateInfo.usage = usageFlags | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
|
||||
CHECK_VK_CALL(vkCreateImage(m_device, &imageCreateInfo, nullptr, &result.m_image));
|
||||
|
||||
VkMemoryRequirements memReqs = {};
|
||||
vkGetImageMemoryRequirements(m_device, result.m_image, &memReqs);
|
||||
|
||||
{
|
||||
std::lock_guard<std::mutex> lock(m_mutex);
|
||||
result.m_allocation =
|
||||
m_memoryManager.Allocate(VulkanMemoryManager::ResourceType::Image, memReqs, 0 /* blockHash */);
|
||||
CHECK_VK_CALL(vkBindImageMemory(m_device, result.m_image, result.GetMemory(), result.GetAlignedOffset()));
|
||||
}
|
||||
|
||||
VkImageViewCreateInfo viewCreateInfo = {};
|
||||
viewCreateInfo.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
|
||||
viewCreateInfo.pNext = nullptr;
|
||||
viewCreateInfo.viewType = VK_IMAGE_VIEW_TYPE_2D;
|
||||
viewCreateInfo.format = format;
|
||||
if (usageFlags & VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT)
|
||||
{
|
||||
viewCreateInfo.components = {VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY,
|
||||
VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY};
|
||||
}
|
||||
else
|
||||
{
|
||||
viewCreateInfo.components = {VK_COMPONENT_SWIZZLE_R, VK_COMPONENT_SWIZZLE_G, VK_COMPONENT_SWIZZLE_B,
|
||||
VK_COMPONENT_SWIZZLE_A};
|
||||
}
|
||||
viewCreateInfo.subresourceRange.aspectMask = aspectFlags;
|
||||
viewCreateInfo.subresourceRange.baseMipLevel = 0;
|
||||
viewCreateInfo.subresourceRange.levelCount = 1;
|
||||
viewCreateInfo.subresourceRange.baseArrayLayer = 0;
|
||||
viewCreateInfo.subresourceRange.layerCount = 1;
|
||||
viewCreateInfo.image = result.m_image;
|
||||
CHECK_VK_CALL(vkCreateImageView(m_device, &viewCreateInfo, nullptr, &result.m_imageView));
|
||||
|
||||
#ifdef ENABLE_TRACE
|
||||
m_imagesCount++;
|
||||
TRACE_COUNTER("[drape][vulkan] Images", m_imagesCount);
|
||||
#endif
|
||||
|
||||
return result;
|
||||
}
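For example, a depth-stencil attachment could be created through this call like so (a usage sketch; the chosen format must be one the device actually supports, and `width`/`height` come from the caller):

// Sketch: creating a depth-stencil image via the object manager (illustrative only).
dp::vulkan::VulkanObject depthStencil = objectManager->CreateImage(
    VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, VK_FORMAT_D24_UNORM_S8_UINT, VK_IMAGE_TILING_OPTIMAL,
    VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT, width, height);
// ... use depthStencil.m_imageView in a framebuffer; release with DestroyObject(depthStencil).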
DescriptorSetGroup VulkanObjectManager::CreateDescriptorSetGroup(ref_ptr<VulkanGpuProgram> program)
{
  CHECK(std::this_thread::get_id() == m_renderers[ThreadType::Frontend], ());

  DescriptorSetGroup s;
  VkDescriptorSetLayout layout = program->GetDescriptorSetLayout();
  VkDescriptorSetAllocateInfo allocInfo = {};

  // Find a pool with available sets.
  uint32_t poolIndex = 0;
  while (poolIndex <= m_descriptorPools.size())
  {
    // No such pool, create one.
    if (poolIndex == m_descriptorPools.size())
      CreateDescriptorPool();

    // No available sets in the pool, try the next one.
    if (m_descriptorPools[poolIndex].m_availableSetsCount == 0)
    {
      poolIndex++;
      continue;
    }

    // Allocate a descriptor set.
    allocInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
    allocInfo.descriptorPool = m_descriptorPools[poolIndex].m_pool;
    s.m_descriptorPoolIndex = poolIndex;
    allocInfo.pSetLayouts = &layout;
    allocInfo.descriptorSetCount = 1;

    // Decrease the available sets count.
    m_descriptorPools[poolIndex].m_availableSetsCount--;

    auto const r = vkAllocateDescriptorSets(m_device, &allocInfo, &s.m_descriptorSet);
    if (r == VK_ERROR_FRAGMENTED_POOL || r == VK_ERROR_OUT_OF_POOL_MEMORY)
    {
      // Restore the available sets count of the failed pool, then try the next one.
      m_descriptorPools[poolIndex].m_availableSetsCount++;
      poolIndex++;
    }
    else if (r != VK_SUCCESS)
    {
      CHECK_VK_CALL(r);
    }
    else
    {
      break;
    }
  }

  TRACE_COUNTER("[drape][vulkan] Descriptor pools", static_cast<int64_t>(m_descriptorPools.size()));
#ifdef ENABLE_TRACE
  int64_t usedDescriptorsSets = 0;
  for (auto const & pool : m_descriptorPools)
    usedDescriptorsSets += (kMaxDescriptorsSetCount - pool.m_availableSetsCount);
  TRACE_COUNTER("[drape][vulkan] Descriptor sets", usedDescriptorsSets);
#endif

  return s;
}
void VulkanObjectManager::DestroyObject(VulkanObject object)
{
  auto const currentThreadId = std::this_thread::get_id();
  if (currentThreadId == m_renderers[ThreadType::Frontend])
  {
    m_queuesToDestroy[ThreadType::Frontend][m_currentInflightFrameIndex].push_back(std::move(object));
  }
  else if (currentThreadId == m_renderers[ThreadType::Backend])
  {
    m_queuesToDestroy[ThreadType::Backend][kBackendQueueIndex].push_back(std::move(object));
  }
  else
  {
    std::lock_guard<std::mutex> lock(m_destroyMutex);
    m_queuesToDestroy[ThreadType::Other][kOtherQueueIndex].push_back(std::move(object));
  }
}

void VulkanObjectManager::DestroyDescriptorSetGroup(DescriptorSetGroup group)
{
  CHECK(std::this_thread::get_id() == m_renderers[ThreadType::Frontend], ());
  m_descriptorsToDestroy[m_currentInflightFrameIndex].push_back(std::move(group));
}

void VulkanObjectManager::CollectDescriptorSetGroups(uint32_t inflightFrameIndex)
{
  CHECK(std::this_thread::get_id() == m_renderers[ThreadType::Frontend], ());
  CollectDescriptorSetGroupsUnsafe(m_descriptorsToDestroy[inflightFrameIndex]);
}

void VulkanObjectManager::CollectDescriptorSetGroupsUnsafe(DescriptorSetGroupArray & descriptors)
{
  for (auto const & d : descriptors)
  {
    CHECK_LESS(d.m_descriptorPoolIndex, m_descriptorPools.size(), ());
    CHECK_VK_CALL(vkFreeDescriptorSets(m_device, m_descriptorPools[d.m_descriptorPoolIndex].m_pool, 1 /* count */,
                                       &d.m_descriptorSet));
    m_descriptorPools[d.m_descriptorPoolIndex].m_availableSetsCount++;
  }
  descriptors.clear();
}

void VulkanObjectManager::CollectObjects(uint32_t inflightFrameIndex)
{
  auto const currentThreadId = std::this_thread::get_id();
  if (currentThreadId == m_renderers[ThreadType::Frontend])
  {
    CollectObjectsForThread(m_queuesToDestroy[ThreadType::Frontend][inflightFrameIndex]);
  }
  else if (currentThreadId == m_renderers[ThreadType::Backend])
  {
    CollectObjectsForThread(m_queuesToDestroy[ThreadType::Backend][kBackendQueueIndex]);

    std::lock_guard<std::mutex> lock(m_destroyMutex);
    CollectObjectsForThread(m_queuesToDestroy[ThreadType::Other][kOtherQueueIndex]);
  }
}

void VulkanObjectManager::CollectObjectsForThread(VulkanObjectArray & objects)
{
  if (objects.empty())
    return;

  std::vector<VulkanObject> queueToDestroy;
  std::swap(objects, queueToDestroy);
  DrapeRoutine::Run([this, queueToDestroy = std::move(queueToDestroy)]() { CollectObjectsImpl(queueToDestroy); });
}

void VulkanObjectManager::CollectObjectsImpl(VulkanObjectArray const & objects)
{
  for (auto const & obj : objects)
  {
    if (obj.m_buffer != VK_NULL_HANDLE)
    {
      vkDestroyBuffer(m_device, obj.m_buffer, nullptr);
#ifdef ENABLE_TRACE
      m_buffersCount--;
      TRACE_COUNTER("[drape][vulkan] Buffers", m_buffersCount);
#endif
    }
    if (obj.m_imageView != VK_NULL_HANDLE)
      vkDestroyImageView(m_device, obj.m_imageView, nullptr);
    if (obj.m_image != VK_NULL_HANDLE)
    {
      vkDestroyImage(m_device, obj.m_image, nullptr);
#ifdef ENABLE_TRACE
      m_imagesCount--;
      TRACE_COUNTER("[drape][vulkan] Images", m_imagesCount);
#endif
    }
  }

  std::lock_guard<std::mutex> lock(m_mutex);
  m_memoryManager.BeginDeallocationSession();
  for (auto const & obj : objects)
    if (obj.m_allocation)
      m_memoryManager.Deallocate(obj.m_allocation);
  m_memoryManager.EndDeallocationSession();
}

void VulkanObjectManager::DestroyObjectUnsafe(VulkanObject object)
{
  CollectObjectsImpl(VulkanObjectArray{object});
}

void VulkanObjectManager::SetMaxUniformBuffers(uint32_t maxUniformBuffers)
{
  m_maxUniformBuffers = maxUniformBuffers;
}

void VulkanObjectManager::SetMaxImageSamplers(uint32_t maxImageSamplers)
{
  m_maxImageSamplers = maxImageSamplers;
}

uint8_t * VulkanObjectManager::MapUnsafe(VulkanObject object)
{
  CHECK(!object.m_allocation->m_memoryBlock->m_isBlocked, ());

  CHECK(object.m_buffer != VK_NULL_HANDLE || object.m_image != VK_NULL_HANDLE, ());
  uint8_t * ptr = nullptr;
  CHECK_VK_CALL(vkMapMemory(m_device, object.GetMemory(), object.GetAlignedOffset(), object.GetAlignedSize(), 0,
                            reinterpret_cast<void **>(&ptr)));
  object.m_allocation->m_memoryBlock->m_isBlocked = true;
  return ptr;
}

void VulkanObjectManager::FlushUnsafe(VulkanObject object, uint32_t offset, uint32_t size)
{
  if (object.m_allocation->m_memoryBlock->m_isCoherent)
    return;

  CHECK(object.m_allocation->m_memoryBlock->m_isBlocked, ());

  VkMappedMemoryRange mappedRange = {};
  mappedRange.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
  mappedRange.memory = object.GetMemory();
  mappedRange.offset = object.GetAlignedOffset() + offset;
  if (size == 0)
    mappedRange.size = object.GetAlignedSize();
  else
    mappedRange.size = size;
  CHECK_VK_CALL(vkFlushMappedMemoryRanges(m_device, 1, &mappedRange));
}

void VulkanObjectManager::UnmapUnsafe(VulkanObject object)
{
  CHECK(object.m_allocation->m_memoryBlock->m_isBlocked, ());
  vkUnmapMemory(m_device, object.GetMemory());
  object.m_allocation->m_memoryBlock->m_isBlocked = false;
}

void VulkanObjectManager::Fill(VulkanObject object, void const * data, uint32_t sizeInBytes)
{
  std::lock_guard<std::mutex> lock(m_mutex);
  void * gpuPtr = MapUnsafe(object);
  if (data != nullptr)
    memcpy(gpuPtr, data, sizeInBytes);
  else
    memset(gpuPtr, 0, sizeInBytes);
  FlushUnsafe(object);
  UnmapUnsafe(object);
}

void VulkanObjectManager::CreateDescriptorPool()
{
  CHECK_GREATER(m_maxUniformBuffers, 0, ());
  CHECK_GREATER(m_maxImageSamplers, 0, ());
  std::vector<VkDescriptorPoolSize> poolSizes = {
      {VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, m_maxUniformBuffers * kMaxDescriptorsSetCount},
      {VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, m_maxImageSamplers * kMaxDescriptorsSetCount},
  };

  VkDescriptorPoolCreateInfo descriptorPoolInfo = {};
  descriptorPoolInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
  descriptorPoolInfo.flags = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT;
  descriptorPoolInfo.poolSizeCount = static_cast<uint32_t>(poolSizes.size());
  descriptorPoolInfo.pPoolSizes = poolSizes.data();
  descriptorPoolInfo.maxSets = kMaxDescriptorsSetCount;

  DescriptorPool descriptorPool;
  CHECK_VK_CALL(vkCreateDescriptorPool(m_device, &descriptorPoolInfo, nullptr, &descriptorPool.m_pool));
  descriptorPool.m_availableSetsCount = descriptorPoolInfo.maxSets;

  m_descriptorPools.push_back(descriptorPool);
}

void VulkanObjectManager::DestroyDescriptorPools()
{
  for (auto & pool : m_descriptorPools)
    vkDestroyDescriptorPool(m_device, pool.m_pool, nullptr);
}

VkSampler VulkanObjectManager::GetSampler(SamplerKey const & key)
{
  std::lock_guard<std::mutex> lock(m_samplerMutex);

  auto const it = m_samplers.find(key);
  if (it != m_samplers.end())
    return it->second;

  VkSamplerCreateInfo samplerCreateInfo = {};
  samplerCreateInfo.sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO;
  samplerCreateInfo.magFilter = samplerCreateInfo.minFilter = GetVulkanFilter(key.GetTextureFilter());
  samplerCreateInfo.addressModeU = GetVulkanSamplerAddressMode(key.GetWrapSMode());
  samplerCreateInfo.addressModeV = GetVulkanSamplerAddressMode(key.GetWrapTMode());

  VkSampler sampler;
  CHECK_VK_CALL(vkCreateSampler(m_device, &samplerCreateInfo, nullptr, &sampler));

  m_samplers.insert(std::make_pair(key, sampler));
  return sampler;
}
} // namespace vulkan
} // namespace dp
138
libs/drape/vulkan/vulkan_object_manager.hpp
Normal file
@@ -0,0 +1,138 @@
#pragma once

#include "drape/pointers.hpp"
#include "drape/vulkan/vulkan_gpu_program.hpp"
#include "drape/vulkan/vulkan_memory_manager.hpp"
#include "drape/vulkan/vulkan_param_descriptor.hpp"
#include "drape/vulkan/vulkan_utils.hpp"

#include "base/assert.hpp"

#include <array>
#include <cstdint>
#include <map>
#include <memory>
#include <mutex>
#include <thread>
#include <vector>

namespace dp
{
namespace vulkan
{
struct VulkanObject
{
  VkBuffer m_buffer = {};
  VkImage m_image = {};
  VkImageView m_imageView = {};
  VulkanMemoryManager::AllocationPtr m_allocation;

  VkDeviceMemory GetMemory() const
  {
    ASSERT(m_allocation != nullptr, ());
    ASSERT(m_allocation->m_memoryBlock != nullptr, ());
    return m_allocation->m_memoryBlock->m_memory;
  }

  uint32_t GetAlignedOffset() const
  {
    ASSERT(m_allocation != nullptr, ());
    return m_allocation->m_alignedOffset;
  }

  uint32_t GetAlignedSize() const
  {
    ASSERT(m_allocation != nullptr, ());
    return m_allocation->m_alignedSize;
  }
};

class VulkanObjectManager
{
public:
  VulkanObjectManager(VkDevice device, VkPhysicalDeviceLimits const & deviceLimits,
                      VkPhysicalDeviceMemoryProperties const & memoryProperties, uint32_t queueFamilyIndex);
  ~VulkanObjectManager();

  enum ThreadType
  {
    Frontend = 0,
    Backend,
    Other,
    Count
  };
  void RegisterThread(ThreadType type);

  void SetCurrentInflightFrameIndex(uint32_t index);

  VulkanObject CreateBuffer(VulkanMemoryManager::ResourceType resourceType, uint32_t sizeInBytes, uint64_t batcherHash);
  VulkanObject CreateImage(VkImageUsageFlags usageFlags, VkFormat format, VkImageTiling tiling,
                           VkImageAspectFlags aspectFlags, uint32_t width, uint32_t height);
  DescriptorSetGroup CreateDescriptorSetGroup(ref_ptr<VulkanGpuProgram> program);

  // Use the unsafe functions ONLY if an object is accessed from a single thread; otherwise
  // use the safe Fill function.
  uint8_t * MapUnsafe(VulkanObject object);
  void FlushUnsafe(VulkanObject object, uint32_t offset = 0, uint32_t size = 0);
  void UnmapUnsafe(VulkanObject object);
  void Fill(VulkanObject object, void const * data, uint32_t sizeInBytes);

  void DestroyObject(VulkanObject object);
  void DestroyDescriptorSetGroup(DescriptorSetGroup group);
  void CollectDescriptorSetGroups(uint32_t inflightFrameIndex);
  void CollectObjects(uint32_t inflightFrameIndex);

  // Use this unsafe function ONLY if the object has just been created.
  void DestroyObjectUnsafe(VulkanObject object);

  VkDevice GetDevice() const { return m_device; }
  VulkanMemoryManager const & GetMemoryManager() const { return m_memoryManager; }
  VkSampler GetSampler(SamplerKey const & key);

  void SetMaxUniformBuffers(uint32_t maxUniformBuffers);
  void SetMaxImageSamplers(uint32_t maxImageSamplers);

private:
  using DescriptorSetGroupArray = std::vector<DescriptorSetGroup>;
  using VulkanObjectArray = std::vector<VulkanObject>;

  void CreateDescriptorPool();
  void DestroyDescriptorPools();
  void CollectObjectsForThread(VulkanObjectArray & objects);
  void CollectObjectsImpl(VulkanObjectArray const & objects);
  void CollectDescriptorSetGroupsUnsafe(DescriptorSetGroupArray & descriptors);

  VkDevice const m_device;
  uint32_t const m_queueFamilyIndex;
  VulkanMemoryManager m_memoryManager;

  std::array<std::thread::id, ThreadType::Count> m_renderers = {};
  std::array<std::array<VulkanObjectArray, kMaxInflightFrames>, ThreadType::Count> m_queuesToDestroy = {};

  struct DescriptorPool
  {
    VkDescriptorPool m_pool;
    uint32_t m_availableSetsCount = 0;
  };
  std::vector<DescriptorPool> m_descriptorPools;

  std::array<DescriptorSetGroupArray, kMaxInflightFrames> m_descriptorsToDestroy;

  std::map<SamplerKey, VkSampler> m_samplers;

  uint32_t m_currentInflightFrameIndex = 0;

  uint32_t m_maxUniformBuffers = 0;
  uint32_t m_maxImageSamplers = 0;

  std::mutex m_mutex;
  std::mutex m_samplerMutex;
  std::mutex m_destroyMutex;

#ifdef ENABLE_TRACE
  int64_t m_buffersCount = 0;
  int64_t m_imagesCount = 0;
#endif
};
} // namespace vulkan
} // namespace dp
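To illustrate the safe/unsafe split above, here are two equivalent ways to upload data into a host-visible buffer (a sketch; `bytes` is an illustrative payload and `stagingObject` an already created VulkanObject):

// Safe path: Fill locks internally and performs map/copy/flush/unmap in one call.
objectManager->Fill(stagingObject, bytes.data(), static_cast<uint32_t>(bytes.size()));

// Unsafe path: valid only while no other thread touches this object.
uint8_t * ptr = objectManager->MapUnsafe(stagingObject);
memcpy(ptr, bytes.data(), bytes.size());
objectManager->FlushUnsafe(stagingObject);  // No-op for coherent memory blocks.
objectManager->UnmapUnsafe(stagingObject);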
112
libs/drape/vulkan/vulkan_param_descriptor.cpp
Normal file
@@ -0,0 +1,112 @@
#include "drape/vulkan/vulkan_param_descriptor.hpp"
|
||||
#include "drape/vulkan/vulkan_base_context.hpp"
|
||||
#include "drape/vulkan/vulkan_object_manager.hpp"
|
||||
|
||||
#include "base/assert.hpp"
|
||||
#include "base/logging.hpp"
|
||||
|
||||
namespace dp
|
||||
{
|
||||
namespace vulkan
|
||||
{
|
||||
void DescriptorSetGroup::Update(VkDevice device, std::vector<ParamDescriptor> const & descriptors)
|
||||
{
|
||||
size_t const writeDescriptorsCount = descriptors.size();
|
||||
CHECK_LESS_OR_EQUAL(writeDescriptorsCount, kMaxDescriptorSets, ());
|
||||
|
||||
std::array<uint32_t, kMaxDescriptorSets> ids = {};
|
||||
for (size_t i = 0; i < writeDescriptorsCount; ++i)
|
||||
ids[i] = descriptors[i].m_id;
|
||||
|
||||
if (m_updated && ids == m_ids)
|
||||
return;
|
||||
|
||||
m_ids = ids;
|
||||
m_updated = true;
|
||||
std::array<VkWriteDescriptorSet, kMaxDescriptorSets> writeDescriptorSets = {};
|
||||
for (size_t i = 0; i < writeDescriptorsCount; ++i)
|
||||
{
|
||||
writeDescriptorSets[i] = {};
|
||||
writeDescriptorSets[i].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
|
||||
writeDescriptorSets[i].dstSet = m_descriptorSet;
|
||||
writeDescriptorSets[i].descriptorCount = 1;
|
||||
if (descriptors[i].m_type == ParamDescriptor::Type::DynamicUniformBuffer)
|
||||
{
|
||||
writeDescriptorSets[i].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
|
||||
writeDescriptorSets[i].dstBinding = 0;
|
||||
writeDescriptorSets[i].pBufferInfo = &descriptors[i].m_bufferDescriptor;
|
||||
}
|
||||
else if (descriptors[i].m_type == ParamDescriptor::Type::Texture)
|
||||
{
|
||||
writeDescriptorSets[i].descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
|
||||
writeDescriptorSets[i].dstBinding = static_cast<uint32_t>(descriptors[i].m_textureSlot);
|
||||
writeDescriptorSets[i].pImageInfo = &descriptors[i].m_imageDescriptor;
|
||||
}
|
||||
else
|
||||
{
|
||||
CHECK(false, ("Unsupported param descriptor type."));
|
||||
}
|
||||
}
|
||||
|
||||
vkUpdateDescriptorSets(device, static_cast<uint32_t>(writeDescriptorsCount), writeDescriptorSets.data(), 0, nullptr);
|
||||
}
|
||||
|
||||
ParamDescriptorUpdater::ParamDescriptorUpdater(ref_ptr<VulkanObjectManager> objectManager)
|
||||
: m_objectManager(std::move(objectManager))
|
||||
{}
|
||||
|
||||
void ParamDescriptorUpdater::Reset(uint32_t inflightFrameIndex)
|
||||
{
|
||||
auto & ud = m_updateData[inflightFrameIndex];
|
||||
for (auto const & g : ud.m_descriptorSetGroups)
|
||||
m_objectManager->DestroyDescriptorSetGroup(g);
|
||||
ud.m_descriptorSetGroups.clear();
|
||||
ud.m_descriptorSetIndex = 0;
|
||||
ud.m_updateDescriptorFrame = 0;
|
||||
}
|
||||
|
||||
void ParamDescriptorUpdater::Destroy()
|
||||
{
|
||||
for (size_t i = 0; i < m_updateData.size(); ++i)
|
||||
Reset(static_cast<uint32_t>(i));
|
||||
}
|
||||
|
||||
void ParamDescriptorUpdater::Update(ref_ptr<dp::GraphicsContext> context)
|
||||
{
|
||||
ref_ptr<dp::vulkan::VulkanBaseContext> vulkanContext = context;
|
||||
m_currentInflightFrameIndex = vulkanContext->GetCurrentInflightFrameIndex();
|
||||
auto & ud = m_updateData[m_currentInflightFrameIndex];
|
||||
|
||||
if (ud.m_program != vulkanContext->GetCurrentProgram())
|
||||
{
|
||||
Reset(m_currentInflightFrameIndex);
|
||||
ud.m_program = vulkanContext->GetCurrentProgram();
|
||||
}
|
||||
|
||||
// We can update descriptors only once per frame. So if we need to render
|
||||
// object several times per frame, we must allocate new descriptors.
|
||||
if (ud.m_updateDescriptorFrame != vulkanContext->GetCurrentFrameIndex())
|
||||
{
|
||||
ud.m_updateDescriptorFrame = vulkanContext->GetCurrentFrameIndex();
|
||||
ud.m_descriptorSetIndex = 0;
|
||||
}
|
||||
else
|
||||
{
|
||||
ud.m_descriptorSetIndex++;
|
||||
}
|
||||
|
||||
CHECK_LESS_OR_EQUAL(ud.m_descriptorSetIndex, ud.m_descriptorSetGroups.size(), ());
|
||||
if (ud.m_descriptorSetIndex == ud.m_descriptorSetGroups.size())
|
||||
ud.m_descriptorSetGroups.emplace_back(m_objectManager->CreateDescriptorSetGroup(ud.m_program));
|
||||
|
||||
ud.m_descriptorSetGroups[ud.m_descriptorSetIndex].Update(vulkanContext->GetDevice(),
|
||||
vulkanContext->GetCurrentParamDescriptors());
|
||||
}
|
||||
|
||||
VkDescriptorSet ParamDescriptorUpdater::GetDescriptorSet() const
|
||||
{
|
||||
auto const & ud = m_updateData[m_currentInflightFrameIndex];
|
||||
return ud.m_descriptorSetGroups[ud.m_descriptorSetIndex].m_descriptorSet;
|
||||
}
|
||||
} // namespace vulkan
|
||||
} // namespace dp
81
libs/drape/vulkan/vulkan_param_descriptor.hpp
Normal file
@@ -0,0 +1,81 @@
#pragma once

#include "drape/graphics_context.hpp"
#include "drape/vulkan/vulkan_gpu_program.hpp"
#include "drape/vulkan/vulkan_utils.hpp"

#include <array>
#include <cstdint>
#include <limits>
#include <string>
#include <vector>

namespace dp
{
namespace vulkan
{
struct ParamDescriptor
{
  enum class Type : uint8_t
  {
    DynamicUniformBuffer = 0,
    Texture
  };

  Type m_type = Type::DynamicUniformBuffer;

  VkDescriptorBufferInfo m_bufferDescriptor = {};
  uint32_t m_bufferDynamicOffset = 0;

  VkDescriptorImageInfo m_imageDescriptor = {};
  int8_t m_textureSlot = 0;

  uint32_t m_id = 0;
};

size_t constexpr kMaxDescriptorSets = 8;

struct DescriptorSetGroup
{
  VkDescriptorSet m_descriptorSet = {};
  uint32_t m_descriptorPoolIndex = std::numeric_limits<uint32_t>::max();

  std::array<uint32_t, kMaxDescriptorSets> m_ids = {};
  bool m_updated = false;

  explicit operator bool()
  {
    return m_descriptorSet != VK_NULL_HANDLE && m_descriptorPoolIndex != std::numeric_limits<uint32_t>::max();
  }

  void Update(VkDevice device, std::vector<ParamDescriptor> const & descriptors);
};

class VulkanObjectManager;

class ParamDescriptorUpdater
{
public:
  explicit ParamDescriptorUpdater(ref_ptr<VulkanObjectManager> objectManager);

  void Update(ref_ptr<dp::GraphicsContext> context);
  void Destroy();
  VkDescriptorSet GetDescriptorSet() const;

private:
  void Reset(uint32_t inflightFrameIndex);

  ref_ptr<VulkanObjectManager> m_objectManager;
  struct UpdateData
  {
    std::vector<DescriptorSetGroup> m_descriptorSetGroups;
    ref_ptr<VulkanGpuProgram> m_program;
    uint32_t m_updateDescriptorFrame = 0;
    uint32_t m_descriptorSetIndex = 0;
  };
  std::array<UpdateData, kMaxInflightFrames> m_updateData;
  uint32_t m_currentInflightFrameIndex = 0;
};

} // namespace vulkan
} // namespace dp
565
libs/drape/vulkan/vulkan_pipeline.cpp
Normal file
@@ -0,0 +1,565 @@
#include "drape/vulkan/vulkan_pipeline.hpp"
|
||||
#include "drape/vulkan/vulkan_utils.hpp"
|
||||
|
||||
#include "drape/support_manager.hpp"
|
||||
|
||||
#include "platform/platform.hpp"
|
||||
|
||||
#include "coding/file_reader.hpp"
|
||||
#include "coding/file_writer.hpp"
|
||||
#include "coding/write_to_sink.hpp"
|
||||
|
||||
#include "base/assert.hpp"
|
||||
#include "base/file_name_utils.hpp"
|
||||
|
||||
#include <array>
|
||||
#include <string>
|
||||
#include <utility>
|
||||
#include <vector>
|
||||
|
||||
namespace dp
|
||||
{
|
||||
namespace vulkan
|
||||
{
|
||||
namespace
|
||||
{
|
||||
std::string const kDumpFileName = "vulkan_dump.bin";
|
||||
|
||||
// Stencil package.
|
||||
uint8_t constexpr kStencilBackFunctionByte = 7;
|
||||
uint8_t constexpr kStencilBackFailActionByte = 6;
|
||||
uint8_t constexpr kStencilBackDepthFailActionByte = 5;
|
||||
uint8_t constexpr kStencilBackPassActionByte = 4;
|
||||
uint8_t constexpr kStencilFrontFunctionByte = 3;
|
||||
uint8_t constexpr kStencilFrontFailActionByte = 2;
|
||||
uint8_t constexpr kStencilFrontDepthFailActionByte = 1;
|
||||
uint8_t constexpr kStencilFrontPassActionByte = 0;
|
||||
|
||||
VkCompareOp DecodeTestFunction(uint8_t testFunctionByte)
|
||||
{
|
||||
switch (static_cast<TestFunction>(testFunctionByte))
|
||||
{
|
||||
case TestFunction::Never: return VK_COMPARE_OP_NEVER;
|
||||
case TestFunction::Less: return VK_COMPARE_OP_LESS;
|
||||
case TestFunction::Equal: return VK_COMPARE_OP_EQUAL;
|
||||
case TestFunction::LessOrEqual: return VK_COMPARE_OP_LESS_OR_EQUAL;
|
||||
case TestFunction::Greater: return VK_COMPARE_OP_GREATER;
|
||||
case TestFunction::NotEqual: return VK_COMPARE_OP_NOT_EQUAL;
|
||||
case TestFunction::GreaterOrEqual: return VK_COMPARE_OP_GREATER_OR_EQUAL;
|
||||
case TestFunction::Always: return VK_COMPARE_OP_ALWAYS;
|
||||
}
|
||||
UNREACHABLE();
|
||||
}
|
||||
|
||||
VkStencilOp DecodeStencilAction(uint8_t stencilActionByte)
|
||||
{
|
||||
switch (static_cast<StencilAction>(stencilActionByte))
|
||||
{
|
||||
case StencilAction::Keep: return VK_STENCIL_OP_KEEP;
|
||||
case StencilAction::Zero: return VK_STENCIL_OP_ZERO;
|
||||
case StencilAction::Replace: return VK_STENCIL_OP_REPLACE;
|
||||
case StencilAction::Increment: return VK_STENCIL_OP_INCREMENT_AND_CLAMP;
|
||||
case StencilAction::IncrementWrap: return VK_STENCIL_OP_INCREMENT_AND_WRAP;
|
||||
case StencilAction::Decrement: return VK_STENCIL_OP_DECREMENT_AND_CLAMP;
|
||||
case StencilAction::DecrementWrap: return VK_STENCIL_OP_DECREMENT_AND_WRAP;
|
||||
case StencilAction::Invert: return VK_STENCIL_OP_INVERT;
|
||||
}
|
||||
UNREACHABLE();
|
||||
}
|
||||
|
||||
VkFormat GetAttributeFormat(uint8_t componentCount, glConst componentType)
|
||||
{
|
||||
if (componentType == gl_const::GLFloatType)
|
||||
{
|
||||
switch (componentCount)
|
||||
{
|
||||
case 1: return VK_FORMAT_R32_SFLOAT;
|
||||
case 2: return VK_FORMAT_R32G32_SFLOAT;
|
||||
case 3: return VK_FORMAT_R32G32B32_SFLOAT;
|
||||
case 4: return VK_FORMAT_R32G32B32A32_SFLOAT;
|
||||
}
|
||||
}
|
||||
else if (componentType == gl_const::GLByteType)
|
||||
{
|
||||
switch (componentCount)
|
||||
{
|
||||
case 1: return VK_FORMAT_R8_SINT;
|
||||
case 2: return VK_FORMAT_R8G8_SINT;
|
||||
case 3: return VK_FORMAT_R8G8B8_SINT;
|
||||
case 4: return VK_FORMAT_R8G8B8A8_SINT;
|
||||
}
|
||||
}
|
||||
else if (componentType == gl_const::GLUnsignedByteType)
|
||||
{
|
||||
switch (componentCount)
|
||||
{
|
||||
case 1: return VK_FORMAT_R8_UINT;
|
||||
case 2: return VK_FORMAT_R8G8_UINT;
|
||||
case 3: return VK_FORMAT_R8G8B8_UINT;
|
||||
case 4: return VK_FORMAT_R8G8B8A8_UINT;
|
||||
}
|
||||
}
|
||||
else if (componentType == gl_const::GLShortType)
|
||||
{
|
||||
switch (componentCount)
|
||||
{
|
||||
case 1: return VK_FORMAT_R16_SINT;
|
||||
case 2: return VK_FORMAT_R16G16_SINT;
|
||||
case 3: return VK_FORMAT_R16G16B16_SINT;
|
||||
case 4: return VK_FORMAT_R16G16B16A16_SINT;
|
||||
}
|
||||
}
|
||||
else if (componentType == gl_const::GLUnsignedShortType)
|
||||
{
|
||||
switch (componentCount)
|
||||
{
|
||||
case 1: return VK_FORMAT_R16_UINT;
|
||||
case 2: return VK_FORMAT_R16G16_UINT;
|
||||
case 3: return VK_FORMAT_R16G16B16_UINT;
|
||||
case 4: return VK_FORMAT_R16G16B16A16_UINT;
|
||||
}
|
||||
}
|
||||
else if (componentType == gl_const::GLIntType)
|
||||
{
|
||||
switch (componentCount)
|
||||
{
|
||||
case 1: return VK_FORMAT_R32_SINT;
|
||||
case 2: return VK_FORMAT_R32G32_SINT;
|
||||
case 3: return VK_FORMAT_R32G32B32_SINT;
|
||||
case 4: return VK_FORMAT_R32G32B32A32_SINT;
|
||||
}
|
||||
}
|
||||
else if (componentType == gl_const::GLUnsignedIntType)
|
||||
{
|
||||
switch (componentCount)
|
||||
{
|
||||
case 1: return VK_FORMAT_R32_UINT;
|
||||
case 2: return VK_FORMAT_R32G32_UINT;
|
||||
case 3: return VK_FORMAT_R32G32B32_UINT;
|
||||
case 4: return VK_FORMAT_R32G32B32A32_UINT;
|
||||
}
|
||||
}
|
||||
|
||||
CHECK(false, ("Unsupported attribute format.", componentCount, componentType));
|
||||
return VK_FORMAT_UNDEFINED;
|
||||
}
|
||||
|
||||
std::string GetDumpFilePath()
|
||||
{
|
||||
return base::JoinPath(GetPlatform().TmpDir(), kDumpFileName);
|
||||
}
|
||||
} // namespace
|
||||
|
||||
VulkanPipeline::VulkanPipeline(VkDevice device, uint32_t appVersionCode) : m_appVersionCode(appVersionCode)
|
||||
{
|
||||
// Read dump.
|
||||
std::vector<uint8_t> dumpData;
|
||||
auto const dumpFilePath = GetDumpFilePath();
|
||||
if (GetPlatform().IsFileExistsByFullPath(dumpFilePath))
|
||||
{
|
||||
try
|
||||
{
|
||||
FileReader r(dumpFilePath);
|
||||
NonOwningReaderSource src(r);
|
||||
|
||||
auto const v = ReadPrimitiveFromSource<uint32_t>(src);
|
||||
if (v != appVersionCode)
|
||||
{
|
||||
// Dump is obsolete.
|
||||
FileWriter::DeleteFileX(dumpFilePath);
|
||||
}
|
||||
else
|
||||
{
|
||||
dumpData.resize(static_cast<size_t>(r.Size() - sizeof(uint32_t)));
|
||||
src.Read(dumpData.data(), dumpData.size());
|
||||
}
|
||||
}
|
||||
catch (FileReader::Exception const & exception)
|
||||
{
|
||||
LOG(LWARNING, ("Exception while reading file:", dumpFilePath, "reason:", exception.what()));
|
||||
}
|
||||
}
|
||||
|
||||
VkPipelineCacheCreateInfo pipelineCacheCreateInfo = {};
|
||||
pipelineCacheCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
|
||||
pipelineCacheCreateInfo.initialDataSize = dumpData.size();
|
||||
pipelineCacheCreateInfo.pInitialData = dumpData.data();
|
||||
auto result = vkCreatePipelineCache(device, &pipelineCacheCreateInfo, nullptr, &m_vulkanPipelineCache);
|
||||
if (result != VK_SUCCESS && pipelineCacheCreateInfo.pInitialData != nullptr)
|
||||
{
|
||||
FileWriter::DeleteFileX(dumpFilePath);
|
||||
pipelineCacheCreateInfo.initialDataSize = 0;
|
||||
pipelineCacheCreateInfo.pInitialData = nullptr;
|
||||
result = vkCreatePipelineCache(device, &pipelineCacheCreateInfo, nullptr, &m_vulkanPipelineCache);
|
||||
}
|
||||
if (result != VK_SUCCESS)
|
||||
{
|
||||
// The function vkCreatePipelineCache can return unspecified codes, so if we aren't able to
|
||||
// create pipeline cache without saved state, we consider it as a driver issue and forbid Vulkan.
|
||||
SupportManager::Instance().ForbidVulkan();
|
||||
CHECK(false, ("Fatal driver issue."));
|
||||
}
|
||||
}
|
||||
|
||||
void VulkanPipeline::Dump(VkDevice device)
|
||||
{
|
||||
if (!m_isChanged)
|
||||
return;
|
||||
|
||||
size_t constexpr kMaxCacheSizeInBytes = 1024 * 1024;
|
||||
|
||||
size_t cacheSize;
|
||||
VkResult statusCode;
|
||||
statusCode = vkGetPipelineCacheData(device, m_vulkanPipelineCache, &cacheSize, nullptr);
|
||||
if (statusCode == VK_SUCCESS && cacheSize > 0)
|
||||
{
|
||||
if (cacheSize <= kMaxCacheSizeInBytes)
|
||||
{
|
||||
std::vector<uint8_t> dumpData(cacheSize);
|
||||
statusCode = vkGetPipelineCacheData(device, m_vulkanPipelineCache, &cacheSize, dumpData.data());
|
||||
if (statusCode == VK_SUCCESS)
|
||||
{
|
||||
auto const dumpFilePath = GetDumpFilePath();
|
||||
try
|
||||
{
|
||||
FileWriter w(dumpFilePath);
|
||||
WriteToSink(w, m_appVersionCode);
|
||||
w.Write(dumpData.data(), dumpData.size());
|
||||
}
|
||||
catch (FileWriter::Exception const & exception)
|
||||
{
|
||||
LOG(LWARNING, ("Exception while writing file:", dumpFilePath, "reason:", exception.what()));
|
||||
}
|
||||
m_isChanged = false;
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
LOG(LWARNING, ("Maximum pipeline cache size exceeded (", cacheSize, "/", kMaxCacheSizeInBytes, "bytes)"));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void VulkanPipeline::ResetCache(VkDevice device)
|
||||
{
|
||||
for (auto const & p : m_pipelineCache)
|
||||
vkDestroyPipeline(device, p.second, nullptr);
|
||||
m_pipelineCache.clear();
|
||||
m_isChanged = true;
|
||||
}
|
||||
|
||||
void VulkanPipeline::ResetCache(VkDevice device, VkRenderPass renderPass)
|
||||
{
|
||||
for (auto it = m_pipelineCache.begin(); it != m_pipelineCache.end();)
|
||||
{
|
||||
if (it->first.m_renderPass == renderPass)
|
||||
{
|
||||
vkDestroyPipeline(device, it->second, nullptr);
|
||||
it = m_pipelineCache.erase(it);
|
||||
}
|
||||
else
|
||||
{
|
||||
++it;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void VulkanPipeline::Destroy(VkDevice device)
|
||||
{
|
||||
vkDeviceWaitIdle(device);
|
||||
Dump(device);
|
||||
ResetCache(device);
|
||||
vkDestroyPipelineCache(device, m_vulkanPipelineCache, nullptr);
|
||||
}
|
||||
|
||||
VkPipeline VulkanPipeline::GetPipeline(VkDevice device, PipelineKey const & key)
|
||||
{
|
||||
CHECK(key.m_renderPass != VK_NULL_HANDLE, ());
|
||||
|
||||
auto const it = m_pipelineCache.find(key);
|
||||
if (it != m_pipelineCache.end())
|
||||
return it->second;
|
||||
|
||||
// Primitives.
|
||||
VkPipelineInputAssemblyStateCreateInfo inputAssemblyStateCreateInfo = {};
|
||||
inputAssemblyStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
|
||||
inputAssemblyStateCreateInfo.topology = key.m_primitiveTopology;
|
||||
|
||||
// Rasterization.
|
||||
VkPipelineRasterizationStateCreateInfo rasterizationStateCreateInfo = {};
|
||||
rasterizationStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
|
||||
rasterizationStateCreateInfo.polygonMode = VK_POLYGON_MODE_FILL;
|
||||
rasterizationStateCreateInfo.cullMode = key.m_cullingEnabled ? VK_CULL_MODE_BACK_BIT : VK_CULL_MODE_NONE;
|
||||
rasterizationStateCreateInfo.frontFace = VK_FRONT_FACE_CLOCKWISE;
|
||||
rasterizationStateCreateInfo.lineWidth = 1.0f;
|
||||
|
||||
// Blending.
|
||||
VkPipelineColorBlendAttachmentState pipelineColorBlendAttachmentState = {};
|
||||
pipelineColorBlendAttachmentState.colorWriteMask =
|
||||
VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT | VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT;
|
||||
pipelineColorBlendAttachmentState.blendEnable = key.m_blendingEnabled ? VK_TRUE : VK_FALSE;
|
||||
if (key.m_blendingEnabled)
|
||||
{
|
||||
pipelineColorBlendAttachmentState.colorBlendOp = VK_BLEND_OP_ADD;
|
||||
pipelineColorBlendAttachmentState.alphaBlendOp = VK_BLEND_OP_ADD;
|
||||
pipelineColorBlendAttachmentState.srcColorBlendFactor = VK_BLEND_FACTOR_SRC_ALPHA;
|
||||
pipelineColorBlendAttachmentState.srcAlphaBlendFactor = VK_BLEND_FACTOR_SRC_ALPHA;
|
||||
pipelineColorBlendAttachmentState.dstColorBlendFactor = VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA;
|
||||
pipelineColorBlendAttachmentState.dstAlphaBlendFactor = VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA;
|
||||
}
|
||||
VkPipelineColorBlendStateCreateInfo colorBlendStateCreateInfo = {};
|
||||
colorBlendStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO;
|
||||
colorBlendStateCreateInfo.attachmentCount = 1;
|
||||
colorBlendStateCreateInfo.pAttachments = &pipelineColorBlendAttachmentState;
|
||||
|
||||
// Viewport.
|
||||
VkPipelineViewportStateCreateInfo viewportStateCreateInfo = {};
|
||||
viewportStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO;
|
||||
viewportStateCreateInfo.viewportCount = 1;
|
||||
viewportStateCreateInfo.scissorCount = 1;
|
||||
|
||||
// Multisampling.
|
||||
VkPipelineMultisampleStateCreateInfo multisampleStateCreateInfo = {};
|
||||
multisampleStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
|
||||
multisampleStateCreateInfo.rasterizationSamples = VK_SAMPLE_COUNT_1_BIT;
|
||||
|
||||
// Dynamic.
|
||||
static std::array<VkDynamicState, 4> dynamicState = {VK_DYNAMIC_STATE_VIEWPORT, VK_DYNAMIC_STATE_SCISSOR,
|
||||
VK_DYNAMIC_STATE_LINE_WIDTH, VK_DYNAMIC_STATE_STENCIL_REFERENCE};
|
||||
VkPipelineDynamicStateCreateInfo dynamicStateCreateInfo = {};
|
||||
dynamicStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
|
||||
dynamicStateCreateInfo.pDynamicStates = dynamicState.data();
|
||||
dynamicStateCreateInfo.dynamicStateCount = static_cast<uint32_t>(dynamicState.size());
|
||||
|
||||
// Input state.
|
||||
std::vector<VkVertexInputBindingDescription> bindingDescriptions(key.m_bindingInfoCount);
|
||||
size_t attribsCount = 0;
|
||||
for (size_t i = 0; i < key.m_bindingInfoCount; ++i)
|
||||
{
|
||||
bindingDescriptions[i].binding = static_cast<uint32_t>(i);
|
||||
bindingDescriptions[i].stride = key.m_bindingInfo[i].GetElementSize();
|
||||
bindingDescriptions[i].inputRate = VK_VERTEX_INPUT_RATE_VERTEX;
|
||||
attribsCount += key.m_bindingInfo[i].GetCount();
|
||||
}
|
||||
|
||||
std::vector<VkVertexInputAttributeDescription> attributeDescriptions(attribsCount);
|
||||
uint32_t bindingCounter = 0;
|
||||
for (size_t i = 0; i < key.m_bindingInfoCount; ++i)
|
||||
{
|
||||
for (uint8_t j = 0; j < key.m_bindingInfo[i].GetCount(); ++j)
|
||||
{
|
||||
BindingDecl const & bindingDecl = key.m_bindingInfo[i].GetBindingDecl(j);
|
||||
attributeDescriptions[bindingCounter].location = bindingCounter;
|
||||
attributeDescriptions[bindingCounter].binding = static_cast<uint32_t>(i);
|
||||
attributeDescriptions[bindingCounter].format =
|
||||
GetAttributeFormat(bindingDecl.m_componentCount, bindingDecl.m_componentType);
|
||||
attributeDescriptions[bindingCounter].offset = bindingDecl.m_offset;
|
||||
|
||||
bindingCounter++;
|
||||
}
|
||||
}
|
||||
|
||||
VkPipelineVertexInputStateCreateInfo inputStateCreateInfo = {};
|
||||
inputStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
|
||||
inputStateCreateInfo.vertexBindingDescriptionCount = static_cast<uint32_t>(bindingDescriptions.size());
|
||||
inputStateCreateInfo.pVertexBindingDescriptions = bindingDescriptions.data();
|
||||
inputStateCreateInfo.vertexAttributeDescriptionCount = static_cast<uint32_t>(attributeDescriptions.size());
|
||||
inputStateCreateInfo.pVertexAttributeDescriptions = attributeDescriptions.data();
|
||||
|
||||
// Depth stencil.
|
||||
VkPipelineDepthStencilStateCreateInfo depthStencilState = {};
|
||||
depthStencilState.sType = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO;
|
||||
depthStencilState.depthTestEnable = key.m_depthStencil.m_depthEnabled ? VK_TRUE : VK_FALSE;
|
||||
if (key.m_depthStencil.m_depthEnabled)
|
||||
{
|
||||
depthStencilState.depthWriteEnable = VK_TRUE;
|
||||
depthStencilState.depthCompareOp = DecodeTestFunction(static_cast<uint8_t>(key.m_depthStencil.m_depthFunction));
|
||||
}
|
||||
else
|
||||
{
|
||||
depthStencilState.depthWriteEnable = VK_FALSE;
|
||||
depthStencilState.depthCompareOp = VK_COMPARE_OP_ALWAYS;
|
||||
}
|
||||
|
||||
if (key.m_depthStencil.m_stencilEnabled)
|
||||
{
|
||||
depthStencilState.stencilTestEnable = VK_TRUE;
|
||||
depthStencilState.front.compareOp =
|
||||
DecodeTestFunction(GetStateByte(key.m_depthStencil.m_stencil, kStencilFrontFunctionByte));
|
||||
depthStencilState.front.failOp =
|
||||
DecodeStencilAction(GetStateByte(key.m_depthStencil.m_stencil, kStencilFrontFailActionByte));
|
||||
depthStencilState.front.depthFailOp =
|
||||
DecodeStencilAction(GetStateByte(key.m_depthStencil.m_stencil, kStencilFrontDepthFailActionByte));
|
||||
depthStencilState.front.passOp =
|
||||
DecodeStencilAction(GetStateByte(key.m_depthStencil.m_stencil, kStencilFrontPassActionByte));
|
||||
depthStencilState.front.writeMask = 0xffffffff;
|
||||
depthStencilState.front.compareMask = 0xffffffff;
|
||||
depthStencilState.front.reference = 1;
|
||||
|
||||
depthStencilState.back.compareOp =
|
||||
DecodeTestFunction(GetStateByte(key.m_depthStencil.m_stencil, kStencilBackFunctionByte));
|
||||
depthStencilState.back.failOp =
|
||||
DecodeStencilAction(GetStateByte(key.m_depthStencil.m_stencil, kStencilBackFailActionByte));
|
||||
depthStencilState.back.depthFailOp =
|
||||
DecodeStencilAction(GetStateByte(key.m_depthStencil.m_stencil, kStencilBackDepthFailActionByte));
|
||||
depthStencilState.back.passOp =
|
||||
DecodeStencilAction(GetStateByte(key.m_depthStencil.m_stencil, kStencilBackPassActionByte));
|
||||
depthStencilState.back.writeMask = 0xffffffff;
|
||||
depthStencilState.back.compareMask = 0xffffffff;
|
||||
depthStencilState.back.reference = 1;
|
||||
}
|
||||
else
|
||||
{
|
||||
depthStencilState.stencilTestEnable = VK_FALSE;
|
||||
depthStencilState.front.compareOp = VK_COMPARE_OP_ALWAYS;
|
||||
depthStencilState.back.compareOp = VK_COMPARE_OP_ALWAYS;
|
||||
}
|
||||
|
||||
// Pipeline.
|
||||
VkGraphicsPipelineCreateInfo pipelineCreateInfo = {};
|
||||
pipelineCreateInfo.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO;
|
||||
pipelineCreateInfo.layout = key.m_program->GetPipelineLayout();
|
||||
CHECK(pipelineCreateInfo.layout != VK_NULL_HANDLE, ());
|
||||
pipelineCreateInfo.renderPass = key.m_renderPass;
|
||||
pipelineCreateInfo.basePipelineIndex = -1;
|
||||
pipelineCreateInfo.basePipelineHandle = VK_NULL_HANDLE;
|
||||
pipelineCreateInfo.pVertexInputState = &inputStateCreateInfo;
|
||||
pipelineCreateInfo.pInputAssemblyState = &inputAssemblyStateCreateInfo;
|
||||
pipelineCreateInfo.pRasterizationState = &rasterizationStateCreateInfo;
|
||||
pipelineCreateInfo.pColorBlendState = &colorBlendStateCreateInfo;
|
||||
pipelineCreateInfo.pMultisampleState = &multisampleStateCreateInfo;
|
||||
pipelineCreateInfo.pViewportState = &viewportStateCreateInfo;
|
||||
pipelineCreateInfo.pDepthStencilState = &depthStencilState;
|
||||
pipelineCreateInfo.pDynamicState = &dynamicStateCreateInfo;
|
||||
auto shaders = key.m_program->GetShaders();
|
||||
pipelineCreateInfo.stageCount = static_cast<uint32_t>(shaders.size());
|
||||
pipelineCreateInfo.pStages = shaders.data();
|
||||
|
||||
VkPipeline pipeline;
|
||||
auto const result =
|
||||
vkCreateGraphicsPipelines(device, m_vulkanPipelineCache, 1, &pipelineCreateInfo, nullptr, &pipeline);
|
||||
if (result == VK_INCOMPLETE)
|
||||
{
|
||||
// Some Adreno GPUs return this not standard compliant code.
|
||||
// https://developer.qualcomm.com/forum/qdn-forums/software/adreno-gpu-sdk/34709
|
||||
// Now we are not able to continue using Vulkan rendering on them.
|
||||
dp::SupportManager::Instance().ForbidVulkan();
|
||||
CHECK(false, ("Fatal driver issue."));
|
||||
}
|
||||
else
|
||||
{
|
||||
CHECK_RESULT_VK_CALL(vkCreateGraphicsPipelines, result);
|
||||
}
|
||||
|
||||
m_pipelineCache.insert(std::make_pair(key, pipeline));
|
||||
m_isChanged = true;
|
||||
|
||||
return pipeline;
|
||||
}
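For reference, a sketch of a GetPipeline call site (editorial addition): the key fields come from the PipelineKey declaration in vulkan_pipeline.hpp below, while the surrounding variables (renderPass, program, bindingInfo, commandBuffer) are assumed to exist at the caller.

VulkanPipeline::PipelineKey key;
key.m_renderPass = renderPass;                       // current VkRenderPass
key.m_program = program;                             // ref_ptr<VulkanGpuProgram>
key.m_depthStencil.SetDepthTestEnabled(true);
key.m_depthStencil.SetDepthTestFunction(TestFunction::LessOrEqual);
key.m_bindingInfo = bindingInfo;                     // vertex stream layout
key.m_bindingInfoCount = bindingInfoCount;
key.m_primitiveTopology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;
key.m_blendingEnabled = true;
key.m_cullingEnabled = false;
VkPipeline pipeline = pipelineObject.GetPipeline(device, key);  // cached after the first creation
vkCmdBindPipeline(commandBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline);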
void VulkanPipeline::DepthStencilKey::SetDepthTestEnabled(bool enabled)
{
  m_depthEnabled = enabled;
}

void VulkanPipeline::DepthStencilKey::SetDepthTestFunction(TestFunction depthFunction)
{
  m_depthFunction = depthFunction;
}

void VulkanPipeline::DepthStencilKey::SetStencilTestEnabled(bool enabled)
{
  m_stencilEnabled = enabled;
}

void VulkanPipeline::DepthStencilKey::SetStencilFunction(StencilFace face, TestFunction stencilFunction)
{
  switch (face)
  {
  case StencilFace::Front:
    SetStateByte(m_stencil, static_cast<uint8_t>(stencilFunction), kStencilFrontFunctionByte);
    break;
  case StencilFace::Back:
    SetStateByte(m_stencil, static_cast<uint8_t>(stencilFunction), kStencilBackFunctionByte);
    break;
  case StencilFace::FrontAndBack:
    SetStateByte(m_stencil, static_cast<uint8_t>(stencilFunction), kStencilFrontFunctionByte);
    SetStateByte(m_stencil, static_cast<uint8_t>(stencilFunction), kStencilBackFunctionByte);
    break;
  }
}

void VulkanPipeline::DepthStencilKey::SetStencilActions(StencilFace face, StencilAction stencilFailAction,
                                                        StencilAction depthFailAction, StencilAction passAction)
{
  switch (face)
  {
  case StencilFace::Front:
    SetStateByte(m_stencil, static_cast<uint8_t>(stencilFailAction), kStencilFrontFailActionByte);
    SetStateByte(m_stencil, static_cast<uint8_t>(depthFailAction), kStencilFrontDepthFailActionByte);
    SetStateByte(m_stencil, static_cast<uint8_t>(passAction), kStencilFrontPassActionByte);
    break;
  case StencilFace::Back:
    SetStateByte(m_stencil, static_cast<uint8_t>(stencilFailAction), kStencilBackFailActionByte);
    SetStateByte(m_stencil, static_cast<uint8_t>(depthFailAction), kStencilBackDepthFailActionByte);
    SetStateByte(m_stencil, static_cast<uint8_t>(passAction), kStencilBackPassActionByte);
    break;
  case StencilFace::FrontAndBack:
    SetStateByte(m_stencil, static_cast<uint8_t>(stencilFailAction), kStencilFrontFailActionByte);
    SetStateByte(m_stencil, static_cast<uint8_t>(depthFailAction), kStencilFrontDepthFailActionByte);
    SetStateByte(m_stencil, static_cast<uint8_t>(passAction), kStencilFrontPassActionByte);
    SetStateByte(m_stencil, static_cast<uint8_t>(stencilFailAction), kStencilBackFailActionByte);
    SetStateByte(m_stencil, static_cast<uint8_t>(depthFailAction), kStencilBackDepthFailActionByte);
    SetStateByte(m_stencil, static_cast<uint8_t>(passAction), kStencilBackPassActionByte);
    break;
  }
}
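SetStateByte/GetStateByte are declared in vulkan_utils.hpp, outside this hunk. A plausible byte-packing implementation, shown only to document the assumed semantics of the calls above (editorial addition; the real helpers may be templated on the state's integer type):

// Assumed semantics: byteNumber selects one byte inside the packed integer state.
void SetStateByte(uint64_t & state, uint8_t value, uint8_t byteNumber)
{
  auto const shift = byteNumber * 8;
  state = (state & ~(uint64_t{0xFF} << shift)) | (uint64_t{value} << shift);
}

uint8_t GetStateByte(uint64_t state, uint8_t byteNumber)
{
  return static_cast<uint8_t>((state >> (byteNumber * 8)) & 0xFF);
}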
bool VulkanPipeline::DepthStencilKey::operator<(DepthStencilKey const & rhs) const
{
  if (m_depthEnabled != rhs.m_depthEnabled)
    return m_depthEnabled < rhs.m_depthEnabled;

  if (m_stencilEnabled != rhs.m_stencilEnabled)
    return m_stencilEnabled < rhs.m_stencilEnabled;

  if (m_depthFunction != rhs.m_depthFunction)
    return m_depthFunction < rhs.m_depthFunction;

  return m_stencil < rhs.m_stencil;
}

bool VulkanPipeline::DepthStencilKey::operator!=(DepthStencilKey const & rhs) const
{
  return m_depthEnabled != rhs.m_depthEnabled || m_stencilEnabled != rhs.m_stencilEnabled ||
         m_depthFunction != rhs.m_depthFunction || m_stencil != rhs.m_stencil;
}

bool VulkanPipeline::PipelineKey::operator<(PipelineKey const & rhs) const
{
  if (m_renderPass != rhs.m_renderPass)
    return m_renderPass < rhs.m_renderPass;

  if (m_program != rhs.m_program)
    return m_program < rhs.m_program;

  if (m_depthStencil != rhs.m_depthStencil)
    return m_depthStencil < rhs.m_depthStencil;

  if (m_bindingInfoCount != rhs.m_bindingInfoCount)
    return m_bindingInfoCount < rhs.m_bindingInfoCount;

  for (uint8_t i = 0; i < m_bindingInfoCount; ++i)
    if (m_bindingInfo[i] != rhs.m_bindingInfo[i])
      return m_bindingInfo[i] < rhs.m_bindingInfo[i];

  if (m_primitiveTopology != rhs.m_primitiveTopology)
    return m_primitiveTopology < rhs.m_primitiveTopology;

  if (m_blendingEnabled != rhs.m_blendingEnabled)
    return m_blendingEnabled < rhs.m_blendingEnabled;

  return m_cullingEnabled < rhs.m_cullingEnabled;
}
} // namespace vulkan
} // namespace dp
67
libs/drape/vulkan/vulkan_pipeline.hpp
Normal file
@@ -0,0 +1,67 @@
#pragma once

#include "drape/binding_info.hpp"
#include "drape/graphics_context.hpp"
#include "drape/pointers.hpp"
#include "drape/texture_types.hpp"
#include "drape/vulkan/vulkan_gpu_program.hpp"

#include <cstdint>
#include <map>

namespace dp
{
namespace vulkan
{
class VulkanPipeline
{
public:
  struct DepthStencilKey
  {
    void SetDepthTestEnabled(bool enabled);
    void SetDepthTestFunction(TestFunction depthFunction);
    void SetStencilTestEnabled(bool enabled);
    void SetStencilFunction(StencilFace face, TestFunction stencilFunction);
    void SetStencilActions(StencilFace face, StencilAction stencilFailAction, StencilAction depthFailAction,
                           StencilAction passAction);
    bool operator<(DepthStencilKey const & rhs) const;
    bool operator!=(DepthStencilKey const & rhs) const;

    bool m_depthEnabled = false;
    bool m_stencilEnabled = false;
    TestFunction m_depthFunction = TestFunction::Always;
    uint64_t m_stencil = 0;
  };

  struct PipelineKey
  {
    bool operator<(PipelineKey const & rhs) const;

    VkRenderPass m_renderPass = {};
    ref_ptr<VulkanGpuProgram> m_program;
    DepthStencilKey m_depthStencil;
    BindingInfoArray m_bindingInfo;
    uint8_t m_bindingInfoCount = 0;
    VkPrimitiveTopology m_primitiveTopology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;
    bool m_blendingEnabled = false;
    bool m_cullingEnabled = true;
  };

  VulkanPipeline(VkDevice device, uint32_t appVersionCode);
  void Dump(VkDevice device);
  void Destroy(VkDevice device);
  void ResetCache(VkDevice device);
  void ResetCache(VkDevice device, VkRenderPass renderPass);

  VkPipeline GetPipeline(VkDevice device, PipelineKey const & key);

private:
  uint32_t const m_appVersionCode;
  VkPipelineCache m_vulkanPipelineCache;

  using PipelineCache = std::map<PipelineKey, VkPipeline>;
  PipelineCache m_pipelineCache;
  bool m_isChanged = false;
};
} // namespace vulkan
} // namespace dp
116
libs/drape/vulkan/vulkan_staging_buffer.cpp
Normal file
@@ -0,0 +1,116 @@
#include "drape/vulkan/vulkan_staging_buffer.hpp"
|
||||
#include "drape/vulkan/vulkan_utils.hpp"
|
||||
|
||||
#include "base/assert.hpp"
|
||||
#include "base/logging.hpp"
|
||||
|
||||
#include <algorithm>
|
||||
|
||||
namespace dp
|
||||
{
|
||||
namespace vulkan
|
||||
{
|
||||
// The most GPUs use this value, real one can be known only after buffer creation.
|
||||
uint32_t constexpr kDefaultAlignment = 64;
|
||||
|
||||
VulkanStagingBuffer::VulkanStagingBuffer(ref_ptr<VulkanObjectManager> objectManager, uint32_t sizeInBytes)
|
||||
: m_objectManager(objectManager)
|
||||
, m_sizeInBytes(VulkanMemoryManager::GetAligned(sizeInBytes, kDefaultAlignment))
|
||||
{
|
||||
auto constexpr kStagingBuffer = VulkanMemoryManager::ResourceType::Staging;
|
||||
VkDevice device = m_objectManager->GetDevice();
|
||||
auto const & mm = m_objectManager->GetMemoryManager();
|
||||
|
||||
m_object = m_objectManager->CreateBuffer(kStagingBuffer, m_sizeInBytes, 0 /* batcherHash */);
|
||||
VkMemoryRequirements memReqs = {};
|
||||
vkGetBufferMemoryRequirements(device, m_object.m_buffer, &memReqs);
|
||||
|
||||
// We must be able to map the whole range.
|
||||
m_sizeAlignment = mm.GetSizeAlignment(memReqs);
|
||||
if (!HasEnoughSpace(m_sizeInBytes))
|
||||
{
|
||||
// This GPU uses non-standard alignment we have to recreate buffer.
|
||||
auto const originalSize = m_sizeInBytes;
|
||||
auto const originalAlignment = m_sizeAlignment;
|
||||
m_sizeInBytes = VulkanMemoryManager::GetAligned(sizeInBytes, m_sizeAlignment);
|
||||
m_objectManager->DestroyObjectUnsafe(m_object);
|
||||
m_object = m_objectManager->CreateBuffer(kStagingBuffer, m_sizeInBytes, 0 /* batcherHash */);
|
||||
vkGetBufferMemoryRequirements(device, m_object.m_buffer, &memReqs);
|
||||
m_sizeAlignment = mm.GetSizeAlignment(memReqs);
|
||||
CHECK(HasEnoughSpace(m_sizeInBytes), ("originalSize =", originalSize, "originalAlignment =", originalAlignment,
|
||||
"m_sizeInBytes =", m_sizeInBytes, "m_sizeAlignment =", m_sizeAlignment));
|
||||
}
|
||||
|
||||
m_offsetAlignment = mm.GetOffsetAlignment(kStagingBuffer);
|
||||
m_pointer = m_objectManager->MapUnsafe(m_object);
|
||||
}
|
||||
|
||||
VulkanStagingBuffer::~VulkanStagingBuffer()
|
||||
{
|
||||
m_objectManager->UnmapUnsafe(m_object);
|
||||
m_objectManager->DestroyObject(m_object);
|
||||
}
|
||||
|
||||
bool VulkanStagingBuffer::HasEnoughSpace(uint32_t sizeInBytes) const
|
||||
{
|
||||
CHECK_THREAD_CHECKER(m_threadChecker, ());
|
||||
auto const & mm = m_objectManager->GetMemoryManager();
|
||||
auto const alignedSize = mm.GetAligned(sizeInBytes, m_sizeAlignment);
|
||||
return m_offset + alignedSize <= m_sizeInBytes;
|
||||
}
|
||||
|
||||
VulkanStagingBuffer::StagingData VulkanStagingBuffer::Reserve(uint32_t sizeInBytes)
|
||||
{
|
||||
CHECK_THREAD_CHECKER(m_threadChecker, ());
|
||||
CHECK(m_offset % m_offsetAlignment == 0, ());
|
||||
CHECK(HasEnoughSpace(sizeInBytes), ());
|
||||
auto const & mm = m_objectManager->GetMemoryManager();
|
||||
|
||||
auto const alignedOffset = m_offset;
|
||||
auto const alignedSize = mm.GetAligned(sizeInBytes, m_sizeAlignment);
|
||||
uint8_t * ptr = m_pointer + alignedOffset;
|
||||
|
||||
// Update offset and align it.
|
||||
m_offset += alignedSize;
|
||||
m_offset = std::min(mm.GetAligned(m_offset, m_offsetAlignment), m_object.GetAlignedSize());
|
||||
|
||||
StagingData result;
|
||||
result.m_stagingBuffer = m_object.m_buffer;
|
||||
result.m_pointer = ptr;
|
||||
result.m_offset = alignedOffset;
|
||||
result.m_size = sizeInBytes;
|
||||
return result;
|
||||
}
|
||||
|
||||
uint32_t VulkanStagingBuffer::ReserveWithId(uint32_t sizeInBytes, StagingData & data)
|
||||
{
|
||||
CHECK_THREAD_CHECKER(m_threadChecker, ());
|
||||
data = Reserve(sizeInBytes);
|
||||
m_reservation.push_back(data);
|
||||
return static_cast<uint32_t>(m_reservation.size()) - 1;
|
||||
}
|
||||
|
||||
VulkanStagingBuffer::StagingData const & VulkanStagingBuffer::GetReservationById(uint32_t id) const
|
||||
{
|
||||
CHECK_LESS(id, m_reservation.size(), ());
|
||||
return m_reservation[id];
|
||||
}
|
||||
|
||||
void VulkanStagingBuffer::Flush()
|
||||
{
|
||||
CHECK_THREAD_CHECKER(m_threadChecker, ());
|
||||
if (m_offset == 0)
|
||||
return;
|
||||
|
||||
auto const size = m_offset;
|
||||
m_objectManager->FlushUnsafe(m_object, 0 /* offset */, size);
|
||||
}
|
||||
|
||||
void VulkanStagingBuffer::Reset()
|
||||
{
|
||||
CHECK_THREAD_CHECKER(m_threadChecker, ());
|
||||
m_offset = 0;
|
||||
m_reservation.clear();
|
||||
}
|
||||
} // namespace vulkan
|
||||
} // namespace dp
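A sketch of the intended per-frame cycle (editorial addition): reserve slices out of the persistently mapped buffer, flush once, then reset after the GPU copy has been recorded. The buffer size, dataSize, srcData and the copy recording are illustrative, not taken from this commit.

VulkanStagingBuffer staging(objectManager, 4 * 1024 * 1024 /* bytes, illustrative */);
// Each frame:
if (staging.HasEnoughSpace(dataSize))
{
  auto const slice = staging.Reserve(dataSize);
  memcpy(slice.m_pointer, srcData, dataSize);  // write through the persistent mapping
  staging.Flush();                             // make CPU writes visible before the GPU copy
  // ... record a copy from slice.m_stagingBuffer at slice.m_offset into the destination ...
}
staging.Reset();  // recycle the whole buffer for the next frame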
51
libs/drape/vulkan/vulkan_staging_buffer.hpp
Normal file
@@ -0,0 +1,51 @@
#pragma once

#include "drape/pointers.hpp"
#include "drape/vulkan/vulkan_object_manager.hpp"

#include "base/thread_checker.hpp"

#include <cstdint>
#include <string>
#include <vector>

namespace dp
{
namespace vulkan
{
class VulkanStagingBuffer
{
public:
  VulkanStagingBuffer(ref_ptr<VulkanObjectManager> objectManager, uint32_t sizeInBytes);
  ~VulkanStagingBuffer();

  struct StagingData
  {
    VkBuffer m_stagingBuffer = {};
    uint8_t * m_pointer = nullptr;
    uint32_t m_offset = 0;
    uint32_t m_size = 0;
    operator bool() const { return m_stagingBuffer != 0 && m_pointer != nullptr; }
  };

  bool HasEnoughSpace(uint32_t sizeInBytes) const;

  StagingData Reserve(uint32_t sizeInBytes);
  uint32_t ReserveWithId(uint32_t sizeInBytes, StagingData & data);
  StagingData const & GetReservationById(uint32_t id) const;
  void Flush();
  void Reset();

private:
  ref_ptr<VulkanObjectManager> m_objectManager;
  uint32_t m_sizeInBytes;
  VulkanObject m_object;
  uint32_t m_offsetAlignment = 0;
  uint32_t m_sizeAlignment = 0;
  uint8_t * m_pointer = nullptr;
  uint32_t m_offset = 0;
  std::vector<StagingData> m_reservation;
  ThreadChecker m_threadChecker;
};
} // namespace vulkan
} // namespace dp
292
libs/drape/vulkan/vulkan_texture.cpp
Normal file
@@ -0,0 +1,292 @@
#include "drape/vulkan/vulkan_texture.hpp"
|
||||
#include "drape/vulkan/vulkan_base_context.hpp"
|
||||
|
||||
drape_ptr<dp::HWTextureAllocator> CreateVulkanAllocator()
|
||||
{
|
||||
return make_unique_dp<dp::vulkan::VulkanTextureAllocator>();
|
||||
}
|
||||
|
||||
ref_ptr<dp::HWTextureAllocator> GetDefaultVulkanAllocator()
|
||||
{
|
||||
static dp::vulkan::VulkanTextureAllocator allocator;
|
||||
return make_ref(&allocator);
|
||||
}
|
||||
|
||||
namespace dp
|
||||
{
|
||||
namespace vulkan
|
||||
{
|
||||
namespace
|
||||
{
|
||||
VkBufferImageCopy BufferCopyRegion(uint32_t x, uint32_t y, uint32_t width, uint32_t height, uint32_t stagingOffset)
|
||||
{
|
||||
VkBufferImageCopy bufferCopyRegion = {};
|
||||
bufferCopyRegion.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
|
||||
bufferCopyRegion.imageSubresource.mipLevel = 0;
|
||||
bufferCopyRegion.imageSubresource.baseArrayLayer = 0;
|
||||
bufferCopyRegion.imageSubresource.layerCount = 1;
|
||||
bufferCopyRegion.imageExtent.width = width;
|
||||
bufferCopyRegion.imageExtent.height = height;
|
||||
bufferCopyRegion.imageExtent.depth = 1;
|
||||
bufferCopyRegion.imageOffset.x = x;
|
||||
bufferCopyRegion.imageOffset.y = y;
|
||||
bufferCopyRegion.imageOffset.z = 0;
|
||||
bufferCopyRegion.bufferOffset = stagingOffset;
|
||||
return bufferCopyRegion;
|
||||
}
|
||||
} // namespace
|
||||
|
||||
drape_ptr<HWTexture> VulkanTextureAllocator::CreateTexture(ref_ptr<dp::GraphicsContext> context)
|
||||
{
|
||||
return make_unique_dp<VulkanTexture>(make_ref(this));
|
||||
}
|
||||
|
||||
VulkanTexture::~VulkanTexture()
|
||||
{
|
||||
m_objectManager->DestroyObject(m_textureObject);
|
||||
}
|
||||
|
||||
void VulkanTexture::Create(ref_ptr<dp::GraphicsContext> context, Params const & params, ref_ptr<void> data)
|
||||
{
|
||||
Base::Create(context, params, data);
|
||||
|
||||
static uint32_t textureCounter = 0;
|
||||
textureCounter++;
|
||||
m_textureID = textureCounter;
|
||||
|
||||
ref_ptr<dp::vulkan::VulkanBaseContext> vulkanContext = context;
|
||||
m_objectManager = vulkanContext->GetObjectManager();
|
||||
|
||||
if (Validate())
|
||||
{
|
||||
m_objectManager->DestroyObject(m_textureObject);
|
||||
m_textureObject = {};
|
||||
}
|
||||
|
||||
auto const format = VulkanFormatUnpacker::Unpack(params.m_format);
|
||||
|
||||
VkFormatProperties formatProperties;
|
||||
vkGetPhysicalDeviceFormatProperties(vulkanContext->GetPhysicalDevice(), format, &formatProperties);
|
||||
VkImageTiling tiling = VK_IMAGE_TILING_LINEAR;
|
||||
if (formatProperties.optimalTilingFeatures & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT)
|
||||
tiling = VK_IMAGE_TILING_OPTIMAL;
|
||||
|
||||
m_isMutable = params.m_isMutable;
|
||||
if (params.m_isRenderTarget)
|
||||
{
|
||||
// Create image.
|
||||
if (params.m_format == TextureFormat::DepthStencil || params.m_format == TextureFormat::Depth)
|
||||
{
|
||||
m_aspectFlags = params.m_format == TextureFormat::DepthStencil
|
||||
? (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)
|
||||
: VK_IMAGE_ASPECT_DEPTH_BIT;
|
||||
m_textureObject = m_objectManager->CreateImage(VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, format, tiling,
|
||||
m_aspectFlags, params.m_width, params.m_height);
|
||||
}
|
||||
else
|
||||
{
|
||||
m_aspectFlags = VK_IMAGE_ASPECT_COLOR_BIT;
|
||||
m_textureObject = m_objectManager->CreateImage(VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT,
|
||||
format, tiling, m_aspectFlags, params.m_width, params.m_height);
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
auto const bufferSize = GetBytesPerPixel(params.m_format) * params.m_width * params.m_height;
|
||||
|
||||
// Create temporary staging buffer.
|
||||
m_creationStagingBuffer = make_unique_dp<VulkanStagingBuffer>(m_objectManager, bufferSize);
|
||||
ASSERT(m_creationStagingBuffer->HasEnoughSpace(bufferSize), ());
|
||||
VulkanStagingBuffer::StagingData staging;
|
||||
m_reservationId = m_creationStagingBuffer->ReserveWithId(bufferSize, staging);
|
||||
if (data != nullptr)
|
||||
memcpy(staging.m_pointer, data.get(), bufferSize);
|
||||
else
|
||||
memset(staging.m_pointer, 0, bufferSize);
|
||||
m_creationStagingBuffer->Flush();
|
||||
|
||||
// Create image.
|
||||
m_textureObject = m_objectManager->CreateImage(VK_IMAGE_USAGE_SAMPLED_BIT, format, tiling,
|
||||
VK_IMAGE_ASPECT_COLOR_BIT, params.m_width, params.m_height);
|
||||
}
|
||||
}
|
||||
|
||||
void VulkanTexture::UploadData(ref_ptr<dp::GraphicsContext> context, uint32_t x, uint32_t y, uint32_t width,
|
||||
uint32_t height, ref_ptr<void> data)
|
||||
{
|
||||
CHECK(m_isMutable, ("Upload data is avaivable only for mutable textures."));
|
||||
CHECK(m_creationStagingBuffer == nullptr, ());
|
||||
CHECK(m_objectManager != nullptr, ());
|
||||
CHECK(data != nullptr, ());
|
||||
|
||||
ref_ptr<dp::vulkan::VulkanBaseContext> vulkanContext = context;
|
||||
VkCommandBuffer commandBuffer = vulkanContext->GetCurrentMemoryCommandBuffer();
|
||||
CHECK(commandBuffer != nullptr, ());
|
||||
|
||||
Bind(context);
|
||||
|
||||
auto const sizeInBytes = GetBytesPerPixel(GetFormat()) * width * height;
|
||||
|
||||
VkBuffer sb;
|
||||
uint32_t offset;
|
||||
auto stagingBuffer = vulkanContext->GetDefaultStagingBuffer();
|
||||
if (stagingBuffer->HasEnoughSpace(sizeInBytes))
|
||||
{
|
||||
auto staging = stagingBuffer->Reserve(sizeInBytes);
|
||||
memcpy(staging.m_pointer, data.get(), sizeInBytes);
|
||||
sb = staging.m_stagingBuffer;
|
||||
offset = staging.m_offset;
|
||||
}
|
||||
else
|
||||
{
|
||||
// Here we use temporary staging object, which will be destroyed after the nearest
|
||||
// command queue submitting.
|
||||
VulkanStagingBuffer tempStagingBuffer(m_objectManager, sizeInBytes);
|
||||
CHECK(tempStagingBuffer.HasEnoughSpace(sizeInBytes), ());
|
||||
auto staging = tempStagingBuffer.Reserve(sizeInBytes);
|
||||
memcpy(staging.m_pointer, data.get(), sizeInBytes);
|
||||
tempStagingBuffer.Flush();
|
||||
sb = staging.m_stagingBuffer;
|
||||
offset = staging.m_offset;
|
||||
}
|
||||
|
||||
// Here we use VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, because we also read textures
|
||||
// in vertex shaders.
|
||||
MakeImageLayoutTransition(
|
||||
commandBuffer, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
|
||||
VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_TRANSFER_BIT,
|
||||
VK_PIPELINE_STAGE_TRANSFER_BIT);
|
||||
|
||||
auto bufferCopyRegion = BufferCopyRegion(x, y, width, height, offset);
|
||||
vkCmdCopyBufferToImage(commandBuffer, sb, m_textureObject.m_image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1,
|
||||
&bufferCopyRegion);
|
||||
|
||||
MakeImageLayoutTransition(commandBuffer, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, VK_PIPELINE_STAGE_TRANSFER_BIT,
|
||||
VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT);
|
||||
}
|
||||
|
||||
void VulkanTexture::Bind(ref_ptr<dp::GraphicsContext> context) const
|
||||
{
|
||||
ref_ptr<dp::vulkan::VulkanBaseContext> vulkanContext = context;
|
||||
VkCommandBuffer commandBuffer = vulkanContext->GetCurrentMemoryCommandBuffer();
|
||||
CHECK(commandBuffer != nullptr, ());
|
||||
|
||||
// Fill texture on the first bind.
|
||||
if (m_creationStagingBuffer != nullptr)
|
||||
{
|
||||
// Here we use VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, because we also read textures
|
||||
// in vertex shaders.
|
||||
MakeImageLayoutTransition(
|
||||
commandBuffer, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
|
||||
VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_TRANSFER_BIT,
|
||||
VK_PIPELINE_STAGE_TRANSFER_BIT);
|
||||
|
||||
auto staging = m_creationStagingBuffer->GetReservationById(m_reservationId);
|
||||
auto bufferCopyRegion = BufferCopyRegion(0, 0, GetWidth(), GetHeight(), staging.m_offset);
|
||||
vkCmdCopyBufferToImage(commandBuffer, staging.m_stagingBuffer, m_textureObject.m_image,
|
||||
VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &bufferCopyRegion);
|
||||
|
||||
MakeImageLayoutTransition(commandBuffer, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, VK_PIPELINE_STAGE_TRANSFER_BIT,
|
||||
VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT);
|
||||
|
||||
m_creationStagingBuffer.reset();
|
||||
}
|
||||
}
|
||||
|
||||
void VulkanTexture::SetFilter(TextureFilter filter)
|
||||
{
|
||||
m_params.m_filter = filter;
|
||||
}
|
||||
|
||||
bool VulkanTexture::Validate() const
|
||||
{
|
||||
return m_textureObject.m_image != VK_NULL_HANDLE && m_textureObject.m_imageView != VK_NULL_HANDLE;
|
||||
}
|
||||
|
||||
SamplerKey VulkanTexture::GetSamplerKey() const
|
||||
{
|
||||
return SamplerKey(m_params.m_filter, m_params.m_wrapSMode, m_params.m_wrapTMode);
|
||||
}
|
||||
|
||||
void VulkanTexture::MakeImageLayoutTransition(VkCommandBuffer commandBuffer, VkImageLayout newLayout,
|
||||
VkPipelineStageFlags srcStageMask,
|
||||
VkPipelineStageFlags dstStageMask) const
|
||||
{
|
||||
VkAccessFlags srcAccessMask = 0;
|
||||
VkAccessFlags dstAccessMask = 0;
|
||||
|
||||
VkPipelineStageFlags const noAccessMask = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT | VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT |
|
||||
VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT | VK_PIPELINE_STAGE_ALL_COMMANDS_BIT;
|
||||
VkPipelineStageFlags srcRemainingMask = srcStageMask & ~noAccessMask;
|
||||
VkPipelineStageFlags dstRemainingMask = dstStageMask & ~noAccessMask;
|
||||
|
||||
auto const srcTestAndRemoveBit = [&](VkPipelineStageFlagBits stageBit, VkAccessFlags accessBits)
|
||||
{
|
||||
if (srcStageMask & stageBit)
|
||||
{
|
||||
srcAccessMask |= accessBits;
|
||||
srcRemainingMask &= ~stageBit;
|
||||
}
|
||||
};
|
||||
|
||||
srcTestAndRemoveBit(VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT, VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT);
|
||||
|
||||
srcTestAndRemoveBit(VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT);
|
||||
|
||||
srcTestAndRemoveBit(VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, VK_ACCESS_SHADER_READ_BIT);
|
||||
|
||||
srcTestAndRemoveBit(VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
|
||||
VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_INPUT_ATTACHMENT_READ_BIT);
|
||||
|
||||
srcTestAndRemoveBit(VK_PIPELINE_STAGE_TRANSFER_BIT, VK_ACCESS_TRANSFER_WRITE_BIT);
|
||||
|
||||
srcTestAndRemoveBit(VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT,
|
||||
VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT);
|
||||
|
||||
CHECK(srcRemainingMask == 0, ("Not implemented transition for src pipeline stage"));
|
||||
|
||||
auto const dstTestAndRemoveBit = [&](VkPipelineStageFlagBits stageBit, VkAccessFlags accessBits)
|
||||
{
|
||||
if (dstStageMask & stageBit)
|
||||
{
|
||||
dstAccessMask |= accessBits;
|
||||
dstRemainingMask &= ~stageBit;
|
||||
}
|
||||
};
|
||||
|
||||
dstTestAndRemoveBit(VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT, VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT);
|
||||
|
||||
dstTestAndRemoveBit(VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT,
|
||||
VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT);
|
||||
|
||||
dstTestAndRemoveBit(VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, VK_ACCESS_SHADER_READ_BIT);
|
||||
|
||||
dstTestAndRemoveBit(VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
|
||||
VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_INPUT_ATTACHMENT_READ_BIT);
|
||||
|
||||
dstTestAndRemoveBit(VK_PIPELINE_STAGE_TRANSFER_BIT, VK_ACCESS_TRANSFER_WRITE_BIT);
|
||||
|
||||
CHECK(dstRemainingMask == 0, ("Not implemented transition for dest pipeline stage"));
|
||||
|
||||
VkImageMemoryBarrier imageMemoryBarrier = {};
|
||||
imageMemoryBarrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
|
||||
imageMemoryBarrier.pNext = nullptr;
|
||||
imageMemoryBarrier.srcAccessMask = srcAccessMask;
|
||||
imageMemoryBarrier.dstAccessMask = dstAccessMask;
|
||||
imageMemoryBarrier.oldLayout = m_currentLayout;
|
||||
imageMemoryBarrier.newLayout = newLayout;
|
||||
imageMemoryBarrier.image = GetImage();
|
||||
imageMemoryBarrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
|
||||
imageMemoryBarrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
|
||||
imageMemoryBarrier.subresourceRange.aspectMask = m_aspectFlags;
|
||||
imageMemoryBarrier.subresourceRange.baseMipLevel = 0;
|
||||
imageMemoryBarrier.subresourceRange.levelCount = VK_REMAINING_MIP_LEVELS;
|
||||
imageMemoryBarrier.subresourceRange.baseArrayLayer = 0;
|
||||
imageMemoryBarrier.subresourceRange.layerCount = VK_REMAINING_ARRAY_LAYERS;
|
||||
|
||||
vkCmdPipelineBarrier(commandBuffer, srcStageMask, dstStageMask, 0, 0, nullptr, 0, nullptr, 1, &imageMemoryBarrier);
|
||||
|
||||
m_currentLayout = newLayout;
|
||||
}
|
||||
} // namespace vulkan
|
||||
} // namespace dp
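A sketch of a partial texture update through the API above (editorial addition): the texture must have been created as mutable and must not be a render target. The make_ref conversion of a raw pointer to ref_ptr<void> is assumed from drape's pointer utilities.

// Update a 16x16 sub-rectangle at (32, 32); 4 bytes per pixel for TextureFormat::RGBA8.
std::vector<uint8_t> pixels(16 * 16 * 4, 0xFF);
texture->UploadData(context, 32 /* x */, 32 /* y */, 16 /* width */, 16 /* height */, make_ref(pixels.data()));
// The copy is recorded on the memory command buffer, so the data reaches the GPU
// before the frame that reads the texture is rendered.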
55
libs/drape/vulkan/vulkan_texture.hpp
Normal file
@@ -0,0 +1,55 @@
#pragma once

#include "drape/hw_texture.hpp"
#include "drape/pointers.hpp"
#include "drape/vulkan/vulkan_object_manager.hpp"
#include "drape/vulkan/vulkan_staging_buffer.hpp"
#include "drape/vulkan/vulkan_utils.hpp"

#include <cstdint>

namespace dp
{
namespace vulkan
{
class VulkanTextureAllocator : public HWTextureAllocator
{
public:
  drape_ptr<HWTexture> CreateTexture(ref_ptr<dp::GraphicsContext> context) override;
  void Flush() override {}
};

class VulkanTexture : public HWTexture
{
  using Base = HWTexture;

public:
  explicit VulkanTexture(ref_ptr<VulkanTextureAllocator>) {}
  ~VulkanTexture() override;

  void Create(ref_ptr<dp::GraphicsContext> context, Params const & params, ref_ptr<void> data) override;
  void UploadData(ref_ptr<dp::GraphicsContext> context, uint32_t x, uint32_t y, uint32_t width, uint32_t height,
                  ref_ptr<void> data) override;
  void Bind(ref_ptr<dp::GraphicsContext> context) const override;
  void SetFilter(TextureFilter filter) override;
  bool Validate() const override;

  VkImageView GetTextureView() const { return m_textureObject.m_imageView; }
  VkImage GetImage() const { return m_textureObject.m_image; }
  SamplerKey GetSamplerKey() const;
  VkImageLayout GetCurrentLayout() const { return m_currentLayout; }

  void MakeImageLayoutTransition(VkCommandBuffer commandBuffer, VkImageLayout newLayout,
                                 VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask) const;

private:
  ref_ptr<VulkanObjectManager> m_objectManager;
  VulkanObject m_textureObject;
  mutable VkImageLayout m_currentLayout = VK_IMAGE_LAYOUT_UNDEFINED;
  VkImageAspectFlags m_aspectFlags = VK_IMAGE_ASPECT_COLOR_BIT;
  mutable drape_ptr<VulkanStagingBuffer> m_creationStagingBuffer;
  uint32_t m_reservationId = 0;
  bool m_isMutable = false;
};
} // namespace vulkan
} // namespace dp
207
libs/drape/vulkan/vulkan_utils.cpp
Normal file
@@ -0,0 +1,207 @@
#include "drape/vulkan/vulkan_utils.hpp"
|
||||
|
||||
#include <array>
|
||||
#include <string>
|
||||
|
||||
namespace dp
|
||||
{
|
||||
namespace vulkan
|
||||
{
|
||||
namespace
|
||||
{
|
||||
// Sampler package.
|
||||
uint8_t constexpr kWrapSModeByte = 3;
|
||||
uint8_t constexpr kWrapTModeByte = 2;
|
||||
uint8_t constexpr kMagFilterByte = 1;
|
||||
uint8_t constexpr kMinFilterByte = 0;
|
||||
} // namespace
|
||||
|
||||
VkDevice DebugName::m_device = VK_NULL_HANDLE;
|
||||
PFN_vkSetDebugUtilsObjectNameEXT DebugName::vkSetDebugUtilsObjectNameEXT = nullptr;
|
||||
|
||||
static bool gUse32bitDepth8bitStencil = false;
|
||||
|
||||
void DebugName::Init(VkInstance instance, VkDevice device)
|
||||
{
|
||||
vkSetDebugUtilsObjectNameEXT =
|
||||
(PFN_vkSetDebugUtilsObjectNameEXT)vkGetInstanceProcAddr(instance, "vkSetDebugUtilsObjectNameEXT");
|
||||
m_device = device;
|
||||
}
|
||||
|
||||
void DebugName::Set(VkObjectType type, uint64_t handle, char const * name)
|
||||
{
|
||||
if (vkSetDebugUtilsObjectNameEXT == nullptr)
|
||||
return;
|
||||
|
||||
VkDebugUtilsObjectNameInfoEXT const info = {.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT,
|
||||
.pNext = nullptr,
|
||||
.objectType = type,
|
||||
.objectHandle = handle,
|
||||
.pObjectName = name};
|
||||
CHECK_VK_CALL(vkSetDebugUtilsObjectNameEXT(m_device, &info));
|
||||
}
|
||||
|
||||
std::string GetVulkanResultString(VkResult result)
|
||||
{
|
||||
switch (result)
|
||||
{
|
||||
case VK_SUCCESS: return "VK_SUCCESS";
|
||||
case VK_NOT_READY: return "VK_NOT_READY";
|
||||
case VK_TIMEOUT: return "VK_TIMEOUT";
|
||||
case VK_EVENT_SET: return "VK_EVENT_SET";
|
||||
case VK_EVENT_RESET: return "VK_EVENT_RESET";
|
||||
case VK_INCOMPLETE: return "VK_INCOMPLETE";
|
||||
case VK_ERROR_OUT_OF_HOST_MEMORY: return "VK_ERROR_OUT_OF_HOST_MEMORY";
|
||||
case VK_ERROR_OUT_OF_DEVICE_MEMORY: return "VK_ERROR_OUT_OF_DEVICE_MEMORY";
|
||||
case VK_ERROR_INITIALIZATION_FAILED: return "VK_ERROR_INITIALIZATION_FAILED";
|
||||
case VK_ERROR_DEVICE_LOST: return "VK_ERROR_DEVICE_LOST";
|
||||
case VK_ERROR_MEMORY_MAP_FAILED: return "VK_ERROR_MEMORY_MAP_FAILED";
|
||||
case VK_ERROR_LAYER_NOT_PRESENT: return "VK_ERROR_LAYER_NOT_PRESENT";
|
||||
case VK_ERROR_EXTENSION_NOT_PRESENT: return "VK_ERROR_EXTENSION_NOT_PRESENT";
|
||||
case VK_ERROR_FEATURE_NOT_PRESENT: return "VK_ERROR_FEATURE_NOT_PRESENT";
|
||||
case VK_ERROR_INCOMPATIBLE_DRIVER: return "VK_ERROR_INCOMPATIBLE_DRIVER";
|
||||
case VK_ERROR_TOO_MANY_OBJECTS: return "VK_ERROR_TOO_MANY_OBJECTS";
|
||||
case VK_ERROR_FORMAT_NOT_SUPPORTED: return "VK_ERROR_FORMAT_NOT_SUPPORTED";
|
||||
case VK_ERROR_FRAGMENTED_POOL: return "VK_ERROR_FRAGMENTED_POOL";
|
||||
case VK_ERROR_SURFACE_LOST_KHR: return "VK_ERROR_SURFACE_LOST_KHR";
|
||||
case VK_ERROR_NATIVE_WINDOW_IN_USE_KHR: return "VK_ERROR_NATIVE_WINDOW_IN_USE_KHR";
|
||||
case VK_SUBOPTIMAL_KHR: return "VK_SUBOPTIMAL_KHR";
|
||||
case VK_ERROR_OUT_OF_DATE_KHR: return "VK_ERROR_OUT_OF_DATE_KHR";
|
||||
case VK_ERROR_INCOMPATIBLE_DISPLAY_KHR: return "VK_ERROR_INCOMPATIBLE_DISPLAY_KHR";
|
||||
case VK_ERROR_VALIDATION_FAILED_EXT: return "VK_ERROR_VALIDATION_FAILED_EXT";
|
||||
case VK_ERROR_INVALID_SHADER_NV: return "VK_ERROR_INVALID_SHADER_NV";
|
||||
case VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT:
|
||||
return "VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT";
|
||||
case VK_ERROR_FRAGMENTATION_EXT: return "VK_ERROR_FRAGMENTATION_EXT";
|
||||
case VK_ERROR_NOT_PERMITTED_EXT: return "VK_ERROR_NOT_PERMITTED_EXT";
|
||||
case VK_ERROR_OUT_OF_POOL_MEMORY_KHR: return "VK_ERROR_OUT_OF_POOL_MEMORY_KHR";
|
||||
case VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR: return "VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR";
|
||||
case VK_RESULT_MAX_ENUM: return "VK_RESULT_MAX_ENUM";
|
||||
case VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS: return "VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS";
|
||||
case VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT: return "VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT";
|
||||
case VK_ERROR_UNKNOWN: return "VK_ERROR_UNKNOWN";
|
||||
case VK_THREAD_IDLE_KHR: return "VK_THREAD_IDLE_KHR";
|
||||
case VK_THREAD_DONE_KHR: return "VK_THREAD_DONE_KHR";
|
||||
case VK_OPERATION_DEFERRED_KHR: return "VK_OPERATION_DEFERRED_KHR";
|
||||
case VK_OPERATION_NOT_DEFERRED_KHR: return "VK_OPERATION_NOT_DEFERRED_KHR";
|
||||
case VK_PIPELINE_COMPILE_REQUIRED_EXT: return "VK_PIPELINE_COMPILE_REQUIRED_EXT";
|
||||
case VK_ERROR_COMPRESSION_EXHAUSTED_EXT: return "VK_ERROR_COMPRESSION_EXHAUSTED_EXT";
|
||||
case VK_ERROR_INVALID_VIDEO_STD_PARAMETERS_KHR: return "VK_ERROR_INVALID_VIDEO_STD_PARAMETERS_KHR";
|
||||
case VK_ERROR_VIDEO_STD_VERSION_NOT_SUPPORTED_KHR: return "VK_ERROR_VIDEO_STD_VERSION_NOT_SUPPORTED_KHR";
|
||||
case VK_ERROR_VIDEO_PROFILE_CODEC_NOT_SUPPORTED_KHR: return "VK_ERROR_VIDEO_PROFILE_CODEC_NOT_SUPPORTED_KHR";
|
||||
case VK_ERROR_VIDEO_PROFILE_FORMAT_NOT_SUPPORTED_KHR: return "VK_ERROR_VIDEO_PROFILE_FORMAT_NOT_SUPPORTED_KHR";
|
||||
case VK_ERROR_VIDEO_PROFILE_OPERATION_NOT_SUPPORTED_KHR: return "VK_ERROR_VIDEO_PROFILE_OPERATION_NOT_SUPPORTED_KHR";
|
||||
case VK_ERROR_VIDEO_PICTURE_LAYOUT_NOT_SUPPORTED_KHR: return "VK_ERROR_VIDEO_PICTURE_LAYOUT_NOT_SUPPORTED_KHR";
|
||||
case VK_ERROR_IMAGE_USAGE_NOT_SUPPORTED_KHR: return "VK_ERROR_IMAGE_USAGE_NOT_SUPPORTED_KHR";
|
||||
case VK_INCOMPATIBLE_SHADER_BINARY_EXT: return "VK_INCOMPATIBLE_SHADER_BINARY_EXT";
|
||||
case VK_PIPELINE_BINARY_MISSING_KHR: return "VK_PIPELINE_BINARY_MISSING_KHR";
|
||||
case VK_ERROR_NOT_ENOUGH_SPACE_KHR: return "VK_ERROR_NOT_ENOUGH_SPACE_KHR";
|
||||
}
|
||||
UNREACHABLE();
|
||||
return "Unknown result";
|
||||
}
|
||||
|
||||
// static
|
||||
VkFormat VulkanFormatUnpacker::m_bestDepthFormat = VK_FORMAT_UNDEFINED;
|
||||
|
||||
// static
|
||||
bool VulkanFormatUnpacker::Init(VkPhysicalDevice gpu)
|
||||
{
|
||||
std::array<VkFormat, 3> depthFormats = {{VK_FORMAT_D32_SFLOAT, VK_FORMAT_X8_D24_UNORM_PACK32, VK_FORMAT_D16_UNORM}};
|
||||
VkFormatProperties formatProperties;
|
||||
for (auto depthFormat : depthFormats)
|
||||
{
|
||||
vkGetPhysicalDeviceFormatProperties(gpu, depthFormat, &formatProperties);
|
||||
if (formatProperties.optimalTilingFeatures & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT)
|
||||
{
|
||||
m_bestDepthFormat = depthFormat;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (m_bestDepthFormat == VK_FORMAT_UNDEFINED)
|
||||
{
|
||||
LOG(LWARNING, ("Vulkan error: there is no any supported depth format."));
|
||||
return false;
|
||||
}
|
||||
  vkGetPhysicalDeviceFormatProperties(gpu, Unpack(TextureFormat::DepthStencil), &formatProperties);
  if (!(formatProperties.optimalTilingFeatures & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT))
  {
    gUse32bitDepth8bitStencil = true;
    vkGetPhysicalDeviceFormatProperties(gpu, Unpack(TextureFormat::DepthStencil), &formatProperties);
    if (!(formatProperties.optimalTilingFeatures & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT))
    {
      LOG(LWARNING, ("Vulkan error: depth-stencil format is unsupported."));
      return false;
    }
  }

  std::array<VkFormat, 2> framebufferColorFormats = {{Unpack(TextureFormat::RGBA8), Unpack(TextureFormat::RedGreen)}};
  for (auto colorFormat : framebufferColorFormats)
  {
    vkGetPhysicalDeviceFormatProperties(gpu, colorFormat, &formatProperties);
    if (!(formatProperties.optimalTilingFeatures & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT))
    {
      LOG(LWARNING, ("Vulkan error: framebuffer format", colorFormat, "is unsupported."));
      return false;
    }
  }

  return true;
}

// static
VkFormat VulkanFormatUnpacker::Unpack(TextureFormat format)
{
  switch (format)
  {
  case TextureFormat::RGBA8: return VK_FORMAT_R8G8B8A8_UNORM;
  case TextureFormat::Red: return VK_FORMAT_R8_UNORM;
  case TextureFormat::RedGreen: return VK_FORMAT_R8G8_UNORM;
#if defined(OMIM_OS_MAC)
  case TextureFormat::DepthStencil: return VK_FORMAT_D32_SFLOAT_S8_UINT;
#else
  case TextureFormat::DepthStencil:
    return gUse32bitDepth8bitStencil ? VK_FORMAT_D32_SFLOAT_S8_UINT : VK_FORMAT_D24_UNORM_S8_UINT;
#endif
  case TextureFormat::Depth: return m_bestDepthFormat;
  case TextureFormat::Unspecified: CHECK(false, ()); return VK_FORMAT_UNDEFINED;
  }
  UNREACHABLE();
}

SamplerKey::SamplerKey(TextureFilter filter, TextureWrapping wrapSMode, TextureWrapping wrapTMode)
{
  Set(filter, wrapSMode, wrapTMode);
}

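// m_sampler packs the four sampler parameters one byte each via SetStateByte (byte indices
// kMinFilterByte .. kWrapTModeByte); the min and mag filter bytes are both written from the same
// filter value.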
void SamplerKey::Set(TextureFilter filter, TextureWrapping wrapSMode, TextureWrapping wrapTMode)
{
  SetStateByte(m_sampler, static_cast<uint8_t>(filter), kMinFilterByte);
  SetStateByte(m_sampler, static_cast<uint8_t>(filter), kMagFilterByte);
  SetStateByte(m_sampler, static_cast<uint8_t>(wrapSMode), kWrapSModeByte);
  SetStateByte(m_sampler, static_cast<uint8_t>(wrapTMode), kWrapTModeByte);
}

TextureFilter SamplerKey::GetTextureFilter() const
{
  return static_cast<TextureFilter>(GetStateByte(m_sampler, kMinFilterByte));
}

TextureWrapping SamplerKey::GetWrapSMode() const
{
  return static_cast<TextureWrapping>(GetStateByte(m_sampler, kWrapSModeByte));
}

TextureWrapping SamplerKey::GetWrapTMode() const
{
  return static_cast<TextureWrapping>(GetStateByte(m_sampler, kWrapTModeByte));
}

bool SamplerKey::operator<(SamplerKey const & rhs) const
{
  return m_sampler < rhs.m_sampler;
}
} // namespace vulkan
} // namespace dp
119
libs/drape/vulkan/vulkan_utils.hpp
Normal file
@@ -0,0 +1,119 @@
#pragma once

#include "drape/texture_types.hpp"

#include "base/assert.hpp"
#include "base/logging.hpp"

#include <vulkan_wrapper.h>

#include <string>
#include <vector>

namespace dp
{
namespace vulkan
{
extern std::string GetVulkanResultString(VkResult result);

uint32_t constexpr kMaxInflightFrames = 2;

class VulkanFormatUnpacker
{
public:
  static bool Init(VkPhysicalDevice gpu);
  static VkFormat Unpack(TextureFormat format);

private:
  static VkFormat m_bestDepthFormat;
};

template <typename T>
void SetStateByte(T & state, uint8_t value, uint8_t byteNumber)
{
  auto const shift = byteNumber * 8;
  auto const mask = ~(static_cast<T>(0xFF) << shift);
  state = (state & mask) | (static_cast<T>(value) << shift);
}

template <typename T>
uint8_t GetStateByte(T & state, uint8_t byteNumber)
{
  return static_cast<uint8_t>((state >> byteNumber * 8) & 0xFF);
}
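
// A minimal usage sketch (hypothetical values): SetStateByte masks out byte N and ORs the new
// value in; GetStateByte shifts it back out.
//   uint32_t state = 0;
//   SetStateByte(state, 0x2A, 1);              // state == 0x00002A00
//   uint8_t const v = GetStateByte(state, 1);  // v == 0x2A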

struct SamplerKey
{
  SamplerKey() = default;
  SamplerKey(TextureFilter filter, TextureWrapping wrapSMode, TextureWrapping wrapTMode);
  void Set(TextureFilter filter, TextureWrapping wrapSMode, TextureWrapping wrapTMode);
  TextureFilter GetTextureFilter() const;
  TextureWrapping GetWrapSMode() const;
  TextureWrapping GetWrapTMode() const;
  bool operator<(SamplerKey const & rhs) const;

  uint32_t m_sampler = 0;
};
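
// operator< lets SamplerKey serve as the key of an ordered container such as std::map, presumably
// so a VkSampler can be cached per unique filter/wrap combination (an assumption about usage; only
// the ordering itself is defined here).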

class DebugName
{
public:
  static void Init(VkInstance instance, VkDevice device);
  static void Set(VkObjectType type, uint64_t handle, char const * name);

private:
  static VkDevice m_device;
  static PFN_vkSetDebugUtilsObjectNameEXT vkSetDebugUtilsObjectNameEXT;
};
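
// DebugName wraps object naming from the VK_EXT_debug_utils extension; Init presumably resolves
// the vkSetDebugUtilsObjectNameEXT entry point (suggested by the PFN_ member), and Set attaches a
// human-readable name to a Vulkan handle for validation layers and capture tools.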

} // namespace vulkan
} // namespace dp

#define LOG_ERROR_VK_CALL(method, statusCode) \
  LOG(LDEBUG, ("Vulkan error:", #method, "finished with code", dp::vulkan::GetVulkanResultString(statusCode)))

#define LOG_ERROR_VK(message) LOG(LDEBUG, ("Vulkan error:", message))

#define CHECK_VK_CALL(method) \
  do \
  { \
    VkResult const statusCode = method; \
    CHECK(statusCode == VK_SUCCESS, \
          ("Vulkan error:", #method, "finished with code", dp::vulkan::GetVulkanResultString(statusCode))); \
  } \
  while (false)

#define CHECK_VK_CALL_EX(method, msg) \
  do \
  { \
    VkResult const statusCode = method; \
    CHECK_EQUAL(statusCode, VK_SUCCESS, msg); \
  } \
  while (false)

#define CHECK_RESULT_VK_CALL(method, statusCode) \
  do \
  { \
    CHECK(statusCode == VK_SUCCESS, \
          ("Vulkan error:", #method, "finished with code", dp::vulkan::GetVulkanResultString(statusCode))); \
  } \
  while (false)
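
// Usage sketch (hypothetical call site): the do { } while (false) wrapper makes each macro expand
// to a single statement, so it composes safely with unbraced if/else:
//   CHECK_VK_CALL(vkDeviceWaitIdle(device));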

#if defined(OMIM_OS_MAC) || defined(OMIM_OS_LINUX)
#define INIT_DEBUG_NAME_VK(instance, device) \
  do \
  { \
    DebugName::Init(instance, device); \
  } \
  while (false)

#define SET_DEBUG_NAME_VK(type, handle, name) \
  do \
  { \
    DebugName::Set(type, (uint64_t)handle, name); \
  } \
  while (false)
#else
#define INIT_DEBUG_NAME_VK(instance, device)
#define SET_DEBUG_NAME_VK(type, handle, name)
#endif
114
libs/drape/vulkan/vulkan_vertex_array_buffer_impl.cpp
Normal file
@@ -0,0 +1,114 @@
#include "drape/pointers.hpp"
|
||||
#include "drape/vertex_array_buffer.hpp"
|
||||
#include "drape/vulkan/vulkan_base_context.hpp"
|
||||
#include "drape/vulkan/vulkan_gpu_buffer_impl.hpp"
|
||||
#include "drape/vulkan/vulkan_param_descriptor.hpp"
|
||||
#include "drape/vulkan/vulkan_utils.hpp"
|
||||
|
||||
#include "base/assert.hpp"
|
||||
#include "base/macros.hpp"
|
||||
|
||||
#include <array>
|
||||
#include <cstdint>
|
||||
#include <cstring>
|
||||
#include <utility>
|
||||
#include <vector>
|
||||
|
||||
namespace dp
|
||||
{
|
||||
namespace vulkan
|
||||
{
|
||||
class VulkanVertexArrayBufferImpl : public VertexArrayBufferImpl
|
||||
{
|
||||
public:
|
||||
VulkanVertexArrayBufferImpl(ref_ptr<VertexArrayBuffer> buffer, ref_ptr<VulkanObjectManager> objectManager,
|
||||
BindingInfoArray && bindingInfo, uint8_t bindingInfoCount)
|
||||
: m_vertexArrayBuffer(std::move(buffer))
|
||||
, m_objectManager(objectManager)
|
||||
, m_bindingInfo(std::move(bindingInfo))
|
||||
, m_bindingInfoCount(bindingInfoCount)
|
||||
, m_descriptorUpdater(objectManager)
|
||||
{}
|
||||
|
||||
~VulkanVertexArrayBufferImpl() override { m_descriptorUpdater.Destroy(); }
|
||||

  bool Build(ref_ptr<GpuProgram> program) override
  {
    UNUSED_VALUE(program);
    return true;
  }

  bool Bind() override { return true; }
  void Unbind() override {}
  void BindBuffers(dp::BuffersMap const & buffers) const override {}
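
  // Build/Bind/Unbind are no-ops: there is no VAO-like object in Vulkan, so all vertex, index and
  // descriptor binding is recorded directly into the command buffer in RenderRange below.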

  void RenderRange(ref_ptr<GraphicsContext> context, bool drawAsLine, IndicesRange const & range) override
  {
    CHECK(m_vertexArrayBuffer->HasBuffers(), ());

    ref_ptr<dp::vulkan::VulkanBaseContext> vulkanContext = context;
    VkCommandBuffer commandBuffer = vulkanContext->GetCurrentRenderingCommandBuffer();
    CHECK(commandBuffer != nullptr, ());

    vulkanContext->SetPrimitiveTopology(drawAsLine ? VK_PRIMITIVE_TOPOLOGY_LINE_LIST
                                                   : VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST);
    vulkanContext->SetBindingInfo(m_bindingInfo, m_bindingInfoCount);

    m_descriptorUpdater.Update(context);
    auto descriptorSet = m_descriptorUpdater.GetDescriptorSet();

    uint32_t dynamicOffset = vulkanContext->GetCurrentDynamicBufferOffset();
    vkCmdBindDescriptorSets(commandBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, vulkanContext->GetCurrentPipelineLayout(),
                            0, 1, &descriptorSet, 1, &dynamicOffset);

    vkCmdBindPipeline(commandBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, vulkanContext->GetCurrentPipeline());

    size_t constexpr kMaxBuffersCount = 4;
    std::array<VkBuffer, kMaxBuffersCount> buffers = {};
    std::array<VkDeviceSize, kMaxBuffersCount> offsets = {};

    uint32_t bufferIndex = 0;
    for (auto & buffer : m_vertexArrayBuffer->m_staticBuffers)
    {
      ref_ptr<VulkanGpuBufferImpl> b = buffer.second->GetBuffer();
      CHECK_LESS(bufferIndex, kMaxBuffersCount, ());
      buffers[bufferIndex++] = b->GetVulkanBuffer();
    }
    for (auto & buffer : m_vertexArrayBuffer->m_dynamicBuffers)
    {
      ref_ptr<VulkanGpuBufferImpl> b = buffer.second->GetBuffer();
      CHECK_LESS(bufferIndex, kMaxBuffersCount, ());
      buffers[bufferIndex++] = b->GetVulkanBuffer();
    }
    vkCmdBindVertexBuffers(commandBuffer, 0, bufferIndex, buffers.data(), offsets.data());

    ref_ptr<VulkanGpuBufferImpl> ib = m_vertexArrayBuffer->m_indexBuffer->GetBuffer();
    VkBuffer vulkanIndexBuffer = ib->GetVulkanBuffer();
    auto const indexType = dp::IndexStorage::IsSupported32bit() ? VK_INDEX_TYPE_UINT32 : VK_INDEX_TYPE_UINT16;
    vkCmdBindIndexBuffer(commandBuffer, vulkanIndexBuffer, 0, indexType);

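    // Guard against devices whose maxDrawIndexedIndexValue limit is smaller than the highest
    // index this range may reference.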
    CHECK_LESS_OR_EQUAL(range.m_idxStart + range.m_idxCount,
                        m_objectManager->GetMemoryManager().GetDeviceLimits().maxDrawIndexedIndexValue, ());

    vkCmdDrawIndexed(commandBuffer, range.m_idxCount, 1, range.m_idxStart, 0, 0);
  }

private:
  ref_ptr<VertexArrayBuffer> m_vertexArrayBuffer;
  ref_ptr<VulkanObjectManager> m_objectManager;
  BindingInfoArray m_bindingInfo;
  uint8_t m_bindingInfoCount = 0;
  ParamDescriptorUpdater m_descriptorUpdater;
};
} // namespace vulkan

drape_ptr<VertexArrayBufferImpl> VertexArrayBuffer::CreateImplForVulkan(ref_ptr<GraphicsContext> context,
                                                                        ref_ptr<VertexArrayBuffer> buffer,
                                                                        BindingInfoArray && bindingInfo,
                                                                        uint8_t bindingInfoCount)
{
  ref_ptr<dp::vulkan::VulkanBaseContext> vulkanContext = context;
  return make_unique_dp<vulkan::VulkanVertexArrayBufferImpl>(buffer, vulkanContext->GetObjectManager(),
                                                             std::move(bindingInfo), bindingInfoCount);
}
} // namespace dp