Format all C++ and Java code via clang-format

Signed-off-by: Konstantin Pastbin <konstantin.pastbin@gmail.com>
This commit is contained in:
Konstantin Pastbin
2025-08-17 14:32:37 +07:00
parent 9f0290c0ec
commit bfffa1fff4
2169 changed files with 56441 additions and 64188 deletions

View File

@@ -30,11 +30,9 @@ uint16_t PackAttachmentsOperations(VulkanBaseContext::AttachmentsOperations cons
} // namespace
VulkanBaseContext::VulkanBaseContext(VkInstance vulkanInstance, VkPhysicalDevice gpu,
VkPhysicalDeviceProperties const & gpuProperties,
VkDevice device, uint32_t renderingQueueFamilyIndex,
ref_ptr<VulkanObjectManager> objectManager,
drape_ptr<VulkanPipeline> && pipeline,
bool hasPartialTextureUpdates)
VkPhysicalDeviceProperties const & gpuProperties, VkDevice device,
uint32_t renderingQueueFamilyIndex, ref_ptr<VulkanObjectManager> objectManager,
drape_ptr<VulkanPipeline> && pipeline, bool hasPartialTextureUpdates)
: m_vulkanInstance(vulkanInstance)
, m_gpu(gpu)
, m_gpuProperties(gpuProperties)
@@ -74,16 +72,17 @@ std::string VulkanBaseContext::GetRendererName() const
std::string VulkanBaseContext::GetRendererVersion() const
{
std::ostringstream ss;
ss << "API:" << VK_VERSION_MAJOR(m_gpuProperties.apiVersion) << "."
<< VK_VERSION_MINOR(m_gpuProperties.apiVersion) << "."
<< VK_VERSION_PATCH(m_gpuProperties.apiVersion)
ss << "API:" << VK_VERSION_MAJOR(m_gpuProperties.apiVersion) << "." << VK_VERSION_MINOR(m_gpuProperties.apiVersion)
<< "." << VK_VERSION_PATCH(m_gpuProperties.apiVersion)
<< "/Driver:" << VK_VERSION_MAJOR(m_gpuProperties.driverVersion) << "."
<< VK_VERSION_MINOR(m_gpuProperties.driverVersion) << "."
<< VK_VERSION_PATCH(m_gpuProperties.driverVersion);
<< VK_VERSION_MINOR(m_gpuProperties.driverVersion) << "." << VK_VERSION_PATCH(m_gpuProperties.driverVersion);
return ss.str();
}
bool VulkanBaseContext::HasPartialTextureUpdates() const { return m_hasPartialTextureUpdates; }
bool VulkanBaseContext::HasPartialTextureUpdates() const
{
return m_hasPartialTextureUpdates;
}
void VulkanBaseContext::Init(ApiVersion apiVersion)
{
@@ -145,8 +144,7 @@ void VulkanBaseContext::SetRenderingQueue(VkQueue queue)
void VulkanBaseContext::Resize(int w, int h)
{
if (m_swapchain != VK_NULL_HANDLE &&
m_surfaceCapabilities.currentExtent.width == static_cast<uint32_t>(w) &&
if (m_swapchain != VK_NULL_HANDLE && m_surfaceCapabilities.currentExtent.width == static_cast<uint32_t>(w) &&
m_surfaceCapabilities.currentExtent.height == static_cast<uint32_t>(h))
{
return;
@@ -164,8 +162,7 @@ bool VulkanBaseContext::BeginRendering()
// We wait for the fences no longer than kTimeoutNanoseconds. If the timer expires, we skip
// the frame. This helps to prevent a freeze in vkWaitForFences when the surface is being reset.
uint64_t constexpr kTimeoutNanoseconds = 2 * 1000 * 1000 * 1000;
auto res = vkWaitForFences(m_device, 1, &m_fences[m_inflightFrameIndex], VK_TRUE,
kTimeoutNanoseconds);
auto res = vkWaitForFences(m_device, 1, &m_fences[m_inflightFrameIndex], VK_TRUE, kTimeoutNanoseconds);
if (res == VK_TIMEOUT)
return false;
@@ -195,8 +192,7 @@ bool VulkanBaseContext::BeginRendering()
// "vkAcquireNextImageKHR: non-infinite timeouts not yet implemented"
// https://android.googlesource.com/platform/frameworks/native/+/refs/heads/master/vulkan/libvulkan/swapchain.cpp
res = vkAcquireNextImageKHR(m_device, m_swapchain, std::numeric_limits<uint64_t>::max() /* kTimeoutNanoseconds */,
m_acquireSemaphores[m_inflightFrameIndex],
VK_NULL_HANDLE, &m_imageIndex);
m_acquireSemaphores[m_inflightFrameIndex], VK_NULL_HANDLE, &m_imageIndex);
// VK_ERROR_SURFACE_LOST_KHR sometimes appears after returning to the foreground. We assume rendering can be
// recovered on the next frame.
if (res == VK_TIMEOUT || res == VK_ERROR_SURFACE_LOST_KHR)
@@ -207,13 +203,13 @@ bool VulkanBaseContext::BeginRendering()
return false;
}
#if defined(OMIM_OS_MAC)
// MoltenVK returns VK_SUBOPTIMAL_KHR in our configuration; it means that the window is not resized, which is
// expected in the developer sandbox for macOS.
// https://github.com/KhronosGroup/MoltenVK/issues/1753
if (res == VK_SUBOPTIMAL_KHR)
res = VK_SUCCESS;
#endif
#if defined(OMIM_OS_MAC)
// MoltenVK returns VK_SUBOPTIMAL_KHR in our configuration; it means that the window is not resized, which is
// expected in the developer sandbox for macOS.
// https://github.com/KhronosGroup/MoltenVK/issues/1753
if (res == VK_SUBOPTIMAL_KHR)
res = VK_SUCCESS;
#endif
if (res == VK_ERROR_OUT_OF_DATE_KHR || res == VK_SUBOPTIMAL_KHR)
{
@@ -235,10 +231,8 @@ bool VulkanBaseContext::BeginRendering()
commandBufferBeginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
commandBufferBeginInfo.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
CHECK_VK_CALL(vkBeginCommandBuffer(m_memoryCommandBuffers[m_inflightFrameIndex],
&commandBufferBeginInfo));
CHECK_VK_CALL(vkBeginCommandBuffer(m_renderingCommandBuffers[m_inflightFrameIndex],
&commandBufferBeginInfo));
CHECK_VK_CALL(vkBeginCommandBuffer(m_memoryCommandBuffers[m_inflightFrameIndex], &commandBufferBeginInfo));
CHECK_VK_CALL(vkBeginCommandBuffer(m_renderingCommandBuffers[m_inflightFrameIndex], &commandBufferBeginInfo));
return true;
}
@@ -297,10 +291,9 @@ void VulkanBaseContext::SetFramebuffer(ref_ptr<dp::BaseFramebuffer> framebuffer)
ref_ptr<VulkanTexture> tex = fb->GetTexture()->GetHardwareTexture();
// Allow to use framebuffer in the fragment shader in the next pass.
tex->MakeImageLayoutTransition(m_renderingCommandBuffers[m_inflightFrameIndex],
VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT);
tex->MakeImageLayoutTransition(
m_renderingCommandBuffers[m_inflightFrameIndex], VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT);
}
}
@@ -353,11 +346,10 @@ void VulkanBaseContext::ApplyFramebuffer(std::string const & framebufferLabel)
depthFormat = VulkanFormatUnpacker::Unpack(TextureFormat::Depth);
fbData.m_packedAttachmentOperations = packedAttachmentOperations;
fbData.m_renderPass = CreateRenderPass(2 /* attachmentsCount */, attachmentsOp,
colorFormat, VK_IMAGE_LAYOUT_UNDEFINED,
VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
depthFormat, VK_IMAGE_LAYOUT_UNDEFINED,
VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL);
fbData.m_renderPass =
CreateRenderPass(2 /* attachmentsCount */, attachmentsOp, colorFormat, VK_IMAGE_LAYOUT_UNDEFINED,
VK_IMAGE_LAYOUT_PRESENT_SRC_KHR, depthFormat, VK_IMAGE_LAYOUT_UNDEFINED,
VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL);
}
else
{
@@ -375,14 +367,13 @@ void VulkanBaseContext::ApplyFramebuffer(std::string const & framebufferLabel)
}
fbData.m_packedAttachmentOperations = packedAttachmentOperations;
fbData.m_renderPass = CreateRenderPass(attachmentsCount, attachmentsOp, colorFormat, VK_IMAGE_LAYOUT_UNDEFINED,
VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
depthFormat, initialDepthStencilLayout,
VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL);
fbData.m_renderPass =
CreateRenderPass(attachmentsCount, attachmentsOp, colorFormat, VK_IMAGE_LAYOUT_UNDEFINED,
VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, depthFormat, initialDepthStencilLayout,
VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL);
}
SET_DEBUG_NAME_VK(VK_OBJECT_TYPE_RENDER_PASS, fbData.m_renderPass,
("RP: " + framebufferLabel).c_str());
SET_DEBUG_NAME_VK(VK_OBJECT_TYPE_RENDER_PASS, fbData.m_renderPass, ("RP: " + framebufferLabel).c_str());
}
// Initialize framebuffers.
@@ -410,17 +401,15 @@ void VulkanBaseContext::ApplyFramebuffer(std::string const & framebufferLabel)
for (size_t i = 0; i < fbData.m_framebuffers.size(); ++i)
{
attachmentViews[0] = m_swapchainImageViews[i];
CHECK_VK_CALL(vkCreateFramebuffer(m_device, &frameBufferCreateInfo, nullptr,
&fbData.m_framebuffers[i]));
CHECK_VK_CALL(vkCreateFramebuffer(m_device, &frameBufferCreateInfo, nullptr, &fbData.m_framebuffers[i]));
SET_DEBUG_NAME_VK(VK_OBJECT_TYPE_FRAMEBUFFER, fbData.m_framebuffers[i],
("FB: " + framebufferLabel + std::to_string(i)).c_str());
("FB: " + framebufferLabel + std::to_string(i)).c_str());
}
}
else
{
ref_ptr<dp::Framebuffer> framebuffer = m_currentFramebuffer;
framebuffer->SetSize(this, m_surfaceCapabilities.currentExtent.width,
m_surfaceCapabilities.currentExtent.height);
framebuffer->SetSize(this, m_surfaceCapabilities.currentExtent.width, m_surfaceCapabilities.currentExtent.height);
auto const depthStencilRef = framebuffer->GetDepthStencilRef();
auto const attachmentsCount = (depthStencilRef != nullptr) ? 2 : 1;
@@ -450,8 +439,7 @@ void VulkanBaseContext::ApplyFramebuffer(std::string const & framebufferLabel)
fbData.m_framebuffers.resize(1);
CHECK_VK_CALL(vkCreateFramebuffer(m_device, &frameBufferCreateInfo, nullptr, &fbData.m_framebuffers[0]));
SET_DEBUG_NAME_VK(VK_OBJECT_TYPE_FRAMEBUFFER, fbData.m_framebuffers[0],
("FB: " + framebufferLabel).c_str());
SET_DEBUG_NAME_VK(VK_OBJECT_TYPE_FRAMEBUFFER, fbData.m_framebuffers[0], ("FB: " + framebufferLabel).c_str());
}
}
@@ -460,17 +448,16 @@ void VulkanBaseContext::ApplyFramebuffer(std::string const & framebufferLabel)
{
ref_ptr<dp::Framebuffer> framebuffer = m_currentFramebuffer;
ref_ptr<VulkanTexture> tex = framebuffer->GetTexture()->GetHardwareTexture();
tex->MakeImageLayoutTransition(m_renderingCommandBuffers[m_inflightFrameIndex],
VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT);
tex->MakeImageLayoutTransition(
m_renderingCommandBuffers[m_inflightFrameIndex], VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT);
if (auto ds = framebuffer->GetDepthStencilRef())
{
ref_ptr<VulkanTexture> dsTex = ds->GetTexture()->GetHardwareTexture();
dsTex->MakeImageLayoutTransition(m_renderingCommandBuffers[m_inflightFrameIndex],
VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT,
VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT | VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT);
dsTex->MakeImageLayoutTransition(
m_renderingCommandBuffers[m_inflightFrameIndex], VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT,
VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT | VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT);
}
}
else
@@ -490,21 +477,20 @@ void VulkanBaseContext::ApplyFramebuffer(std::string const & framebufferLabel)
imageMemoryBarrier.subresourceRange.levelCount = VK_REMAINING_MIP_LEVELS;
imageMemoryBarrier.subresourceRange.baseArrayLayer = 0;
imageMemoryBarrier.subresourceRange.layerCount = VK_REMAINING_ARRAY_LAYERS;
vkCmdPipelineBarrier(m_renderingCommandBuffers[m_inflightFrameIndex],
VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, 0, 0,
nullptr, 0, nullptr, 1, &imageMemoryBarrier);
vkCmdPipelineBarrier(m_renderingCommandBuffers[m_inflightFrameIndex], VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, 0, 0, nullptr, 0, nullptr, 1, &imageMemoryBarrier);
m_depthTexture->MakeImageLayoutTransition(m_renderingCommandBuffers[m_inflightFrameIndex],
VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT,
VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT | VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT);
m_depthTexture->MakeImageLayoutTransition(
m_renderingCommandBuffers[m_inflightFrameIndex], VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT,
VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT | VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT);
}
m_pipelineKey.m_renderPass = fbData.m_renderPass;
VkClearValue clearValues[2];
clearValues[0].color = {{m_clearColor.GetRedF(), m_clearColor.GetGreenF(), m_clearColor.GetBlueF(),
m_clearColor.GetAlphaF()}};
clearValues[0].color = {
{m_clearColor.GetRedF(), m_clearColor.GetGreenF(), m_clearColor.GetBlueF(), m_clearColor.GetAlphaF()}};
clearValues[1].depthStencil = {1.0f, 0};
VkRenderPassBeginInfo renderPassBeginInfo = {};
@@ -536,11 +522,8 @@ void VulkanBaseContext::Present()
presentInfo.waitSemaphoreCount = 1;
auto const res = vkQueuePresentKHR(m_queue, &presentInfo);
if (res != VK_SUCCESS && res != VK_SUBOPTIMAL_KHR &&
res != VK_ERROR_OUT_OF_DATE_KHR && res != VK_ERROR_DEVICE_LOST)
{
if (res != VK_SUCCESS && res != VK_SUBOPTIMAL_KHR && res != VK_ERROR_OUT_OF_DATE_KHR && res != VK_ERROR_DEVICE_LOST)
CHECK_RESULT_VK_CALL(vkQueuePresentKHR, res);
}
}
m_inflightFrameIndex = (m_inflightFrameIndex + 1) % kMaxInflightFrames;
@@ -570,10 +553,8 @@ void VulkanBaseContext::UnregisterHandler(uint32_t id)
for (size_t i = 0; i < m_handlers.size(); ++i)
{
m_handlers[i].erase(std::remove_if(m_handlers[i].begin(), m_handlers[i].end(),
[id](std::pair<uint8_t, ContextHandler> const & p)
{
return p.first == id;
}), m_handlers[i].end());
[id](std::pair<uint8_t, ContextHandler> const & p) { return p.first == id; }),
m_handlers[i].end());
}
}
@@ -609,8 +590,8 @@ void VulkanBaseContext::Clear(uint32_t clearBits, uint32_t storeBits)
VkClearAttachment attachment = {};
attachment.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
attachment.colorAttachment = 0;
attachment.clearValue.color = {{m_clearColor.GetRedF(), m_clearColor.GetGreenF(),
m_clearColor.GetBlueF(), m_clearColor.GetAlphaF()}};
attachment.clearValue.color = {
{m_clearColor.GetRedF(), m_clearColor.GetGreenF(), m_clearColor.GetBlueF(), m_clearColor.GetAlphaF()}};
CHECK_LESS(attachmentsCount, kMaxClearAttachment, ());
attachments[attachmentsCount++] = std::move(attachment);
}
@@ -627,8 +608,8 @@ void VulkanBaseContext::Clear(uint32_t clearBits, uint32_t storeBits)
}
}
vkCmdClearAttachments(m_renderingCommandBuffers[m_inflightFrameIndex], attachmentsCount,
attachments.data(), 1 /* rectCount */, &clearRect);
vkCmdClearAttachments(m_renderingCommandBuffers[m_inflightFrameIndex], attachmentsCount, attachments.data(),
1 /* rectCount */, &clearRect);
}
else
{
@@ -646,20 +627,20 @@ VulkanBaseContext::AttachmentsOperations VulkanBaseContext::GetAttachmensOperati
if (m_clearBits & ClearBits::ColorBit)
operations.m_color.m_loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
else
operations.m_color.m_loadOp = (m_storeBits & ClearBits::ColorBit) ? VK_ATTACHMENT_LOAD_OP_LOAD
: VK_ATTACHMENT_LOAD_OP_DONT_CARE;
operations.m_color.m_loadOp =
(m_storeBits & ClearBits::ColorBit) ? VK_ATTACHMENT_LOAD_OP_LOAD : VK_ATTACHMENT_LOAD_OP_DONT_CARE;
if (m_clearBits & ClearBits::DepthBit)
operations.m_depth.m_loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
else
operations.m_depth.m_loadOp = (m_storeBits & ClearBits::DepthBit) ? VK_ATTACHMENT_LOAD_OP_LOAD
: VK_ATTACHMENT_LOAD_OP_DONT_CARE;
operations.m_depth.m_loadOp =
(m_storeBits & ClearBits::DepthBit) ? VK_ATTACHMENT_LOAD_OP_LOAD : VK_ATTACHMENT_LOAD_OP_DONT_CARE;
if (m_clearBits & ClearBits::StencilBit)
operations.m_stencil.m_loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
else
operations.m_stencil.m_loadOp = (m_storeBits & ClearBits::StencilBit) ? VK_ATTACHMENT_LOAD_OP_LOAD
: VK_ATTACHMENT_LOAD_OP_DONT_CARE;
operations.m_stencil.m_loadOp =
(m_storeBits & ClearBits::StencilBit) ? VK_ATTACHMENT_LOAD_OP_LOAD : VK_ATTACHMENT_LOAD_OP_DONT_CARE;
// Apply storing mode.
if (m_storeBits & ClearBits::ColorBit)
@@ -749,8 +730,7 @@ void VulkanBaseContext::SetPrimitiveTopology(VkPrimitiveTopology topology)
void VulkanBaseContext::SetBindingInfo(BindingInfoArray const & bindingInfo, uint8_t bindingInfoCount)
{
std::copy(bindingInfo.begin(), bindingInfo.begin() + bindingInfoCount,
m_pipelineKey.m_bindingInfo.begin());
std::copy(bindingInfo.begin(), bindingInfo.begin() + bindingInfoCount, m_pipelineKey.m_bindingInfo.begin());
m_pipelineKey.m_bindingInfoCount = bindingInfoCount;
}
@@ -806,10 +786,8 @@ VkPipelineLayout VulkanBaseContext::GetCurrentPipelineLayout() const
uint32_t VulkanBaseContext::GetCurrentDynamicBufferOffset() const
{
for (auto const & p : m_paramDescriptors)
{
if (p.m_type == ParamDescriptor::Type::DynamicUniformBuffer)
return p.m_bufferDynamicOffset;
}
CHECK(false, ("Shaders parameters are not set."));
return 0;
}
@@ -833,7 +811,7 @@ ref_ptr<VulkanStagingBuffer> VulkanBaseContext::GetDefaultStagingBuffer() const
{
return make_ref(m_defaultStagingBuffers[m_inflightFrameIndex]);
}
void VulkanBaseContext::RecreateSwapchain()
{
CHECK(m_surface.has_value(), ());
@@ -894,15 +872,14 @@ void VulkanBaseContext::RecreateSwapchain()
swapchainImageViewCI.image = m_swapchainImages[i];
swapchainImageViewCI.viewType = VK_IMAGE_VIEW_TYPE_2D;
swapchainImageViewCI.format = m_surfaceFormat->format;
swapchainImageViewCI.components = {VK_COMPONENT_SWIZZLE_R, VK_COMPONENT_SWIZZLE_G,
VK_COMPONENT_SWIZZLE_B, VK_COMPONENT_SWIZZLE_A};
swapchainImageViewCI.components = {VK_COMPONENT_SWIZZLE_R, VK_COMPONENT_SWIZZLE_G, VK_COMPONENT_SWIZZLE_B,
VK_COMPONENT_SWIZZLE_A};
swapchainImageViewCI.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
swapchainImageViewCI.subresourceRange.baseMipLevel = 0;
swapchainImageViewCI.subresourceRange.levelCount = 1;
swapchainImageViewCI.subresourceRange.baseArrayLayer = 0;
swapchainImageViewCI.subresourceRange.layerCount = 1;
CHECK_VK_CALL(vkCreateImageView(m_device, &swapchainImageViewCI, nullptr,
&m_swapchainImageViews[i]));
CHECK_VK_CALL(vkCreateImageView(m_device, &swapchainImageViewCI, nullptr, &m_swapchainImageViews[i]));
}
}
@@ -1136,8 +1113,7 @@ VkRenderPass VulkanBaseContext::CreateRenderPass(uint32_t attachmentsCount, Atta
dependencies[2].dstSubpass = VK_SUBPASS_EXTERNAL;
dependencies[2].srcStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
dependencies[2].dstStageMask = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
dependencies[2].srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT |
VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
dependencies[2].srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
dependencies[2].dstAccessMask = VK_ACCESS_MEMORY_READ_BIT;
dependencies[2].dependencyFlags = VK_DEPENDENCY_BY_REGION_BIT;
@@ -1145,8 +1121,7 @@ VkRenderPass VulkanBaseContext::CreateRenderPass(uint32_t attachmentsCount, Atta
// Write-after-write happens because of layout transition to the final layout.
dependencies[3].srcSubpass = 0;
dependencies[3].dstSubpass = VK_SUBPASS_EXTERNAL;
dependencies[3].srcStageMask = VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT |
VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT;
dependencies[3].srcStageMask = VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT;
dependencies[3].dstStageMask = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
dependencies[3].srcAccessMask = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
dependencies[3].dstAccessMask = 0;

View File

@@ -27,9 +27,8 @@ namespace vulkan
class VulkanBaseContext : public dp::GraphicsContext
{
public:
VulkanBaseContext(VkInstance vulkanInstance, VkPhysicalDevice gpu,
VkPhysicalDeviceProperties const & gpuProperties, VkDevice device,
uint32_t renderingQueueFamilyIndex, ref_ptr<VulkanObjectManager> objectManager,
VulkanBaseContext(VkInstance vulkanInstance, VkPhysicalDevice gpu, VkPhysicalDeviceProperties const & gpuProperties,
VkDevice device, uint32_t renderingQueueFamilyIndex, ref_ptr<VulkanObjectManager> objectManager,
drape_ptr<VulkanPipeline> && pipeline, bool hasPartialTextureUpdates);
~VulkanBaseContext() override;
@@ -65,8 +64,8 @@ public:
void SetDepthTestFunction(TestFunction depthFunction) override;
void SetStencilTestEnabled(bool enabled) override;
void SetStencilFunction(StencilFace face, TestFunction stencilFunction) override;
void SetStencilActions(StencilFace face, StencilAction stencilFailAction,
StencilAction depthFailAction, StencilAction passAction) override;
void SetStencilActions(StencilFace face, StencilAction stencilFailAction, StencilAction depthFailAction,
StencilAction passAction) override;
void SetStencilReferenceValue(uint32_t stencilReferenceValue) override;
void SetCullingEnabled(bool enabled) override;
@@ -212,8 +211,7 @@ protected:
};
std::map<ref_ptr<BaseFramebuffer>, FramebufferData> m_framebuffersData;
std::array<std::vector<std::pair<uint32_t, ContextHandler>>,
static_cast<size_t>(HandlerType::Count)> m_handlers;
std::array<std::vector<std::pair<uint32_t, ContextHandler>>, static_cast<size_t>(HandlerType::Count)> m_handlers;
VulkanPipeline::PipelineKey m_pipelineKey;
std::vector<ParamDescriptor> m_paramDescriptors;

View File

@@ -19,15 +19,13 @@ namespace
class DrawVulkanContext : public dp::vulkan::VulkanBaseContext
{
public:
DrawVulkanContext(VkInstance vulkanInstance, VkPhysicalDevice gpu,
VkPhysicalDeviceProperties const & gpuProperties, VkDevice device,
uint32_t renderingQueueFamilyIndex,
DrawVulkanContext(VkInstance vulkanInstance, VkPhysicalDevice gpu, VkPhysicalDeviceProperties const & gpuProperties,
VkDevice device, uint32_t renderingQueueFamilyIndex,
ref_ptr<dp::vulkan::VulkanObjectManager> objectManager, uint32_t appVersionCode,
bool hasPartialTextureUpdates)
: dp::vulkan::VulkanBaseContext(
vulkanInstance, gpu, gpuProperties, device, renderingQueueFamilyIndex, objectManager,
make_unique_dp<dp::vulkan::VulkanPipeline>(device, appVersionCode),
hasPartialTextureUpdates)
: dp::vulkan::VulkanBaseContext(vulkanInstance, gpu, gpuProperties, device, renderingQueueFamilyIndex,
objectManager, make_unique_dp<dp::vulkan::VulkanPipeline>(device, appVersionCode),
hasPartialTextureUpdates)
{
VkQueue queue;
vkGetDeviceQueue(device, renderingQueueFamilyIndex, 0, &queue);
@@ -35,38 +33,26 @@ public:
CreateCommandPool();
}
void MakeCurrent() override
{
m_objectManager->RegisterThread(dp::vulkan::VulkanObjectManager::Frontend);
}
void MakeCurrent() override { m_objectManager->RegisterThread(dp::vulkan::VulkanObjectManager::Frontend); }
};
class UploadVulkanContext : public dp::vulkan::VulkanBaseContext
{
public:
UploadVulkanContext(VkInstance vulkanInstance, VkPhysicalDevice gpu,
VkPhysicalDeviceProperties const & gpuProperties, VkDevice device,
uint32_t renderingQueueFamilyIndex,
ref_ptr<dp::vulkan::VulkanObjectManager> objectManager,
bool hasPartialTextureUpdates)
: dp::vulkan::VulkanBaseContext(vulkanInstance, gpu, gpuProperties, device,
renderingQueueFamilyIndex, objectManager,
nullptr /* pipeline */, hasPartialTextureUpdates)
UploadVulkanContext(VkInstance vulkanInstance, VkPhysicalDevice gpu, VkPhysicalDeviceProperties const & gpuProperties,
VkDevice device, uint32_t renderingQueueFamilyIndex,
ref_ptr<dp::vulkan::VulkanObjectManager> objectManager, bool hasPartialTextureUpdates)
: dp::vulkan::VulkanBaseContext(vulkanInstance, gpu, gpuProperties, device, renderingQueueFamilyIndex,
objectManager, nullptr /* pipeline */, hasPartialTextureUpdates)
{}
void MakeCurrent() override
{
m_objectManager->RegisterThread(dp::vulkan::VulkanObjectManager::Backend);
}
void MakeCurrent() override { m_objectManager->RegisterThread(dp::vulkan::VulkanObjectManager::Backend); }
void Present() override {}
void Resize(int w, int h) override {}
void SetFramebuffer(ref_ptr<dp::BaseFramebuffer> framebuffer) override {}
void Init(dp::ApiVersion apiVersion) override
{
CHECK_EQUAL(apiVersion, dp::ApiVersion::Vulkan, ());
}
void Init(dp::ApiVersion apiVersion) override { CHECK_EQUAL(apiVersion, dp::ApiVersion::Vulkan, ()); }
void SetClearColor(dp::Color const & color) override {}
void Clear(uint32_t clearBits, uint32_t storeBits) override {}
@@ -74,12 +60,10 @@ public:
void SetDepthTestEnabled(bool enabled) override {}
void SetDepthTestFunction(dp::TestFunction depthFunction) override {}
void SetStencilTestEnabled(bool enabled) override {}
void SetStencilFunction(dp::StencilFace face,
dp::TestFunction stencilFunction) override {}
void SetStencilActions(dp::StencilFace face,
dp::StencilAction stencilFailAction,
dp::StencilAction depthFailAction,
dp::StencilAction passAction) override {}
void SetStencilFunction(dp::StencilFace face, dp::TestFunction stencilFunction) override {}
void SetStencilActions(dp::StencilFace face, dp::StencilAction stencilFailAction, dp::StencilAction depthFailAction,
dp::StencilAction passAction) override
{}
};
} // namespace
@@ -120,7 +104,7 @@ VulkanContextFactory::VulkanContextFactory(uint32_t appVersionCode, int sdkVersi
// Enable extra validation features.
VkValidationFeaturesEXT validationFeatures = {};
const VkValidationFeatureEnableEXT validationFeaturesEnabled[] = {
VkValidationFeatureEnableEXT const validationFeaturesEnabled[] = {
VK_VALIDATION_FEATURE_ENABLE_SYNCHRONIZATION_VALIDATION_EXT};
if (m_layers->IsValidationFeaturesEnabled())
{
@@ -165,8 +149,8 @@ VulkanContextFactory::VulkanContextFactory(uint32_t appVersionCode, int sdkVersi
dp::SupportManager::Version driverVersion{VK_VERSION_MAJOR(gpuProperties.driverVersion),
VK_VERSION_MINOR(gpuProperties.driverVersion),
VK_VERSION_PATCH(gpuProperties.driverVersion)};
if (dp::SupportManager::Instance().IsVulkanForbidden(gpuProperties.deviceName, apiVersion, driverVersion,
isCustomROM, sdkVersion))
if (dp::SupportManager::Instance().IsVulkanForbidden(gpuProperties.deviceName, apiVersion, driverVersion, isCustomROM,
sdkVersion))
{
LOG_ERROR_VK("GPU/Driver configuration is not supported.");
return;
@@ -181,15 +165,12 @@ VulkanContextFactory::VulkanContextFactory(uint32_t appVersionCode, int sdkVersi
}
std::vector<VkQueueFamilyProperties> queueFamilyProperties(queueFamilyCount);
vkGetPhysicalDeviceQueueFamilyProperties(m_gpu, &queueFamilyCount,
queueFamilyProperties.data());
vkGetPhysicalDeviceQueueFamilyProperties(m_gpu, &queueFamilyCount, queueFamilyProperties.data());
uint32_t renderingQueueFamilyIndex = 0;
for (; renderingQueueFamilyIndex < queueFamilyCount; ++renderingQueueFamilyIndex)
{
if (queueFamilyProperties[renderingQueueFamilyIndex].queueFlags & VK_QUEUE_GRAPHICS_BIT)
break;
}
if (renderingQueueFamilyIndex == queueFamilyCount)
{
LOG_ERROR_VK("Any queue family with VK_QUEUE_GRAPHICS_BIT wasn't found.");
@@ -246,20 +227,18 @@ VulkanContextFactory::VulkanContextFactory(uint32_t appVersionCode, int sdkVersi
VkPhysicalDeviceMemoryProperties memoryProperties;
vkGetPhysicalDeviceMemoryProperties(m_gpu, &memoryProperties);
m_objectManager = make_unique_dp<dp::vulkan::VulkanObjectManager>(m_device, gpuProperties.limits,
memoryProperties,
m_objectManager = make_unique_dp<dp::vulkan::VulkanObjectManager>(m_device, gpuProperties.limits, memoryProperties,
renderingQueueFamilyIndex);
bool const hasPartialTextureUpdates =
!dp::SupportManager::Instance().IsVulkanTexturePartialUpdateBuggy(
sdkVersion, gpuProperties.deviceName, apiVersion, driverVersion);
bool const hasPartialTextureUpdates = !dp::SupportManager::Instance().IsVulkanTexturePartialUpdateBuggy(
sdkVersion, gpuProperties.deviceName, apiVersion, driverVersion);
m_drawContext = make_unique_dp<DrawVulkanContext>(
m_vulkanInstance, m_gpu, gpuProperties, m_device, renderingQueueFamilyIndex,
make_ref(m_objectManager), appVersionCode, hasPartialTextureUpdates);
m_uploadContext = make_unique_dp<UploadVulkanContext>(
m_vulkanInstance, m_gpu, gpuProperties, m_device, renderingQueueFamilyIndex,
make_ref(m_objectManager), hasPartialTextureUpdates);
m_drawContext =
make_unique_dp<DrawVulkanContext>(m_vulkanInstance, m_gpu, gpuProperties, m_device, renderingQueueFamilyIndex,
make_ref(m_objectManager), appVersionCode, hasPartialTextureUpdates);
m_uploadContext =
make_unique_dp<UploadVulkanContext>(m_vulkanInstance, m_gpu, gpuProperties, m_device, renderingQueueFamilyIndex,
make_ref(m_objectManager), hasPartialTextureUpdates);
}
VulkanContextFactory::~VulkanContextFactory()
@@ -314,8 +293,7 @@ void VulkanContextFactory::SetPresentAvailable(bool available)
bool VulkanContextFactory::QuerySurfaceSize()
{
auto statusCode = vkGetPhysicalDeviceSurfaceCapabilitiesKHR(m_gpu, m_surface,
&m_surfaceCapabilities);
auto statusCode = vkGetPhysicalDeviceSurfaceCapabilitiesKHR(m_gpu, m_surface, &m_surfaceCapabilities);
if (statusCode != VK_SUCCESS)
{
LOG_ERROR_VK_CALL(vkGetPhysicalDeviceSurfaceCapabilitiesKHR, statusCode);

View File

@@ -1,10 +1,10 @@
#pragma once
#include "drape/graphics_context_factory.hpp"
#include "drape/vulkan/vulkan_base_context.hpp"
#include "drape/vulkan/vulkan_object_manager.hpp"
#include "drape/vulkan/vulkan_layers.hpp"
#include "drape/pointers.hpp"
#include "drape/vulkan/vulkan_base_context.hpp"
#include "drape/vulkan/vulkan_layers.hpp"
#include "drape/vulkan/vulkan_object_manager.hpp"
#include <vulkan/vulkan_android.h>

View File

@@ -11,8 +11,8 @@ namespace dp
{
namespace vulkan
{
VulkanGPUBuffer::VulkanGPUBuffer(ref_ptr<VulkanBaseContext> context, void const * data,
uint8_t elementSize, uint32_t capacity, uint64_t batcherHash)
VulkanGPUBuffer::VulkanGPUBuffer(ref_ptr<VulkanBaseContext> context, void const * data, uint8_t elementSize,
uint32_t capacity, uint64_t batcherHash)
: BufferBase(elementSize, capacity)
, m_batcherHash(batcherHash)
{
@@ -26,8 +26,7 @@ VulkanGPUBuffer::~VulkanGPUBuffer()
m_objectManager->DestroyObject(m_geometryBuffer);
}
void * VulkanGPUBuffer::Map(ref_ptr<VulkanBaseContext> context, uint32_t elementOffset,
uint32_t elementCount)
void * VulkanGPUBuffer::Map(ref_ptr<VulkanBaseContext> context, uint32_t elementOffset, uint32_t elementCount)
{
CHECK(m_objectManager != nullptr, ());
@@ -54,8 +53,7 @@ void * VulkanGPUBuffer::Map(ref_ptr<VulkanBaseContext> context, uint32_t element
return data.m_pointer;
}
void VulkanGPUBuffer::UpdateData(void * gpuPtr, void const * data,
uint32_t elementOffset, uint32_t elementCount)
void VulkanGPUBuffer::UpdateData(void * gpuPtr, void const * data, uint32_t elementOffset, uint32_t elementCount)
{
CHECK(gpuPtr != nullptr, ());
CHECK(m_stagingBufferRef != nullptr, ());
@@ -70,10 +68,9 @@ void VulkanGPUBuffer::UpdateData(void * gpuPtr, void const * data,
copyRegion.srcOffset = baseSrcOffset + byteOffset;
copyRegion.size = byteCount;
m_mappingByteOffsetMin = std::min(m_mappingByteOffsetMin,
static_cast<uint32_t>(copyRegion.dstOffset));
m_mappingByteOffsetMax = std::max(m_mappingByteOffsetMax,
static_cast<uint32_t>(copyRegion.dstOffset + copyRegion.size));
m_mappingByteOffsetMin = std::min(m_mappingByteOffsetMin, static_cast<uint32_t>(copyRegion.dstOffset));
m_mappingByteOffsetMax =
std::max(m_mappingByteOffsetMax, static_cast<uint32_t>(copyRegion.dstOffset + copyRegion.size));
m_regionsToCopy.push_back(std::move(copyRegion));
}
@@ -97,8 +94,7 @@ void VulkanGPUBuffer::Unmap(ref_ptr<VulkanBaseContext> context)
VkBufferMemoryBarrier barrier = {};
barrier.sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER;
barrier.pNext = nullptr;
barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT |
VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT | VK_ACCESS_INDEX_READ_BIT;
barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT | VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT | VK_ACCESS_INDEX_READ_BIT;
barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
@@ -106,9 +102,8 @@ void VulkanGPUBuffer::Unmap(ref_ptr<VulkanBaseContext> context)
barrier.offset = m_mappingByteOffsetMin;
barrier.size = m_mappingByteOffsetMax - m_mappingByteOffsetMin;
vkCmdPipelineBarrier(commandBuffer, VK_PIPELINE_STAGE_VERTEX_INPUT_BIT | VK_PIPELINE_STAGE_TRANSFER_BIT,
VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr,
1, &barrier, 0, nullptr);
VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 1, &barrier, 0, nullptr);
// Schedule command to copy from the staging buffer to the geometry buffer.
vkCmdCopyBuffer(commandBuffer, stagingBuffer, m_geometryBuffer.m_buffer,
static_cast<uint32_t>(m_regionsToCopy.size()), m_regionsToCopy.data());
@@ -116,24 +111,22 @@ void VulkanGPUBuffer::Unmap(ref_ptr<VulkanBaseContext> context)
// Set up barriers to prevent data collisions (read-after-write).
barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
barrier.dstAccessMask = VK_ACCESS_INDEX_READ_BIT | VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT;
vkCmdPipelineBarrier(commandBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT,
VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, 0, 0, nullptr,
vkCmdPipelineBarrier(commandBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, 0, 0, nullptr,
1, &barrier, 0, nullptr);
m_mappingByteOffset = 0;
m_regionsToCopy.clear();
}
void VulkanGPUBuffer::Resize(ref_ptr<VulkanBaseContext> context, void const * data,
uint32_t elementCount)
void VulkanGPUBuffer::Resize(ref_ptr<VulkanBaseContext> context, void const * data, uint32_t elementCount)
{
BufferBase::Resize(elementCount);
m_objectManager = context->GetObjectManager();
uint32_t const sizeInBytes = GetCapacity() * GetElementSize();
m_geometryBuffer = m_objectManager->CreateBuffer(VulkanMemoryManager::ResourceType::Geometry,
sizeInBytes, m_batcherHash);
m_geometryBuffer =
m_objectManager->CreateBuffer(VulkanMemoryManager::ResourceType::Geometry, sizeInBytes, m_batcherHash);
m_objectManager->Fill(m_geometryBuffer, data, sizeInBytes);
// If we have already set up data, we have to call SetDataSize.
@@ -141,10 +134,9 @@ void VulkanGPUBuffer::Resize(ref_ptr<VulkanBaseContext> context, void const * da
SetDataSize(elementCount);
}
} // namespace vulkan
drape_ptr<DataBufferBase> DataBuffer::CreateImplForVulkan(ref_ptr<GraphicsContext> context, void const * data,
uint8_t elementSize, uint32_t capacity,
uint64_t batcherHash)
uint8_t elementSize, uint32_t capacity, uint64_t batcherHash)
{
return make_unique_dp<vulkan::VulkanGpuBufferImpl>(context, data, elementSize, capacity, batcherHash);
}

View File

@@ -22,13 +22,12 @@ class VulkanStagingBuffer;
class VulkanGPUBuffer : public BufferBase
{
public:
VulkanGPUBuffer(ref_ptr<VulkanBaseContext> context, void const * data,
uint8_t elementSize, uint32_t capacity, uint64_t batcherHash);
VulkanGPUBuffer(ref_ptr<VulkanBaseContext> context, void const * data, uint8_t elementSize, uint32_t capacity,
uint64_t batcherHash);
~VulkanGPUBuffer() override;
void * Map(ref_ptr<VulkanBaseContext> context, uint32_t elementOffset, uint32_t elementCount);
void UpdateData(void * gpuPtr, void const * data,
uint32_t elementOffset, uint32_t elementCount);
void UpdateData(void * gpuPtr, void const * data, uint32_t elementOffset, uint32_t elementCount);
void Unmap(ref_ptr<VulkanBaseContext> context);
void Advance(uint32_t elementCount) { BufferBase::UploadData(elementCount); }
@@ -50,23 +49,21 @@ protected:
uint32_t m_mappingByteOffsetMax = std::numeric_limits<uint32_t>::min();
std::vector<VkBufferCopy> m_regionsToCopy;
};
class VulkanGpuBufferImpl : public DataBufferImpl<VulkanGPUBuffer>
{
public:
template <typename... Args>
VulkanGpuBufferImpl(Args &&... params)
: DataBufferImpl(std::forward<Args>(params)...)
VulkanGpuBufferImpl(Args &&... params) : DataBufferImpl(std::forward<Args>(params)...)
{}
void const * Data() const override
{
ASSERT(false, ("Retrieving of raw data is unavailable for GPU buffer"));
return nullptr;
}
void UploadData(ref_ptr<GraphicsContext> context, void const * data,
uint32_t elementCount) override
void UploadData(ref_ptr<GraphicsContext> context, void const * data, uint32_t elementCount) override
{
// In Vulkan we must call upload only from FR.
ref_ptr<VulkanBaseContext> vulkanContext = context;
@@ -81,23 +78,18 @@ public:
m_buffer->Unmap(context);
m_buffer->Advance(elementCount);
}
void UpdateData(void * destPtr, void const * srcPtr, uint32_t elementOffset,
uint32_t elementCount) override
void UpdateData(void * destPtr, void const * srcPtr, uint32_t elementOffset, uint32_t elementCount) override
{
m_buffer->UpdateData(destPtr, srcPtr, elementOffset, elementCount);
}
void * Map(ref_ptr<GraphicsContext> context, uint32_t elementOffset,
uint32_t elementCount) override
void * Map(ref_ptr<GraphicsContext> context, uint32_t elementOffset, uint32_t elementCount) override
{
return m_buffer->Map(context, elementOffset, elementCount);
}
void Unmap(ref_ptr<GraphicsContext> context) override
{
m_buffer->Unmap(context);
}
void Unmap(ref_ptr<GraphicsContext> context) override { m_buffer->Unmap(context); }
void Bind() override {}

View File

@@ -18,12 +18,9 @@ class VulkanGpuProgram : public GpuProgram
public:
using TextureBindings = std::unordered_map<std::string, int8_t>;
VulkanGpuProgram(std::string const & programName,
VkPipelineShaderStageCreateInfo const & vertexShader,
VkPipelineShaderStageCreateInfo const & fragmentShader,
VkDescriptorSetLayout descriptorSetLayout,
VkPipelineLayout pipelineLayout,
TextureBindings const & textureBindings)
VulkanGpuProgram(std::string const & programName, VkPipelineShaderStageCreateInfo const & vertexShader,
VkPipelineShaderStageCreateInfo const & fragmentShader, VkDescriptorSetLayout descriptorSetLayout,
VkPipelineLayout pipelineLayout, TextureBindings const & textureBindings)
: GpuProgram(programName)
, m_vertexShader(vertexShader)
, m_fragmentShader(fragmentShader)
@@ -35,10 +32,7 @@ public:
void Bind() override {}
void Unbind() override {}
std::array<VkPipelineShaderStageCreateInfo, 2> GetShaders() const
{
return {{m_vertexShader, m_fragmentShader}};
}
std::array<VkPipelineShaderStageCreateInfo, 2> GetShaders() const { return {{m_vertexShader, m_fragmentShader}}; }
VkDescriptorSetLayout GetDescriptorSetLayout() const { return m_descriptorSetLayout; }

136
libs/drape/vulkan/vulkan_layers.cpp Executable file → Normal file
View File

@@ -17,34 +17,32 @@ char const * kDebugReportExtension = "VK_EXT_debug_report";
char const * kValidationFeaturesExtension = "VK_EXT_validation_features";
char const * const kInstanceExtensions[] = {
"VK_KHR_surface",
"VK_KHR_android_surface",
kDebugReportExtension,
kValidationFeaturesExtension,
"VK_KHR_surface",
"VK_KHR_android_surface",
kDebugReportExtension,
kValidationFeaturesExtension,
#if defined(OMIM_OS_MAC) || defined(OMIM_OS_LINUX)
"VK_EXT_debug_utils",
"VK_EXT_debug_utils",
#endif
#if defined(OMIM_OS_MAC)
"VK_KHR_portability_enumeration",
"VK_MVK_macos_surface",
"VK_KHR_get_physical_device_properties2",
"VK_KHR_portability_enumeration",
"VK_MVK_macos_surface",
"VK_KHR_get_physical_device_properties2",
#endif
#if defined(OMIM_OS_LINUX)
"VK_KHR_xlib_surface",
"VK_KHR_xlib_surface",
#endif
};
char const * const kDeviceExtensions[] =
{
"VK_KHR_swapchain",
char const * const kDeviceExtensions[] = {
"VK_KHR_swapchain",
#if defined(OMIM_OS_MAC)
"VK_KHR_portability_subset",
"VK_KHR_portability_subset",
#endif
};
char const * const kValidationLayers[] =
{
"VK_LAYER_KHRONOS_validation",
char const * const kValidationLayers[] = {
"VK_LAYER_KHRONOS_validation",
};
std::vector<char const *> CheckLayers(std::vector<VkLayerProperties> const & props)
@@ -53,20 +51,16 @@ std::vector<char const *> CheckLayers(std::vector<VkLayerProperties> const & pro
result.reserve(props.size());
for (uint32_t i = 0; i < ARRAY_SIZE(kValidationLayers); ++i)
{
auto const it = std::find_if(props.begin(), props.end(),
[i](VkLayerProperties const & p)
{
return strcmp(kValidationLayers[i], p.layerName) == 0;
});
auto const it = std::find_if(props.begin(), props.end(), [i](VkLayerProperties const & p)
{ return strcmp(kValidationLayers[i], p.layerName) == 0; });
if (it != props.end())
result.push_back(kValidationLayers[i]);
}
return result;
}
std::vector<char const *> CheckExtensions(std::vector<VkExtensionProperties> const & props,
bool enableDiagnostics, char const * const * extensions,
uint32_t extensionsCount)
std::vector<char const *> CheckExtensions(std::vector<VkExtensionProperties> const & props, bool enableDiagnostics,
char const * const * extensions, uint32_t extensionsCount)
{
std::vector<char const *> result;
result.reserve(props.size());
@@ -81,11 +75,8 @@ std::vector<char const *> CheckExtensions(std::vector<VkExtensionProperties> con
continue;
}
auto const it = std::find_if(props.begin(), props.end(),
[i, extensions](VkExtensionProperties const & p)
{
return strcmp(extensions[i], p.extensionName) == 0;
});
auto const it = std::find_if(props.begin(), props.end(), [i, extensions](VkExtensionProperties const & p)
{ return strcmp(extensions[i], p.extensionName) == 0; });
if (it != props.end())
result.push_back(extensions[i]);
}
@@ -124,16 +115,13 @@ std::string GetReportObjectTypeString(VkDebugReportObjectTypeEXT objectType)
case VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT: return "COMMAND_POOL";
case VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT: return "SURFACE_KHR";
case VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT: return "SWAPCHAIN_KHR";
case VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT:
return "DEBUG_REPORT_CALLBACK_EXT";
case VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT: return "DEBUG_REPORT_CALLBACK_EXT";
case VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT: return "DISPLAY_KHR";
case VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT: return "DISPLAY_MODE_KHR";
case VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT: return "VALIDATION_CACHE_EXT";
case VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT: return "SAMPLER_YCBCR_CONVERSION";
case VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT:
return "DESCRIPTOR_UPDATE_TEMPLATE";
case VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT:
return "ACCELERATION_STRUCTURE_NV";
case VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT: return "DESCRIPTOR_UPDATE_TEMPLATE";
case VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT: return "ACCELERATION_STRUCTURE_NV";
case VK_DEBUG_REPORT_OBJECT_TYPE_MAX_ENUM_EXT: return "MAX_ENUM";
case VK_DEBUG_REPORT_OBJECT_TYPE_CU_MODULE_NVX_EXT: return "CU_MODULE_NVX";
case VK_DEBUG_REPORT_OBJECT_TYPE_CU_FUNCTION_NVX_EXT: return "CU_FUNCTION_NVX";
@@ -148,41 +136,31 @@ std::string GetReportObjectTypeString(VkDebugReportObjectTypeEXT objectType)
bool IsContained(char const * name, std::vector<char const *> const & collection)
{
return collection.end() != std::find_if(collection.begin(), collection.end(),
[name](char const * v) { return strcmp(name, v) == 0; });
return collection.end() !=
std::find_if(collection.begin(), collection.end(), [name](char const * v) { return strcmp(name, v) == 0; });
}
} // namespace
static VkBool32 VKAPI_PTR DebugReportCallbackImpl(VkDebugReportFlagsEXT flags,
VkDebugReportObjectTypeEXT objectType, uint64_t object,
size_t location, int32_t /*messageCode*/,
const char * pLayerPrefix, const char * pMessage,
void * /*pUserData*/)
static VkBool32 VKAPI_PTR DebugReportCallbackImpl(VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType,
uint64_t object, size_t location, int32_t /*messageCode*/,
char const * pLayerPrefix, char const * pMessage,
void * /*pUserData*/)
{
auto logLevel = base::LogLevel::LINFO;
if ((flags & VK_DEBUG_REPORT_WARNING_BIT_EXT) ||
(flags & VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT))
{
if ((flags & VK_DEBUG_REPORT_WARNING_BIT_EXT) || (flags & VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT))
logLevel = base::LogLevel::LWARNING;
}
else if (flags & VK_DEBUG_REPORT_ERROR_BIT_EXT)
{
logLevel = base::LogLevel::LERROR;
}
#ifdef ENABLE_VULKAN_DEBUG_DIAGNOSTICS_MESSAGES
else if (flags & VK_DEBUG_REPORT_DEBUG_BIT_EXT)
{
logLevel = base::LogLevel::LDEBUG;
}
#else
else
{
return VK_FALSE;
}
#endif
LOG(logLevel, ("Vulkan Diagnostics [", pLayerPrefix, "] [", GetReportObjectTypeString(objectType),
"] [OBJ:", object, "LOC:", location, "]:", pMessage));
LOG(logLevel, ("Vulkan Diagnostics [", pLayerPrefix, "] [", GetReportObjectTypeString(objectType), "] [OBJ:", object,
"LOC:", location, "]:", pMessage));
return VK_FALSE;
}
@@ -235,8 +213,7 @@ Layers::Layers(bool enableDiagnostics)
if (instExtensionsCount != 0)
{
extensionsProperties.resize(instExtensionsCount);
statusCode = vkEnumerateInstanceExtensionProperties(nullptr, &instExtensionsCount,
extensionsProperties.data());
statusCode = vkEnumerateInstanceExtensionProperties(nullptr, &instExtensionsCount, extensionsProperties.data());
if (statusCode != VK_SUCCESS)
{
LOG_ERROR_VK_CALL(vkEnumerateInstanceExtensionProperties, statusCode);
@@ -267,8 +244,8 @@ Layers::Layers(bool enableDiagnostics)
extensionsProperties.insert(extensionsProperties.end(), props.begin(), props.end());
}
m_instanceExtensions = CheckExtensions(extensionsProperties, m_enableDiagnostics,
kInstanceExtensions, ARRAY_SIZE(kInstanceExtensions));
m_instanceExtensions =
CheckExtensions(extensionsProperties, m_enableDiagnostics, kInstanceExtensions, ARRAY_SIZE(kInstanceExtensions));
for (auto ext : m_instanceExtensions)
{
@@ -314,8 +291,7 @@ bool Layers::Initialize(VkInstance instance, VkPhysicalDevice physicalDevice)
{
// Get device layers count.
uint32_t devLayerCount = 0;
auto statusCode = vkEnumerateDeviceLayerProperties(physicalDevice, &devLayerCount,
nullptr);
auto statusCode = vkEnumerateDeviceLayerProperties(physicalDevice, &devLayerCount, nullptr);
if (statusCode != VK_SUCCESS)
{
LOG_ERROR_VK_CALL(vkEnumerateDeviceLayerProperties, statusCode);
@@ -327,8 +303,7 @@ bool Layers::Initialize(VkInstance instance, VkPhysicalDevice physicalDevice)
if (devLayerCount != 0)
{
layerProperties.resize(devLayerCount);
statusCode = vkEnumerateDeviceLayerProperties(physicalDevice, &devLayerCount,
layerProperties.data());
statusCode = vkEnumerateDeviceLayerProperties(physicalDevice, &devLayerCount, layerProperties.data());
if (statusCode != VK_SUCCESS)
{
LOG_ERROR_VK_CALL(vkEnumerateDeviceLayerProperties, statusCode);
@@ -343,8 +318,7 @@ bool Layers::Initialize(VkInstance instance, VkPhysicalDevice physicalDevice)
// Get device extensions count.
uint32_t devExtensionsCount = 0;
auto statusCode = vkEnumerateDeviceExtensionProperties(physicalDevice, nullptr,
&devExtensionsCount, nullptr);
auto statusCode = vkEnumerateDeviceExtensionProperties(physicalDevice, nullptr, &devExtensionsCount, nullptr);
if (statusCode != VK_SUCCESS)
{
LOG_ERROR_VK_CALL(vkEnumerateDeviceExtensionProperties, statusCode);
@@ -356,9 +330,8 @@ bool Layers::Initialize(VkInstance instance, VkPhysicalDevice physicalDevice)
if (devExtensionsCount != 0)
{
extensionsProperties.resize(devExtensionsCount);
statusCode = vkEnumerateDeviceExtensionProperties(physicalDevice, nullptr,
&devExtensionsCount,
extensionsProperties.data());
statusCode =
vkEnumerateDeviceExtensionProperties(physicalDevice, nullptr, &devExtensionsCount, extensionsProperties.data());
if (statusCode != VK_SUCCESS)
{
LOG_ERROR_VK_CALL(vkEnumerateDeviceExtensionProperties, statusCode);
@@ -370,8 +343,7 @@ bool Layers::Initialize(VkInstance instance, VkPhysicalDevice physicalDevice)
for (auto layerName : m_deviceLayers)
{
uint32_t cnt = 0;
statusCode = vkEnumerateDeviceExtensionProperties(physicalDevice, layerName,
&cnt, nullptr);
statusCode = vkEnumerateDeviceExtensionProperties(physicalDevice, layerName, &cnt, nullptr);
if (statusCode != VK_SUCCESS)
{
LOG_ERROR_VK_CALL(vkEnumerateDeviceExtensionProperties, statusCode);
@@ -381,8 +353,7 @@ bool Layers::Initialize(VkInstance instance, VkPhysicalDevice physicalDevice)
continue;
std::vector<VkExtensionProperties> props(cnt);
statusCode = vkEnumerateDeviceExtensionProperties(physicalDevice, layerName,
&cnt, props.data());
statusCode = vkEnumerateDeviceExtensionProperties(physicalDevice, layerName, &cnt, props.data());
if (statusCode != VK_SUCCESS)
{
LOG_ERROR_VK_CALL(vkEnumerateDeviceExtensionProperties, statusCode);
@@ -391,8 +362,8 @@ bool Layers::Initialize(VkInstance instance, VkPhysicalDevice physicalDevice)
extensionsProperties.insert(extensionsProperties.end(), props.begin(), props.end());
}
m_deviceExtensions = CheckExtensions(extensionsProperties, m_enableDiagnostics,
kDeviceExtensions, ARRAY_SIZE(kDeviceExtensions));
m_deviceExtensions =
CheckExtensions(extensionsProperties, m_enableDiagnostics, kDeviceExtensions, ARRAY_SIZE(kDeviceExtensions));
for (auto ext : m_deviceExtensions)
LOG(LINFO, ("Vulkan device extension prepared", ext));
@@ -401,14 +372,11 @@ bool Layers::Initialize(VkInstance instance, VkPhysicalDevice physicalDevice)
if (m_vkCreateDebugReportCallbackEXT == nullptr)
{
m_vkCreateDebugReportCallbackEXT =
(PFN_vkCreateDebugReportCallbackEXT)vkGetInstanceProcAddr(instance,
"vkCreateDebugReportCallbackEXT");
(PFN_vkCreateDebugReportCallbackEXT)vkGetInstanceProcAddr(instance, "vkCreateDebugReportCallbackEXT");
m_vkDestroyDebugReportCallbackEXT =
(PFN_vkDestroyDebugReportCallbackEXT)vkGetInstanceProcAddr(instance,
"vkDestroyDebugReportCallbackEXT");
(PFN_vkDestroyDebugReportCallbackEXT)vkGetInstanceProcAddr(instance, "vkDestroyDebugReportCallbackEXT");
m_vkDebugReportMessageEXT =
(PFN_vkDebugReportMessageEXT)vkGetInstanceProcAddr(instance,
"vkDebugReportMessageEXT");
(PFN_vkDebugReportMessageEXT)vkGetInstanceProcAddr(instance, "vkDebugReportMessageEXT");
}
if (m_vkCreateDebugReportCallbackEXT == nullptr)
@@ -420,10 +388,8 @@ bool Layers::Initialize(VkInstance instance, VkPhysicalDevice physicalDevice)
VkDebugReportCallbackCreateInfoEXT dbgInfo = {};
dbgInfo.sType = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT;
dbgInfo.pNext = nullptr;
dbgInfo.flags = VK_DEBUG_REPORT_INFORMATION_BIT_EXT |
VK_DEBUG_REPORT_WARNING_BIT_EXT |
VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT |
VK_DEBUG_REPORT_ERROR_BIT_EXT |
dbgInfo.flags = VK_DEBUG_REPORT_INFORMATION_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT |
VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT | VK_DEBUG_REPORT_ERROR_BIT_EXT |
VK_DEBUG_REPORT_DEBUG_BIT_EXT;
dbgInfo.pfnCallback = DebugReportCallbackImpl;
dbgInfo.pUserData = nullptr;
@@ -470,9 +436,9 @@ char const * const * Layers::GetDeviceExtensions() const
return m_deviceExtensions.data();
}
bool Layers::IsValidationFeaturesEnabled() const
bool Layers::IsValidationFeaturesEnabled() const
{
return m_validationFeaturesEnabled;
return m_validationFeaturesEnabled;
}
} // namespace vulkan
} // namespace dp

2
libs/drape/vulkan/vulkan_layers.hpp Executable file → Normal file
View File

@@ -40,7 +40,7 @@ private:
std::vector<char const *> m_deviceLayers;
std::vector<char const *> m_deviceExtensions;
VkDebugReportCallbackEXT m_reportCallback {0};
VkDebugReportCallbackEXT m_reportCallback{0};
PFN_vkCreateDebugReportCallbackEXT m_vkCreateDebugReportCallbackEXT = nullptr;
PFN_vkDestroyDebugReportCallbackEXT m_vkDestroyDebugReportCallbackEXT = nullptr;

View File

@@ -13,25 +13,22 @@ namespace vulkan
{
namespace
{
std::array<uint32_t, VulkanMemoryManager::kResourcesCount> const kMinBlockSizeInBytes =
{{
1024 * 1024, // Geometry
128 * 1024, // Uniform
0, // Staging (no minimal size)
0, // Image (no minimal size)
std::array<uint32_t, VulkanMemoryManager::kResourcesCount> const kMinBlockSizeInBytes = {{
1024 * 1024, // Geometry
128 * 1024, // Uniform
0, // Staging (no minimal size)
0, // Image (no minimal size)
}};
std::array<uint32_t, VulkanMemoryManager::kResourcesCount> const kDesiredSizeInBytes =
{{
80 * 1024 * 1024, // Geometry
std::numeric_limits<uint32_t>::max(), // Uniform (unlimited)
20 * 1024 * 1024, // Staging
100 * 1024 * 1024, // Image
std::array<uint32_t, VulkanMemoryManager::kResourcesCount> const kDesiredSizeInBytes = {{
80 * 1024 * 1024, // Geometry
std::numeric_limits<uint32_t>::max(), // Uniform (unlimited)
20 * 1024 * 1024, // Staging
100 * 1024 * 1024, // Image
}};
VkMemoryPropertyFlags GetMemoryPropertyFlags(
VulkanMemoryManager::ResourceType resourceType,
std::optional<VkMemoryPropertyFlags> & fallbackTypeBits)
VkMemoryPropertyFlags GetMemoryPropertyFlags(VulkanMemoryManager::ResourceType resourceType,
std::optional<VkMemoryPropertyFlags> & fallbackTypeBits)
{
switch (resourceType)
{
@@ -51,8 +48,7 @@ VkMemoryPropertyFlags GetMemoryPropertyFlags(
// No fallback.
return VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
case VulkanMemoryManager::ResourceType::Count:
CHECK(false, ());
case VulkanMemoryManager::ResourceType::Count: CHECK(false, ());
}
return 0;
}
@@ -61,18 +57,9 @@ struct LessBlockSize
{
using BlockPtrT = drape_ptr<VulkanMemoryManager::MemoryBlock>;
bool operator()(BlockPtrT const & b1, BlockPtrT const & b2) const
{
return b1->m_blockSize < b2->m_blockSize;
}
bool operator()(BlockPtrT const & b1, uint32_t b2) const
{
return b1->m_blockSize < b2;
}
bool operator()(uint32_t b1, BlockPtrT const & b2) const
{
return b1 < b2->m_blockSize;
}
bool operator()(BlockPtrT const & b1, BlockPtrT const & b2) const { return b1->m_blockSize < b2->m_blockSize; }
bool operator()(BlockPtrT const & b1, uint32_t b2) const { return b1->m_blockSize < b2; }
bool operator()(uint32_t b1, BlockPtrT const & b2) const { return b1 < b2->m_blockSize; }
};
} // namespace
@@ -105,8 +92,8 @@ VulkanMemoryManager::~VulkanMemoryManager()
ASSERT_EQUAL(m_totalAllocationCounter, 0, ());
}
std::optional<uint32_t> VulkanMemoryManager::GetMemoryTypeIndex(
uint32_t typeBits, VkMemoryPropertyFlags properties) const
std::optional<uint32_t> VulkanMemoryManager::GetMemoryTypeIndex(uint32_t typeBits,
VkMemoryPropertyFlags properties) const
{
for (uint32_t i = 0; i < m_memoryProperties.memoryTypeCount; i++)
{
@@ -130,16 +117,14 @@ uint32_t VulkanMemoryManager::GetOffsetAlignment(ResourceType resourceType) cons
return kUniformAlignment;
}
static uint32_t const kAlignment =
math::LCM(static_cast<uint32_t>(m_deviceLimits.minMemoryMapAlignment),
static_cast<uint32_t>(m_deviceLimits.nonCoherentAtomSize));
static uint32_t const kAlignment = math::LCM(static_cast<uint32_t>(m_deviceLimits.minMemoryMapAlignment),
static_cast<uint32_t>(m_deviceLimits.nonCoherentAtomSize));
return kAlignment;
}
uint32_t VulkanMemoryManager::GetSizeAlignment(VkMemoryRequirements const & memReqs) const
{
return math::LCM(static_cast<uint32_t>(memReqs.alignment),
static_cast<uint32_t>(m_deviceLimits.nonCoherentAtomSize));
return math::LCM(static_cast<uint32_t>(memReqs.alignment), static_cast<uint32_t>(m_deviceLimits.nonCoherentAtomSize));
}
// static
@@ -151,8 +136,7 @@ uint32_t VulkanMemoryManager::GetAligned(uint32_t value, uint32_t alignment)
}
VulkanMemoryManager::AllocationPtr VulkanMemoryManager::Allocate(ResourceType resourceType,
VkMemoryRequirements memReqs,
uint64_t blockHash)
VkMemoryRequirements memReqs, uint64_t blockHash)
{
size_t const intResType = static_cast<size_t>(resourceType);
auto const alignedSize = GetAligned(static_cast<uint32_t>(memReqs.size), GetSizeAlignment(memReqs));
@@ -171,8 +155,7 @@ VulkanMemoryManager::AllocationPtr VulkanMemoryManager::Allocate(ResourceType re
{
block->m_freeOffset = alignedOffset + alignedSize;
block->m_allocationCounter++;
return std::make_shared<Allocation>(resourceType, blockHash, alignedOffset, alignedSize,
make_ref(block));
return std::make_shared<Allocation>(resourceType, blockHash, alignedOffset, alignedSize, make_ref(block));
}
}
@@ -191,8 +174,7 @@ VulkanMemoryManager::AllocationPtr VulkanMemoryManager::Allocate(ResourceType re
freeBlock->m_freeOffset = alignedSize;
freeBlock->m_allocationCounter++;
auto p = std::make_shared<Allocation>(resourceType, blockHash, 0, alignedSize,
make_ref(freeBlock));
auto p = std::make_shared<Allocation>(resourceType, blockHash, 0, alignedSize, make_ref(freeBlock));
m[blockHash].push_back(std::move(freeBlock));
return p;
@@ -222,8 +204,8 @@ VulkanMemoryManager::AllocationPtr VulkanMemoryManager::Allocate(ResourceType re
IncrementTotalAllocationsCount();
CHECK_VK_CALL_EX(vkAllocateMemory(m_device, &memAllocInfo, nullptr, &memory),
("Requested size =", blockSize, "Allocated sizes =", m_sizes, "Total allocs =", m_totalAllocationCounter,
m_memory[intResType].size(), m_freeBlocks[intResType].size()));
("Requested size =", blockSize, "Allocated sizes =", m_sizes, "Total allocs =",
m_totalAllocationCounter, m_memory[intResType].size(), m_freeBlocks[intResType].size()));
m_sizes[intResType] += blockSize;
@@ -237,8 +219,7 @@ VulkanMemoryManager::AllocationPtr VulkanMemoryManager::Allocate(ResourceType re
newBlock->m_allocationCounter++;
newBlock->m_isCoherent = ((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0);
auto p = std::make_shared<Allocation>(resourceType, blockHash, 0, alignedSize,
make_ref(newBlock));
auto p = std::make_shared<Allocation>(resourceType, blockHash, 0, alignedSize, make_ref(newBlock));
m[blockHash].push_back(std::move(newBlock));
return p;
}
@@ -257,8 +238,7 @@ void VulkanMemoryManager::Deallocate(AllocationPtr ptr)
auto & m = m_memory[resourceIndex];
auto const it = m.find(ptr->m_blockHash);
CHECK(it != m.end(), ());
auto blockIt = std::find_if(it->second.begin(), it->second.end(),
[&ptr](drape_ptr<MemoryBlock> const & b)
auto blockIt = std::find_if(it->second.begin(), it->second.end(), [&ptr](drape_ptr<MemoryBlock> const & b)
{
ASSERT(ptr->m_memoryBlock != nullptr, ());
return b->m_memory == ptr->m_memoryBlock->m_memory;
@@ -335,7 +315,8 @@ void VulkanMemoryManager::EndDeallocationSession()
fm.push_back(std::move(block));
}
return true;
}), m.end());
}),
m.end());
if (m.empty())
hashesToDelete.push_back(p.first);

View File

@@ -33,8 +33,7 @@ public:
Count
};
static size_t constexpr kResourcesCount =
static_cast<uint32_t>(VulkanMemoryManager::ResourceType::Count);
static size_t constexpr kResourcesCount = static_cast<uint32_t>(VulkanMemoryManager::ResourceType::Count);
struct MemoryBlock
{
@@ -54,8 +53,8 @@ public:
ResourceType const m_resourceType;
ref_ptr<MemoryBlock> m_memoryBlock;
Allocation(ResourceType resourceType, uint64_t blockHash, uint32_t offset,
uint32_t size, ref_ptr<MemoryBlock> memoryBlock)
Allocation(ResourceType resourceType, uint64_t blockHash, uint32_t offset, uint32_t size,
ref_ptr<MemoryBlock> memoryBlock)
: m_blockHash(blockHash)
, m_alignedOffset(offset)
, m_alignedSize(size)
@@ -66,8 +65,7 @@ public:
using AllocationPtr = std::shared_ptr<Allocation>;
AllocationPtr Allocate(ResourceType resourceType, VkMemoryRequirements memReqs,
uint64_t blockHash);
AllocationPtr Allocate(ResourceType resourceType, VkMemoryRequirements memReqs, uint64_t blockHash);
void BeginDeallocationSession();
void Deallocate(AllocationPtr ptr);
void EndDeallocationSession();
@@ -79,8 +77,7 @@ public:
VkPhysicalDeviceLimits const & GetDeviceLimits() const;
private:
std::optional<uint32_t> GetMemoryTypeIndex(uint32_t typeBits,
VkMemoryPropertyFlags properties) const;
std::optional<uint32_t> GetMemoryTypeIndex(uint32_t typeBits, VkMemoryPropertyFlags properties) const;
void IncrementTotalAllocationsCount();
void DecrementTotalAllocationsCount();

View File

@@ -1,8 +1,8 @@
#include "drape/mesh_object.hpp"
#include "drape/pointers.hpp"
#include "drape/vulkan/vulkan_base_context.hpp"
#include "drape/vulkan/vulkan_staging_buffer.hpp"
#include "drape/vulkan/vulkan_param_descriptor.hpp"
#include "drape/vulkan/vulkan_staging_buffer.hpp"
#include "drape/vulkan/vulkan_utils.hpp"
#include "base/assert.hpp"
@@ -22,9 +22,9 @@ VkPrimitiveTopology GetPrimitiveType(MeshObject::DrawPrimitive primitive)
{
switch (primitive)
{
case MeshObject::DrawPrimitive::Triangles: return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;
case MeshObject::DrawPrimitive::TriangleStrip: return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP;
case MeshObject::DrawPrimitive::LineStrip: return VK_PRIMITIVE_TOPOLOGY_LINE_STRIP;
case MeshObject::DrawPrimitive::Triangles: return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;
case MeshObject::DrawPrimitive::TriangleStrip: return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP;
case MeshObject::DrawPrimitive::LineStrip: return VK_PRIMITIVE_TOPOLOGY_LINE_STRIP;
}
UNREACHABLE();
}
@@ -50,15 +50,15 @@ public:
if (sizeInBytes == 0)
continue;
m_geometryBuffers[i] = m_objectManager->CreateBuffer(VulkanMemoryManager::ResourceType::Geometry,
sizeInBytes, 0 /* batcherHash */);
SET_DEBUG_NAME_VK(VK_OBJECT_TYPE_BUFFER, m_geometryBuffers[i].m_buffer,
m_geometryBuffers[i] =
m_objectManager->CreateBuffer(VulkanMemoryManager::ResourceType::Geometry, sizeInBytes, 0 /* batcherHash */);
SET_DEBUG_NAME_VK(VK_OBJECT_TYPE_BUFFER, m_geometryBuffers[i].m_buffer,
("VB: Mesh (" + m_mesh->m_debugName + ") " + std::to_string(i)).c_str());
m_objectManager->Fill(m_geometryBuffers[i], m_mesh->m_buffers[i]->GetData(), sizeInBytes);
m_bindingInfo[i] = dp::BindingInfo(static_cast<uint8_t>(m_mesh->m_buffers[i]->m_attributes.size()),
static_cast<uint8_t>(i));
m_bindingInfo[i] =
dp::BindingInfo(static_cast<uint8_t>(m_mesh->m_buffers[i]->m_attributes.size()), static_cast<uint8_t>(i));
for (size_t j = 0; j < m_mesh->m_buffers[i]->m_attributes.size(); ++j)
{
auto const & attr = m_mesh->m_buffers[i]->m_attributes[j];
@@ -67,7 +67,7 @@ public:
binding.m_componentCount = static_cast<uint8_t>(attr.m_componentsCount);
binding.m_componentType = gl_const::GLFloatType;
binding.m_offset = static_cast<uint8_t>(attr.m_offset);
CHECK_LESS_OR_EQUAL(m_mesh->m_buffers[i]->GetStrideInBytes(),
CHECK_LESS_OR_EQUAL(m_mesh->m_buffers[i]->GetStrideInBytes(),
static_cast<uint32_t>(std::numeric_limits<uint8_t>::max()), ());
binding.m_stride = static_cast<uint8_t>(m_mesh->m_buffers[i]->GetStrideInBytes());
}
@@ -76,9 +76,9 @@ public:
if (!m_mesh->m_indices.empty())
{
auto const sizeInBytes = static_cast<uint32_t>(m_mesh->m_indices.size() * sizeof(uint16_t));
m_indexBuffer = m_objectManager->CreateBuffer(VulkanMemoryManager::ResourceType::Geometry,
sizeInBytes, 0 /* batcherHash */);
SET_DEBUG_NAME_VK(VK_OBJECT_TYPE_BUFFER, m_indexBuffer.m_buffer,
m_indexBuffer =
m_objectManager->CreateBuffer(VulkanMemoryManager::ResourceType::Geometry, sizeInBytes, 0 /* batcherHash */);
SET_DEBUG_NAME_VK(VK_OBJECT_TYPE_BUFFER, m_indexBuffer.m_buffer,
("IB: Mesh (" + m_mesh->m_debugName + ")").c_str());
m_objectManager->Fill(m_indexBuffer, m_mesh->m_indices.data(), sizeInBytes);
@@ -103,8 +103,7 @@ public:
auto const sizeInBytes = buffer->GetSizeInBytes();
CHECK(sizeInBytes != 0, ());
UpdateBufferInternal(context, m_geometryBuffers[bufferInd].m_buffer,
VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT,
UpdateBufferInternal(context, m_geometryBuffers[bufferInd].m_buffer, VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT,
buffer->GetData(), sizeInBytes);
}
@@ -113,14 +112,12 @@ public:
CHECK(!m_mesh->m_indices.empty(), ());
auto const sizeInBytes = m_mesh->m_indices.size() * sizeof(uint16_t);
CHECK(m_indexBuffer.m_buffer != VK_NULL_HANDLE, ());
UpdateBufferInternal(context, m_indexBuffer.m_buffer,
VK_ACCESS_INDEX_READ_BIT,
m_mesh->m_indices.data(), sizeInBytes);
UpdateBufferInternal(context, m_indexBuffer.m_buffer, VK_ACCESS_INDEX_READ_BIT, m_mesh->m_indices.data(),
sizeInBytes);
}
void DrawPrimitives(ref_ptr<dp::GraphicsContext> context, uint32_t vertexCount,
uint32_t startVertex) override
void DrawPrimitives(ref_ptr<dp::GraphicsContext> context, uint32_t vertexCount, uint32_t startVertex) override
{
ref_ptr<dp::vulkan::VulkanBaseContext> vulkanContext = context;
VkCommandBuffer commandBuffer = vulkanContext->GetCurrentRenderingCommandBuffer();
@@ -131,8 +128,7 @@ public:
vkCmdDraw(commandBuffer, vertexCount, 1, startVertex, 0);
}
void DrawPrimitivesIndexed(ref_ptr<dp::GraphicsContext> context, uint32_t indexCount,
uint32_t startIndex) override
void DrawPrimitivesIndexed(ref_ptr<dp::GraphicsContext> context, uint32_t indexCount, uint32_t startIndex) override
{
ref_ptr<dp::vulkan::VulkanBaseContext> vulkanContext = context;
VkCommandBuffer commandBuffer = vulkanContext->GetCurrentRenderingCommandBuffer();
@@ -161,12 +157,10 @@ private:
auto descriptorSet = m_descriptorUpdater.GetDescriptorSet();
uint32_t dynamicOffset = vulkanContext->GetCurrentDynamicBufferOffset();
vkCmdBindDescriptorSets(commandBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS,
vulkanContext->GetCurrentPipelineLayout(), 0, 1,
&descriptorSet, 1, &dynamicOffset);
vkCmdBindDescriptorSets(commandBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, vulkanContext->GetCurrentPipelineLayout(),
0, 1, &descriptorSet, 1, &dynamicOffset);
vkCmdBindPipeline(commandBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS,
vulkanContext->GetCurrentPipeline());
vkCmdBindPipeline(commandBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, vulkanContext->GetCurrentPipeline());
buffer_vector<VkBuffer, 8> buffers;
buffer_vector<VkDeviceSize, 8> offsets;
@@ -175,12 +169,10 @@ private:
buffers.emplace_back(m_geometryBuffers[i].m_buffer);
offsets.emplace_back(0);
}
vkCmdBindVertexBuffers(commandBuffer, 0, m_geometryBuffers.size(), buffers.data(),
offsets.data());
vkCmdBindVertexBuffers(commandBuffer, 0, m_geometryBuffers.size(), buffers.data(), offsets.data());
}
void UpdateBufferInternal(ref_ptr<dp::GraphicsContext> context, VkBuffer buffer,
VkAccessFlagBits bufferAccessMask,
void UpdateBufferInternal(ref_ptr<dp::GraphicsContext> context, VkBuffer buffer, VkAccessFlagBits bufferAccessMask,
void const * data, uint32_t sizeInBytes)
{
ref_ptr<dp::vulkan::VulkanBaseContext> vulkanContext = context;
@@ -199,8 +191,7 @@ private:
barrier.offset = 0;
barrier.size = sizeInBytes;
vkCmdPipelineBarrier(commandBuffer, VK_PIPELINE_STAGE_VERTEX_INPUT_BIT | VK_PIPELINE_STAGE_TRANSFER_BIT,
VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr,
1, &barrier, 0, nullptr);
VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 1, &barrier, 0, nullptr);
// Copy to default or temporary staging buffer.
auto stagingBuffer = vulkanContext->GetDefaultStagingBuffer();
@@ -237,9 +228,8 @@ private:
// Set up a barrier to prevent data collisions (read-after-write).
barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
barrier.dstAccessMask = bufferAccessMask;
vkCmdPipelineBarrier(commandBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT,
VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, 0, 0, nullptr,
1, &barrier, 0, nullptr);
vkCmdPipelineBarrier(commandBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, 0, 0,
nullptr, 1, &barrier, 0, nullptr);
}
ref_ptr<dp::MeshObject> m_mesh;
@@ -255,7 +245,6 @@ private:
void MeshObject::InitForVulkan(ref_ptr<dp::GraphicsContext> context)
{
ref_ptr<dp::vulkan::VulkanBaseContext> vulkanContext = context;
m_impl = make_unique_dp<vulkan::VulkanMeshObjectImpl>(vulkanContext->GetObjectManager(),
make_ref(this));
m_impl = make_unique_dp<vulkan::VulkanMeshObjectImpl>(vulkanContext->GetObjectManager(), make_ref(this));
}
} // namespace dp

View File

@@ -62,10 +62,8 @@ VulkanObjectManager::~VulkanObjectManager()
CollectDescriptorSetGroupsUnsafe(descriptorsToDestroy);
for (size_t i = 0; i < ThreadType::Count; ++i)
{
for (auto & q : m_queuesToDestroy[i])
CollectObjectsImpl(q);
}
for (auto const & s : m_samplers)
vkDestroySampler(m_device, s.second, nullptr);
@@ -86,8 +84,8 @@ void VulkanObjectManager::SetCurrentInflightFrameIndex(uint32_t index)
m_currentInflightFrameIndex = index;
}
VulkanObject VulkanObjectManager::CreateBuffer(VulkanMemoryManager::ResourceType resourceType,
uint32_t sizeInBytes, uint64_t batcherHash)
VulkanObject VulkanObjectManager::CreateBuffer(VulkanMemoryManager::ResourceType resourceType, uint32_t sizeInBytes,
uint64_t batcherHash)
{
VulkanObject result;
VkBufferCreateInfo info = {};
@@ -97,8 +95,8 @@ VulkanObject VulkanObjectManager::CreateBuffer(VulkanMemoryManager::ResourceType
info.size = sizeInBytes;
if (resourceType == VulkanMemoryManager::ResourceType::Geometry)
{
info.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT |
VK_BUFFER_USAGE_TRANSFER_DST_BIT;
info.usage =
VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
}
else if (resourceType == VulkanMemoryManager::ResourceType::Uniform)
{
@@ -117,11 +115,14 @@ VulkanObject VulkanObjectManager::CreateBuffer(VulkanMemoryManager::ResourceType
info.queueFamilyIndexCount = 1;
info.pQueueFamilyIndices = &m_queueFamilyIndex;
CHECK_VK_CALL(vkCreateBuffer(m_device, &info, nullptr, &result.m_buffer));
SET_DEBUG_NAME_VK(VK_OBJECT_TYPE_BUFFER, result.m_buffer,
((resourceType == VulkanMemoryManager::ResourceType::Geometry ? "B: Geometry (" :
(resourceType == VulkanMemoryManager::ResourceType::Uniform ? "B: Uniform (" :
"B: Staging (")) + std::to_string(sizeInBytes) + " bytes)").c_str());
SET_DEBUG_NAME_VK(
VK_OBJECT_TYPE_BUFFER, result.m_buffer,
((resourceType == VulkanMemoryManager::ResourceType::Geometry
? "B: Geometry ("
: (resourceType == VulkanMemoryManager::ResourceType::Uniform ? "B: Uniform (" : "B: Staging (")) +
std::to_string(sizeInBytes) + " bytes)")
.c_str());
VkMemoryRequirements memReqs = {};
vkGetBufferMemoryRequirements(m_device, result.m_buffer, &memReqs);
@@ -129,8 +130,7 @@ VulkanObject VulkanObjectManager::CreateBuffer(VulkanMemoryManager::ResourceType
{
std::lock_guard<std::mutex> lock(m_mutex);
result.m_allocation = m_memoryManager.Allocate(resourceType, memReqs, batcherHash);
CHECK_VK_CALL(vkBindBufferMemory(m_device, result.m_buffer, result.GetMemory(),
result.GetAlignedOffset()));
CHECK_VK_CALL(vkBindBufferMemory(m_device, result.m_buffer, result.GetMemory(), result.GetAlignedOffset()));
}
#ifdef ENABLE_TRACE
@@ -156,7 +156,7 @@ VulkanObject VulkanObjectManager::CreateImage(VkImageUsageFlags usageFlags, VkFo
imageCreateInfo.tiling = tiling;
imageCreateInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
imageCreateInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
imageCreateInfo.extent = { width, height, 1 };
imageCreateInfo.extent = {width, height, 1};
imageCreateInfo.usage = usageFlags | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
CHECK_VK_CALL(vkCreateImage(m_device, &imageCreateInfo, nullptr, &result.m_image));
@@ -165,10 +165,9 @@ VulkanObject VulkanObjectManager::CreateImage(VkImageUsageFlags usageFlags, VkFo
{
std::lock_guard<std::mutex> lock(m_mutex);
result.m_allocation = m_memoryManager.Allocate(VulkanMemoryManager::ResourceType::Image,
memReqs, 0 /* blockHash */);
CHECK_VK_CALL(vkBindImageMemory(m_device, result.m_image,
result.GetMemory(), result.GetAlignedOffset()));
result.m_allocation =
m_memoryManager.Allocate(VulkanMemoryManager::ResourceType::Image, memReqs, 0 /* blockHash */);
CHECK_VK_CALL(vkBindImageMemory(m_device, result.m_image, result.GetMemory(), result.GetAlignedOffset()));
}
VkImageViewCreateInfo viewCreateInfo = {};
@@ -183,8 +182,8 @@ VulkanObject VulkanObjectManager::CreateImage(VkImageUsageFlags usageFlags, VkFo
}
else
{
viewCreateInfo.components = {VK_COMPONENT_SWIZZLE_R, VK_COMPONENT_SWIZZLE_G,
VK_COMPONENT_SWIZZLE_B, VK_COMPONENT_SWIZZLE_A};
viewCreateInfo.components = {VK_COMPONENT_SWIZZLE_R, VK_COMPONENT_SWIZZLE_G, VK_COMPONENT_SWIZZLE_B,
VK_COMPONENT_SWIZZLE_A};
}
viewCreateInfo.subresourceRange.aspectMask = aspectFlags;
viewCreateInfo.subresourceRange.baseMipLevel = 0;
@@ -202,7 +201,8 @@ VulkanObject VulkanObjectManager::CreateImage(VkImageUsageFlags usageFlags, VkFo
return result;
}
DescriptorSetGroup VulkanObjectManager::CreateDescriptorSetGroup(ref_ptr<VulkanGpuProgram> program) {
DescriptorSetGroup VulkanObjectManager::CreateDescriptorSetGroup(ref_ptr<VulkanGpuProgram> program)
{
CHECK(std::this_thread::get_id() == m_renderers[ThreadType::Frontend], ());
DescriptorSetGroup s;
@@ -296,8 +296,8 @@ void VulkanObjectManager::CollectDescriptorSetGroupsUnsafe(DescriptorSetGroupArr
for (auto const & d : descriptors)
{
CHECK_LESS(d.m_descriptorPoolIndex, m_descriptorPools.size(), ());
CHECK_VK_CALL(vkFreeDescriptorSets(m_device, m_descriptorPools[d.m_descriptorPoolIndex].m_pool,
1 /* count */, &d.m_descriptorSet));
CHECK_VK_CALL(vkFreeDescriptorSets(m_device, m_descriptorPools[d.m_descriptorPoolIndex].m_pool, 1 /* count */,
&d.m_descriptorSet));
m_descriptorPools[d.m_descriptorPoolIndex].m_availableSetsCount++;
}
descriptors.clear();
@@ -326,10 +326,7 @@ void VulkanObjectManager::CollectObjectsForThread(VulkanObjectArray & objects)
std::vector<VulkanObject> queueToDestroy;
std::swap(objects, queueToDestroy);
DrapeRoutine::Run([this, queueToDestroy = std::move(queueToDestroy)]()
{
CollectObjectsImpl(queueToDestroy);
});
DrapeRoutine::Run([this, queueToDestroy = std::move(queueToDestroy)]() { CollectObjectsImpl(queueToDestroy); });
}
void VulkanObjectManager::CollectObjectsImpl(VulkanObjectArray const & objects)
@@ -345,9 +342,7 @@ void VulkanObjectManager::CollectObjectsImpl(VulkanObjectArray const & objects)
#endif
}
if (obj.m_imageView != VK_NULL_HANDLE)
{
vkDestroyImageView(m_device, obj.m_imageView, nullptr);
}
if (obj.m_image != VK_NULL_HANDLE)
{
vkDestroyImage(m_device, obj.m_image, nullptr);
@@ -361,10 +356,8 @@ void VulkanObjectManager::CollectObjectsImpl(VulkanObjectArray const & objects)
std::lock_guard<std::mutex> lock(m_mutex);
m_memoryManager.BeginDeallocationSession();
for (auto const & obj : objects)
{
if (obj.m_allocation)
m_memoryManager.Deallocate(obj.m_allocation);
}
m_memoryManager.EndDeallocationSession();
}
@@ -389,8 +382,8 @@ uint8_t * VulkanObjectManager::MapUnsafe(VulkanObject object)
CHECK(object.m_buffer != VK_NULL_HANDLE || object.m_image != VK_NULL_HANDLE, ());
uint8_t * ptr = nullptr;
CHECK_VK_CALL(vkMapMemory(m_device, object.GetMemory(), object.GetAlignedOffset(),
object.GetAlignedSize(), 0, reinterpret_cast<void **>(&ptr)));
CHECK_VK_CALL(vkMapMemory(m_device, object.GetMemory(), object.GetAlignedOffset(), object.GetAlignedSize(), 0,
reinterpret_cast<void **>(&ptr)));
object.m_allocation->m_memoryBlock->m_isBlocked = true;
return ptr;
}
@@ -436,10 +429,9 @@ void VulkanObjectManager::CreateDescriptorPool()
{
CHECK_GREATER(m_maxUniformBuffers, 0, ());
CHECK_GREATER(m_maxImageSamplers, 0, ());
std::vector<VkDescriptorPoolSize> poolSizes =
{
{VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, m_maxUniformBuffers * kMaxDescriptorsSetCount},
{VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, m_maxImageSamplers * kMaxDescriptorsSetCount},
std::vector<VkDescriptorPoolSize> poolSizes = {
{VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, m_maxUniformBuffers * kMaxDescriptorsSetCount},
{VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, m_maxImageSamplers * kMaxDescriptorsSetCount},
};
VkDescriptorPoolCreateInfo descriptorPoolInfo = {};

View File

@@ -53,8 +53,7 @@ class VulkanObjectManager
{
public:
VulkanObjectManager(VkDevice device, VkPhysicalDeviceLimits const & deviceLimits,
VkPhysicalDeviceMemoryProperties const & memoryProperties,
uint32_t queueFamilyIndex);
VkPhysicalDeviceMemoryProperties const & memoryProperties, uint32_t queueFamilyIndex);
~VulkanObjectManager();
enum ThreadType
@@ -68,8 +67,7 @@ public:
void SetCurrentInflightFrameIndex(uint32_t index);
VulkanObject CreateBuffer(VulkanMemoryManager::ResourceType resourceType,
uint32_t sizeInBytes, uint64_t batcherHash);
VulkanObject CreateBuffer(VulkanMemoryManager::ResourceType resourceType, uint32_t sizeInBytes, uint64_t batcherHash);
VulkanObject CreateImage(VkImageUsageFlags usageFlags, VkFormat format, VkImageTiling tiling,
VkImageAspectFlags aspectFlags, uint32_t width, uint32_t height);
DescriptorSetGroup CreateDescriptorSetGroup(ref_ptr<VulkanGpuProgram> program);
@@ -90,7 +88,7 @@ public:
void DestroyObjectUnsafe(VulkanObject object);
VkDevice GetDevice() const { return m_device; }
VulkanMemoryManager const & GetMemoryManager() const { return m_memoryManager; };
VulkanMemoryManager const & GetMemoryManager() const { return m_memoryManager; }
VkSampler GetSampler(SamplerKey const & key);
void SetMaxUniformBuffers(uint32_t maxUniformBuffers);

View File

@@ -48,8 +48,7 @@ void DescriptorSetGroup::Update(VkDevice device, std::vector<ParamDescriptor> co
}
}
vkUpdateDescriptorSets(device, static_cast<uint32_t>(writeDescriptorsCount),
writeDescriptorSets.data(), 0, nullptr);
vkUpdateDescriptorSets(device, static_cast<uint32_t>(writeDescriptorsCount), writeDescriptorSets.data(), 0, nullptr);
}
ParamDescriptorUpdater::ParamDescriptorUpdater(ref_ptr<VulkanObjectManager> objectManager)

View File

@@ -45,8 +45,7 @@ struct DescriptorSetGroup
explicit operator bool()
{
return m_descriptorSet != VK_NULL_HANDLE &&
m_descriptorPoolIndex != std::numeric_limits<uint32_t>::max();
return m_descriptorSet != VK_NULL_HANDLE && m_descriptorPoolIndex != std::numeric_limits<uint32_t>::max();
}
void Update(VkDevice device, std::vector<ParamDescriptor> const & descriptors);

View File

@@ -150,8 +150,7 @@ std::string GetDumpFilePath()
}
} // namespace
VulkanPipeline::VulkanPipeline(VkDevice device, uint32_t appVersionCode)
: m_appVersionCode(appVersionCode)
VulkanPipeline::VulkanPipeline(VkDevice device, uint32_t appVersionCode) : m_appVersionCode(appVersionCode)
{
// Read dump.
std::vector<uint8_t> dumpData;
@@ -177,8 +176,7 @@ VulkanPipeline::VulkanPipeline(VkDevice device, uint32_t appVersionCode)
}
catch (FileReader::Exception const & exception)
{
LOG(LWARNING, ("Exception while reading file:", dumpFilePath,
"reason:", exception.what()));
LOG(LWARNING, ("Exception while reading file:", dumpFilePath, "reason:", exception.what()));
}
}
@@ -230,16 +228,14 @@ void VulkanPipeline::Dump(VkDevice device)
}
catch (FileWriter::Exception const & exception)
{
LOG(LWARNING, ("Exception while writing file:", dumpFilePath,
"reason:", exception.what()));
LOG(LWARNING, ("Exception while writing file:", dumpFilePath, "reason:", exception.what()));
}
m_isChanged = false;
}
}
else
{
LOG(LWARNING, ("Maximum pipeline cache size exceeded (", cacheSize, "/", kMaxCacheSizeInBytes,
"bytes)"));
LOG(LWARNING, ("Maximum pipeline cache size exceeded (", cacheSize, "/", kMaxCacheSizeInBytes, "bytes)"));
}
}
}
@@ -299,8 +295,8 @@ VkPipeline VulkanPipeline::GetPipeline(VkDevice device, PipelineKey const & key)
// Blending.
VkPipelineColorBlendAttachmentState pipelineColorBlendAttachmentState = {};
pipelineColorBlendAttachmentState.colorWriteMask = VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT |
VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT;
pipelineColorBlendAttachmentState.colorWriteMask =
VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT | VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT;
pipelineColorBlendAttachmentState.blendEnable = key.m_blendingEnabled ? VK_TRUE : VK_FALSE;
if (key.m_blendingEnabled)
{
@@ -328,9 +324,8 @@ VkPipeline VulkanPipeline::GetPipeline(VkDevice device, PipelineKey const & key)
multisampleStateCreateInfo.rasterizationSamples = VK_SAMPLE_COUNT_1_BIT;
// Dynamic.
static std::array<VkDynamicState, 4> dynamicState = {
VK_DYNAMIC_STATE_VIEWPORT, VK_DYNAMIC_STATE_SCISSOR, VK_DYNAMIC_STATE_LINE_WIDTH,
VK_DYNAMIC_STATE_STENCIL_REFERENCE};
static std::array<VkDynamicState, 4> dynamicState = {VK_DYNAMIC_STATE_VIEWPORT, VK_DYNAMIC_STATE_SCISSOR,
VK_DYNAMIC_STATE_LINE_WIDTH, VK_DYNAMIC_STATE_STENCIL_REFERENCE};
VkPipelineDynamicStateCreateInfo dynamicStateCreateInfo = {};
dynamicStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
dynamicStateCreateInfo.pDynamicStates = dynamicState.data();
@@ -356,8 +351,8 @@ VkPipeline VulkanPipeline::GetPipeline(VkDevice device, PipelineKey const & key)
BindingDecl const & bindingDecl = key.m_bindingInfo[i].GetBindingDecl(j);
attributeDescriptions[bindingCounter].location = bindingCounter;
attributeDescriptions[bindingCounter].binding = static_cast<uint32_t>(i);
attributeDescriptions[bindingCounter].format = GetAttributeFormat(bindingDecl.m_componentCount,
bindingDecl.m_componentType);
attributeDescriptions[bindingCounter].format =
GetAttributeFormat(bindingDecl.m_componentCount, bindingDecl.m_componentType);
attributeDescriptions[bindingCounter].offset = bindingDecl.m_offset;
bindingCounter++;
@@ -378,8 +373,7 @@ VkPipeline VulkanPipeline::GetPipeline(VkDevice device, PipelineKey const & key)
if (key.m_depthStencil.m_depthEnabled)
{
depthStencilState.depthWriteEnable = VK_TRUE;
depthStencilState.depthCompareOp =
DecodeTestFunction(static_cast<uint8_t>(key.m_depthStencil.m_depthFunction));
depthStencilState.depthCompareOp = DecodeTestFunction(static_cast<uint8_t>(key.m_depthStencil.m_depthFunction));
}
else
{
@@ -391,25 +385,25 @@ VkPipeline VulkanPipeline::GetPipeline(VkDevice device, PipelineKey const & key)
{
depthStencilState.stencilTestEnable = VK_TRUE;
depthStencilState.front.compareOp =
DecodeTestFunction(GetStateByte(key.m_depthStencil.m_stencil, kStencilFrontFunctionByte));
DecodeTestFunction(GetStateByte(key.m_depthStencil.m_stencil, kStencilFrontFunctionByte));
depthStencilState.front.failOp =
DecodeStencilAction(GetStateByte(key.m_depthStencil.m_stencil, kStencilFrontFailActionByte));
depthStencilState.front.depthFailOp = DecodeStencilAction(
GetStateByte(key.m_depthStencil.m_stencil, kStencilFrontDepthFailActionByte));
DecodeStencilAction(GetStateByte(key.m_depthStencil.m_stencil, kStencilFrontFailActionByte));
depthStencilState.front.depthFailOp =
DecodeStencilAction(GetStateByte(key.m_depthStencil.m_stencil, kStencilFrontDepthFailActionByte));
depthStencilState.front.passOp =
DecodeStencilAction(GetStateByte(key.m_depthStencil.m_stencil, kStencilFrontPassActionByte));
DecodeStencilAction(GetStateByte(key.m_depthStencil.m_stencil, kStencilFrontPassActionByte));
depthStencilState.front.writeMask = 0xffffffff;
depthStencilState.front.compareMask = 0xffffffff;
depthStencilState.front.reference = 1;
depthStencilState.back.compareOp =
DecodeTestFunction(GetStateByte(key.m_depthStencil.m_stencil, kStencilBackFunctionByte));
DecodeTestFunction(GetStateByte(key.m_depthStencil.m_stencil, kStencilBackFunctionByte));
depthStencilState.back.failOp =
DecodeStencilAction(GetStateByte(key.m_depthStencil.m_stencil, kStencilBackFailActionByte));
depthStencilState.back.depthFailOp = DecodeStencilAction(
GetStateByte(key.m_depthStencil.m_stencil, kStencilBackDepthFailActionByte));
DecodeStencilAction(GetStateByte(key.m_depthStencil.m_stencil, kStencilBackFailActionByte));
depthStencilState.back.depthFailOp =
DecodeStencilAction(GetStateByte(key.m_depthStencil.m_stencil, kStencilBackDepthFailActionByte));
depthStencilState.back.passOp =
DecodeStencilAction(GetStateByte(key.m_depthStencil.m_stencil, kStencilBackPassActionByte));
DecodeStencilAction(GetStateByte(key.m_depthStencil.m_stencil, kStencilBackPassActionByte));
depthStencilState.back.writeMask = 0xffffffff;
depthStencilState.back.compareMask = 0xffffffff;
depthStencilState.back.reference = 1;
@@ -442,8 +436,8 @@ VkPipeline VulkanPipeline::GetPipeline(VkDevice device, PipelineKey const & key)
pipelineCreateInfo.pStages = shaders.data();
VkPipeline pipeline;
auto const result = vkCreateGraphicsPipelines(device, m_vulkanPipelineCache, 1,
&pipelineCreateInfo, nullptr, &pipeline);
auto const result =
vkCreateGraphicsPipelines(device, m_vulkanPipelineCache, 1, &pipelineCreateInfo, nullptr, &pipeline);
if (result == VK_INCOMPLETE)
{
// Some Adreno GPUs return this not standard compliant code.
@@ -467,17 +461,17 @@ void VulkanPipeline::DepthStencilKey::SetDepthTestEnabled(bool enabled)
{
m_depthEnabled = enabled;
}
void VulkanPipeline::DepthStencilKey::SetDepthTestFunction(TestFunction depthFunction)
{
m_depthFunction = depthFunction;
}
void VulkanPipeline::DepthStencilKey::SetStencilTestEnabled(bool enabled)
{
m_stencilEnabled = enabled;
}
void VulkanPipeline::DepthStencilKey::SetStencilFunction(StencilFace face, TestFunction stencilFunction)
{
switch (face)
@@ -494,7 +488,7 @@ void VulkanPipeline::DepthStencilKey::SetStencilFunction(StencilFace face, TestF
break;
}
}
void VulkanPipeline::DepthStencilKey::SetStencilActions(StencilFace face, StencilAction stencilFailAction,
StencilAction depthFailAction, StencilAction passAction)
{
@@ -525,13 +519,13 @@ bool VulkanPipeline::DepthStencilKey::operator<(DepthStencilKey const & rhs) con
{
if (m_depthEnabled != rhs.m_depthEnabled)
return m_depthEnabled < rhs.m_depthEnabled;
if (m_stencilEnabled != rhs.m_stencilEnabled)
return m_stencilEnabled < rhs.m_stencilEnabled;
if (m_depthFunction != rhs.m_depthFunction)
return m_depthFunction < rhs.m_depthFunction;
return m_stencil < rhs.m_stencil;
}
@@ -548,7 +542,7 @@ bool VulkanPipeline::PipelineKey::operator<(PipelineKey const & rhs) const
if (m_program != rhs.m_program)
return m_program < rhs.m_program;
if (m_depthStencil != rhs.m_depthStencil)
return m_depthStencil < rhs.m_depthStencil;
@@ -556,10 +550,8 @@ bool VulkanPipeline::PipelineKey::operator<(PipelineKey const & rhs) const
return m_bindingInfoCount < rhs.m_bindingInfoCount;
for (uint8_t i = 0; i < m_bindingInfoCount; ++i)
{
if (m_bindingInfo[i] != rhs.m_bindingInfo[i])
return m_bindingInfo[i] < rhs.m_bindingInfo[i];
}
if (m_primitiveTopology != rhs.m_primitiveTopology)
return m_primitiveTopology < rhs.m_primitiveTopology;

View File

@@ -22,11 +22,11 @@ public:
void SetDepthTestFunction(TestFunction depthFunction);
void SetStencilTestEnabled(bool enabled);
void SetStencilFunction(StencilFace face, TestFunction stencilFunction);
void SetStencilActions(StencilFace face, StencilAction stencilFailAction,
StencilAction depthFailAction, StencilAction passAction);
void SetStencilActions(StencilFace face, StencilAction stencilFailAction, StencilAction depthFailAction,
StencilAction passAction);
bool operator<(DepthStencilKey const & rhs) const;
bool operator!=(DepthStencilKey const & rhs) const;
bool m_depthEnabled = false;
bool m_stencilEnabled = false;
TestFunction m_depthFunction = TestFunction::Always;

View File

@@ -13,8 +13,7 @@ namespace vulkan
// The most GPUs use this value, real one can be known only after buffer creation.
uint32_t constexpr kDefaultAlignment = 64;
VulkanStagingBuffer::VulkanStagingBuffer(ref_ptr<VulkanObjectManager> objectManager,
uint32_t sizeInBytes)
VulkanStagingBuffer::VulkanStagingBuffer(ref_ptr<VulkanObjectManager> objectManager, uint32_t sizeInBytes)
: m_objectManager(objectManager)
, m_sizeInBytes(VulkanMemoryManager::GetAligned(sizeInBytes, kDefaultAlignment))
{
@@ -38,10 +37,8 @@ VulkanStagingBuffer::VulkanStagingBuffer(ref_ptr<VulkanObjectManager> objectMana
m_object = m_objectManager->CreateBuffer(kStagingBuffer, m_sizeInBytes, 0 /* batcherHash */);
vkGetBufferMemoryRequirements(device, m_object.m_buffer, &memReqs);
m_sizeAlignment = mm.GetSizeAlignment(memReqs);
CHECK(HasEnoughSpace(m_sizeInBytes), ("originalSize =", originalSize,
"originalAlignment =", originalAlignment,
"m_sizeInBytes =", m_sizeInBytes,
"m_sizeAlignment =", m_sizeAlignment));
CHECK(HasEnoughSpace(m_sizeInBytes), ("originalSize =", originalSize, "originalAlignment =", originalAlignment,
"m_sizeInBytes =", m_sizeInBytes, "m_sizeAlignment =", m_sizeAlignment));
}
m_offsetAlignment = mm.GetOffsetAlignment(kStagingBuffer);

View File

@@ -16,8 +16,7 @@ namespace vulkan
class VulkanStagingBuffer
{
public:
VulkanStagingBuffer(ref_ptr<VulkanObjectManager> objectManager,
uint32_t sizeInBytes);
VulkanStagingBuffer(ref_ptr<VulkanObjectManager> objectManager, uint32_t sizeInBytes);
~VulkanStagingBuffer();
struct StagingData

View File

@@ -2,7 +2,6 @@
#include "drape/vulkan/vulkan_base_context.hpp"
#include "drape/vulkan/vulkan_staging_buffer.hpp"
drape_ptr<dp::HWTextureAllocator> CreateVulkanAllocator()
{
return make_unique_dp<dp::vulkan::VulkanTextureAllocator>();
@@ -20,8 +19,7 @@ namespace vulkan
{
namespace
{
VkBufferImageCopy BufferCopyRegion(uint32_t x, uint32_t y, uint32_t width, uint32_t height,
uint32_t stagingOffset)
VkBufferImageCopy BufferCopyRegion(uint32_t x, uint32_t y, uint32_t width, uint32_t height, uint32_t stagingOffset)
{
VkBufferImageCopy bufferCopyRegion = {};
bufferCopyRegion.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
@@ -80,18 +78,17 @@ void VulkanTexture::Create(ref_ptr<dp::GraphicsContext> context, Params const &
// Create image.
if (params.m_format == TextureFormat::DepthStencil || params.m_format == TextureFormat::Depth)
{
m_aspectFlags =
params.m_format == TextureFormat::DepthStencil ? (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)
: VK_IMAGE_ASPECT_DEPTH_BIT;
m_textureObject = m_objectManager->CreateImage(VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT,
format, tiling, m_aspectFlags, params.m_width, params.m_height);
m_aspectFlags = params.m_format == TextureFormat::DepthStencil
? (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)
: VK_IMAGE_ASPECT_DEPTH_BIT;
m_textureObject = m_objectManager->CreateImage(VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, format, tiling,
m_aspectFlags, params.m_width, params.m_height);
}
else
{
m_aspectFlags = VK_IMAGE_ASPECT_COLOR_BIT;
m_textureObject = m_objectManager->CreateImage(VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT,
format, tiling, m_aspectFlags,
params.m_width, params.m_height);
format, tiling, m_aspectFlags, params.m_width, params.m_height);
}
}
else
@@ -115,8 +112,8 @@ void VulkanTexture::Create(ref_ptr<dp::GraphicsContext> context, Params const &
}
}
void VulkanTexture::UploadData(ref_ptr<dp::GraphicsContext> context, uint32_t x, uint32_t y,
uint32_t width, uint32_t height, ref_ptr<void> data)
void VulkanTexture::UploadData(ref_ptr<dp::GraphicsContext> context, uint32_t x, uint32_t y, uint32_t width,
uint32_t height, ref_ptr<void> data)
{
CHECK(m_isMutable, ("Upload data is avaivable only for mutable textures."));
CHECK(m_creationStagingBuffer == nullptr, ());
@@ -156,16 +153,16 @@ void VulkanTexture::UploadData(ref_ptr<dp::GraphicsContext> context, uint32_t x,
// Here we use VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, because we also read textures
// in vertex shaders.
MakeImageLayoutTransition(commandBuffer, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_TRANSFER_BIT,
VK_PIPELINE_STAGE_TRANSFER_BIT);
MakeImageLayoutTransition(
commandBuffer, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_TRANSFER_BIT,
VK_PIPELINE_STAGE_TRANSFER_BIT);
auto bufferCopyRegion = BufferCopyRegion(x, y, width, height, offset);
vkCmdCopyBufferToImage(commandBuffer, sb, m_textureObject.m_image,
VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &bufferCopyRegion);
vkCmdCopyBufferToImage(commandBuffer, sb, m_textureObject.m_image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1,
&bufferCopyRegion);
MakeImageLayoutTransition(commandBuffer, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
VK_PIPELINE_STAGE_TRANSFER_BIT,
MakeImageLayoutTransition(commandBuffer, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, VK_PIPELINE_STAGE_TRANSFER_BIT,
VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT);
}
@@ -180,17 +177,17 @@ void VulkanTexture::Bind(ref_ptr<dp::GraphicsContext> context) const
{
// Here we use VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, because we also read textures
// in vertex shaders.
MakeImageLayoutTransition(commandBuffer, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_TRANSFER_BIT,
VK_PIPELINE_STAGE_TRANSFER_BIT);
MakeImageLayoutTransition(
commandBuffer, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_TRANSFER_BIT,
VK_PIPELINE_STAGE_TRANSFER_BIT);
auto staging = m_creationStagingBuffer->GetReservationById(m_reservationId);
auto bufferCopyRegion = BufferCopyRegion(0, 0, GetWidth(), GetHeight(), staging.m_offset);
vkCmdCopyBufferToImage(commandBuffer, staging.m_stagingBuffer, m_textureObject.m_image,
VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &bufferCopyRegion);
MakeImageLayoutTransition(commandBuffer, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
VK_PIPELINE_STAGE_TRANSFER_BIT,
MakeImageLayoutTransition(commandBuffer, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, VK_PIPELINE_STAGE_TRANSFER_BIT,
VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT);
m_creationStagingBuffer.reset();
@@ -204,8 +201,7 @@ void VulkanTexture::SetFilter(TextureFilter filter)
bool VulkanTexture::Validate() const
{
return m_textureObject.m_image != VK_NULL_HANDLE &&
m_textureObject.m_imageView != VK_NULL_HANDLE;
return m_textureObject.m_image != VK_NULL_HANDLE && m_textureObject.m_imageView != VK_NULL_HANDLE;
}
SamplerKey VulkanTexture::GetSamplerKey() const
@@ -213,75 +209,63 @@ SamplerKey VulkanTexture::GetSamplerKey() const
return SamplerKey(m_params.m_filter, m_params.m_wrapSMode, m_params.m_wrapTMode);
}
void VulkanTexture::MakeImageLayoutTransition(VkCommandBuffer commandBuffer,
VkImageLayout newLayout,
void VulkanTexture::MakeImageLayoutTransition(VkCommandBuffer commandBuffer, VkImageLayout newLayout,
VkPipelineStageFlags srcStageMask,
VkPipelineStageFlags dstStageMask) const
{
VkAccessFlags srcAccessMask = 0;
VkAccessFlags dstAccessMask = 0;
VkPipelineStageFlags const noAccessMask = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT |
VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT |
VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT |
VK_PIPELINE_STAGE_ALL_COMMANDS_BIT;
VkPipelineStageFlags const noAccessMask = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT | VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT |
VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT | VK_PIPELINE_STAGE_ALL_COMMANDS_BIT;
VkPipelineStageFlags srcRemainingMask = srcStageMask & ~noAccessMask;
VkPipelineStageFlags dstRemainingMask = dstStageMask & ~noAccessMask;
auto const srcTestAndRemoveBit = [&](VkPipelineStageFlagBits stageBit,
VkAccessFlags accessBits) {
if (srcStageMask & stageBit)
auto const srcTestAndRemoveBit = [&](VkPipelineStageFlagBits stageBit, VkAccessFlags accessBits)
{
if (srcStageMask & stageBit)
{
srcAccessMask |= accessBits;
srcRemainingMask &= ~stageBit;
}
}
};
srcTestAndRemoveBit(VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT,
VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT);
srcTestAndRemoveBit(VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT, VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT);
srcTestAndRemoveBit(VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT);
srcTestAndRemoveBit(VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT);
srcTestAndRemoveBit(VK_PIPELINE_STAGE_VERTEX_SHADER_BIT,
VK_ACCESS_SHADER_READ_BIT);
srcTestAndRemoveBit(VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, VK_ACCESS_SHADER_READ_BIT);
srcTestAndRemoveBit(VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
srcTestAndRemoveBit(VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_INPUT_ATTACHMENT_READ_BIT);
srcTestAndRemoveBit(VK_PIPELINE_STAGE_TRANSFER_BIT,
VK_ACCESS_TRANSFER_WRITE_BIT);
srcTestAndRemoveBit(VK_PIPELINE_STAGE_TRANSFER_BIT, VK_ACCESS_TRANSFER_WRITE_BIT);
srcTestAndRemoveBit(VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT,
VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT |
VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT);
srcTestAndRemoveBit(VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT,
VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT);
CHECK(srcRemainingMask == 0, ("Not implemented transition for src pipeline stage"));
auto const dstTestAndRemoveBit = [&](VkPipelineStageFlagBits stageBit,
VkAccessFlags accessBits) {
if (dstStageMask & stageBit)
auto const dstTestAndRemoveBit = [&](VkPipelineStageFlagBits stageBit, VkAccessFlags accessBits)
{
if (dstStageMask & stageBit)
{
dstAccessMask |= accessBits;
dstRemainingMask &= ~stageBit;
}
}
};
dstTestAndRemoveBit(VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT,
VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT);
dstTestAndRemoveBit(VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT, VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT);
dstTestAndRemoveBit(VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT,
VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT |
VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT);
VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT);
dstTestAndRemoveBit(VK_PIPELINE_STAGE_VERTEX_SHADER_BIT,
VK_ACCESS_SHADER_READ_BIT);
dstTestAndRemoveBit(VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, VK_ACCESS_SHADER_READ_BIT);
dstTestAndRemoveBit(VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
dstTestAndRemoveBit(VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_INPUT_ATTACHMENT_READ_BIT);
dstTestAndRemoveBit(VK_PIPELINE_STAGE_TRANSFER_BIT,
VK_ACCESS_TRANSFER_WRITE_BIT);
dstTestAndRemoveBit(VK_PIPELINE_STAGE_TRANSFER_BIT, VK_ACCESS_TRANSFER_WRITE_BIT);
CHECK(dstRemainingMask == 0, ("Not implemented transition for dest pipeline stage"));
@@ -301,8 +285,7 @@ void VulkanTexture::MakeImageLayoutTransition(VkCommandBuffer commandBuffer,
imageMemoryBarrier.subresourceRange.baseArrayLayer = 0;
imageMemoryBarrier.subresourceRange.layerCount = VK_REMAINING_ARRAY_LAYERS;
vkCmdPipelineBarrier(commandBuffer, srcStageMask, dstStageMask, 0, 0,
nullptr, 0, nullptr, 1, &imageMemoryBarrier);
vkCmdPipelineBarrier(commandBuffer, srcStageMask, dstStageMask, 0, 0, nullptr, 0, nullptr, 1, &imageMemoryBarrier);
m_currentLayout = newLayout;
}

View File

@@ -23,14 +23,14 @@ public:
class VulkanTexture : public HWTexture
{
using Base = HWTexture;
public:
explicit VulkanTexture(ref_ptr<VulkanTextureAllocator>) {}
~VulkanTexture() override;
void Create(ref_ptr<dp::GraphicsContext> context, Params const & params,
ref_ptr<void> data) override;
void UploadData(ref_ptr<dp::GraphicsContext> context, uint32_t x, uint32_t y, uint32_t width,
uint32_t height, ref_ptr<void> data) override;
void Create(ref_ptr<dp::GraphicsContext> context, Params const & params, ref_ptr<void> data) override;
void UploadData(ref_ptr<dp::GraphicsContext> context, uint32_t x, uint32_t y, uint32_t width, uint32_t height,
ref_ptr<void> data) override;
void Bind(ref_ptr<dp::GraphicsContext> context) const override;
void SetFilter(TextureFilter filter) override;
bool Validate() const override;
@@ -40,10 +40,8 @@ public:
SamplerKey GetSamplerKey() const;
VkImageLayout GetCurrentLayout() const { return m_currentLayout; }
void MakeImageLayoutTransition(VkCommandBuffer commandBuffer,
VkImageLayout newLayout,
VkPipelineStageFlags srcStageMask,
VkPipelineStageFlags dstStageMask) const;
void MakeImageLayoutTransition(VkCommandBuffer commandBuffer, VkImageLayout newLayout,
VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask) const;
private:
ref_ptr<VulkanObjectManager> m_objectManager;

View File

@@ -24,7 +24,7 @@ static bool gUse32bitDepth8bitStencil = false;
void DebugName::Init(VkInstance instance, VkDevice device)
{
vkSetDebugUtilsObjectNameEXT =
(PFN_vkSetDebugUtilsObjectNameEXT)vkGetInstanceProcAddr(instance, "vkSetDebugUtilsObjectNameEXT");
(PFN_vkSetDebugUtilsObjectNameEXT)vkGetInstanceProcAddr(instance, "vkSetDebugUtilsObjectNameEXT");
m_device = device;
}
@@ -33,13 +33,11 @@ void DebugName::Set(VkObjectType type, uint64_t handle, char const * name)
if (vkSetDebugUtilsObjectNameEXT == nullptr)
return;
VkDebugUtilsObjectNameInfoEXT const info = {
.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT,
.pNext = nullptr,
.objectType = type,
.objectHandle = handle,
.pObjectName = name
};
VkDebugUtilsObjectNameInfoEXT const info = {.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT,
.pNext = nullptr,
.objectType = type,
.objectHandle = handle,
.pObjectName = name};
CHECK_VK_CALL(vkSetDebugUtilsObjectNameEXT(m_device, &info));
}
@@ -109,9 +107,7 @@ VkFormat VulkanFormatUnpacker::m_bestDepthFormat = VK_FORMAT_UNDEFINED;
// static
bool VulkanFormatUnpacker::Init(VkPhysicalDevice gpu)
{
std::array<VkFormat, 3> depthFormats = {{VK_FORMAT_D32_SFLOAT,
VK_FORMAT_X8_D24_UNORM_PACK32,
VK_FORMAT_D16_UNORM}};
std::array<VkFormat, 3> depthFormats = {{VK_FORMAT_D32_SFLOAT, VK_FORMAT_X8_D24_UNORM_PACK32, VK_FORMAT_D16_UNORM}};
VkFormatProperties formatProperties;
for (auto depthFormat : depthFormats)
{
@@ -141,8 +137,7 @@ bool VulkanFormatUnpacker::Init(VkPhysicalDevice gpu)
}
}
std::array<VkFormat, 2> framebufferColorFormats = {{Unpack(TextureFormat::RGBA8),
Unpack(TextureFormat::RedGreen)}};
std::array<VkFormat, 2> framebufferColorFormats = {{Unpack(TextureFormat::RGBA8), Unpack(TextureFormat::RedGreen)}};
for (auto colorFormat : framebufferColorFormats)
{
vkGetPhysicalDeviceFormatProperties(gpu, colorFormat, &formatProperties);
@@ -167,12 +162,11 @@ VkFormat VulkanFormatUnpacker::Unpack(TextureFormat format)
#if defined(OMIM_OS_MAC)
case TextureFormat::DepthStencil: return VK_FORMAT_D32_SFLOAT_S8_UINT;
#else
case TextureFormat::DepthStencil: return gUse32bitDepth8bitStencil ? VK_FORMAT_D32_SFLOAT_S8_UINT : VK_FORMAT_D24_UNORM_S8_UINT;
case TextureFormat::DepthStencil:
return gUse32bitDepth8bitStencil ? VK_FORMAT_D32_SFLOAT_S8_UINT : VK_FORMAT_D24_UNORM_S8_UINT;
#endif
case TextureFormat::Depth: return m_bestDepthFormat;
case TextureFormat::Unspecified:
CHECK(false, ());
return VK_FORMAT_UNDEFINED;
case TextureFormat::Unspecified: CHECK(false, ()); return VK_FORMAT_UNDEFINED;
}
UNREACHABLE();
}

View File

@@ -28,7 +28,7 @@ private:
static VkFormat m_bestDepthFormat;
};
template<typename T>
template <typename T>
void SetStateByte(T & state, uint8_t value, uint8_t byteNumber)
{
auto const shift = byteNumber * 8;
@@ -36,7 +36,7 @@ void SetStateByte(T & state, uint8_t value, uint8_t byteNumber)
state = (state & mask) | (static_cast<T>(value) << shift);
}
template<typename T>
template <typename T>
uint8_t GetStateByte(T & state, uint8_t byteNumber)
{
return static_cast<uint8_t>((state >> byteNumber * 8) & 0xFF);
@@ -69,41 +69,50 @@ private:
} // namespace vulkan
} // namespace dp
#define LOG_ERROR_VK_CALL(method, statusCode) \
LOG(LDEBUG, ("Vulkan error:", #method, "finished with code", \
dp::vulkan::GetVulkanResultString(statusCode)))
#define LOG_ERROR_VK_CALL(method, statusCode) \
LOG(LDEBUG, ("Vulkan error:", #method, "finished with code", dp::vulkan::GetVulkanResultString(statusCode)))
#define LOG_ERROR_VK(message) LOG(LDEBUG, ("Vulkan error:", message))
#define CHECK_VK_CALL(method) \
do { \
VkResult const statusCode = method; \
CHECK(statusCode == VK_SUCCESS, ("Vulkan error:", #method, "finished with code", \
dp::vulkan::GetVulkanResultString(statusCode))); \
} while (false)
#define CHECK_VK_CALL(method) \
do \
{ \
VkResult const statusCode = method; \
CHECK(statusCode == VK_SUCCESS, \
("Vulkan error:", #method, "finished with code", dp::vulkan::GetVulkanResultString(statusCode))); \
} \
while (false)
#define CHECK_VK_CALL_EX(method, msg) \
do { \
VkResult const statusCode = method; \
#define CHECK_VK_CALL_EX(method, msg) \
do \
{ \
VkResult const statusCode = method; \
CHECK_EQUAL(statusCode, VK_SUCCESS, msg); \
} while (false)
} \
while (false)
#define CHECK_RESULT_VK_CALL(method, statusCode) \
do { \
CHECK(statusCode == VK_SUCCESS, ("Vulkan error:", #method, "finished with code", \
dp::vulkan::GetVulkanResultString(statusCode))); \
} while (false)
#define CHECK_RESULT_VK_CALL(method, statusCode) \
do \
{ \
CHECK(statusCode == VK_SUCCESS, \
("Vulkan error:", #method, "finished with code", dp::vulkan::GetVulkanResultString(statusCode))); \
} \
while (false)
#if defined(OMIM_OS_MAC) || defined(OMIM_OS_LINUX)
#define INIT_DEBUG_NAME_VK(instance, device) \
do { \
DebugName::Init(instance, device); \
} while (false)
do \
{ \
DebugName::Init(instance, device); \
} \
while (false)
#define SET_DEBUG_NAME_VK(type, handle, name) \
do { \
#define SET_DEBUG_NAME_VK(type, handle, name) \
do \
{ \
DebugName::Set(type, (uint64_t)handle, name); \
} while (false)
} \
while (false)
#else
#define INIT_DEBUG_NAME_VK(instance, device)
#define SET_DEBUG_NAME_VK(type, handle, name)

View File

@@ -21,10 +21,8 @@ namespace vulkan
class VulkanVertexArrayBufferImpl : public VertexArrayBufferImpl
{
public:
VulkanVertexArrayBufferImpl(ref_ptr<VertexArrayBuffer> buffer,
ref_ptr<VulkanObjectManager> objectManager,
BindingInfoArray && bindingInfo,
uint8_t bindingInfoCount)
VulkanVertexArrayBufferImpl(ref_ptr<VertexArrayBuffer> buffer, ref_ptr<VulkanObjectManager> objectManager,
BindingInfoArray && bindingInfo, uint8_t bindingInfoCount)
: m_vertexArrayBuffer(std::move(buffer))
, m_objectManager(objectManager)
, m_bindingInfo(std::move(bindingInfo))
@@ -32,23 +30,19 @@ public:
, m_descriptorUpdater(objectManager)
{}
~VulkanVertexArrayBufferImpl() override
{
m_descriptorUpdater.Destroy();
}
~VulkanVertexArrayBufferImpl() override { m_descriptorUpdater.Destroy(); }
bool Build(ref_ptr<GpuProgram> program) override
{
UNUSED_VALUE(program);
return true;
}
bool Bind() override { return true; }
void Unbind() override {}
void BindBuffers(dp::BuffersMap const & buffers) const override {}
void RenderRange(ref_ptr<GraphicsContext> context, bool drawAsLine,
IndicesRange const & range) override
void RenderRange(ref_ptr<GraphicsContext> context, bool drawAsLine, IndicesRange const & range) override
{
CHECK(m_vertexArrayBuffer->HasBuffers(), ());
@@ -56,20 +50,18 @@ public:
VkCommandBuffer commandBuffer = vulkanContext->GetCurrentRenderingCommandBuffer();
CHECK(commandBuffer != nullptr, ());
vulkanContext->SetPrimitiveTopology(drawAsLine ? VK_PRIMITIVE_TOPOLOGY_LINE_LIST :
VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST);
vulkanContext->SetPrimitiveTopology(drawAsLine ? VK_PRIMITIVE_TOPOLOGY_LINE_LIST
: VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST);
vulkanContext->SetBindingInfo(m_bindingInfo, m_bindingInfoCount);
m_descriptorUpdater.Update(context);
auto descriptorSet = m_descriptorUpdater.GetDescriptorSet();
uint32_t dynamicOffset = vulkanContext->GetCurrentDynamicBufferOffset();
vkCmdBindDescriptorSets(commandBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS,
vulkanContext->GetCurrentPipelineLayout(), 0, 1,
&descriptorSet, 1, &dynamicOffset);
vkCmdBindDescriptorSets(commandBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, vulkanContext->GetCurrentPipelineLayout(),
0, 1, &descriptorSet, 1, &dynamicOffset);
vkCmdBindPipeline(commandBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS,
vulkanContext->GetCurrentPipeline());
vkCmdBindPipeline(commandBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, vulkanContext->GetCurrentPipeline());
size_t constexpr kMaxBuffersCount = 4;
std::array<VkBuffer, kMaxBuffersCount> buffers = {};
@@ -100,7 +92,7 @@ public:
vkCmdDrawIndexed(commandBuffer, range.m_idxCount, 1, range.m_idxStart, 0, 0);
}
private:
ref_ptr<VertexArrayBuffer> m_vertexArrayBuffer;
ref_ptr<VulkanObjectManager> m_objectManager;
@@ -109,7 +101,7 @@ private:
ParamDescriptorUpdater m_descriptorUpdater;
};
} // namespace vulkan
drape_ptr<VertexArrayBufferImpl> VertexArrayBuffer::CreateImplForVulkan(ref_ptr<GraphicsContext> context,
ref_ptr<VertexArrayBuffer> buffer,
BindingInfoArray && bindingInfo,