From 984a506cf9ba4a9221d6c1736337730fd03ed7d3 Mon Sep 17 00:00:00 2001
From: Karol Suprynowicz
Date: Mon, 6 Jan 2025 16:02:27 +0100
Subject: [PATCH] Vulkan validation error fixes

---
 libraries/gpu-vk/src/gpu/vk/VKBackend.cpp     | 127 ++++++++++++---
 .../gpu-vk/src/gpu/vk/VKPipelineCache.cpp     |  30 ++--
 libraries/gpu-vk/src/gpu/vk/VKShared.cpp      |  14 +-
 libraries/gpu-vk/src/gpu/vk/VKShared.h        |   3 +-
 libraries/gpu-vk/src/gpu/vk/VKTexture.cpp     |  13 +-
 libraries/vk/src/vk/Renderpasses.h            | 150 ------------------
 6 files changed, 144 insertions(+), 193 deletions(-)

diff --git a/libraries/gpu-vk/src/gpu/vk/VKBackend.cpp b/libraries/gpu-vk/src/gpu/vk/VKBackend.cpp
index 6eb4f51f069..a9930065518 100644
--- a/libraries/gpu-vk/src/gpu/vk/VKBackend.cpp
+++ b/libraries/gpu-vk/src/gpu/vk/VKBackend.cpp
@@ -889,16 +889,21 @@ void VKBackend::updateVkDescriptorWriteSetsTexture(VkDescriptorSet target) {
             // VKTODO: move vulkan texture creation to the transfer parts
             VKTexture* texture = syncGPUObject(*_resource._textures[i].texture);
             VkDescriptorImageInfo imageInfo{};
+            imageInfo.imageLayout = VK_IMAGE_LAYOUT_UNDEFINED;
             if (texture) {
                 imageInfo = texture->getDescriptorImageInfo();
-            } else {
+            }
+            if (imageInfo.imageLayout == VK_IMAGE_LAYOUT_UNDEFINED) {
                 qDebug() << "Writing descriptor " << i << " with texture: " << _resource._textures[i].texture->source();
-                if (_resource._textures[i].texture) {
+                if (texture) {
+                    qDebug() << "Warning: Texture " + _resource._textures[i].texture->source() + " being bound at input slot "
+                             << i << " is in VK_IMAGE_LAYOUT_UNDEFINED layout.";
+                } else if (_resource._textures[i].texture) {
                     qDebug() << "Cannot sync texture during descriptor " << i
                              << " write: " << _resource._textures[i].texture->source();
                 } else {
                     qDebug() << "Texture is null during descriptor " << i
-                             << " write: " << _resource._textures[i].texture->source();
+                             << " write.";
                 }
                 imageInfo = _defaultTextureImageInfo;
             }
@@ -1088,7 +1093,7 @@ void VKBackend::updateRenderPass() {
     auto framebuffer = syncGPUObject(*_cache.pipelineState.framebuffer);

     // Current render pass is already up to date.
-    // VKTODO: check if framebuffer has changed and if so update render pass too
+    // VKTODO: check if framebuffer has changed and if so update render pass too.
     if (_currentVkRenderPass == renderPass && _currentVkFramebuffer == framebuffer->vkFramebuffer) {
         return;
     }
@@ -2667,14 +2672,19 @@ void VKBackend::do_clearFramebuffer(const Batch& batch, size_t paramOffset) {
             Q_ASSERT(TextureUsageType::RENDERBUFFER == surface->getUsageType());
             vkTexture = backend->syncGPUObject(*surface.get());*/
             auto texture = renderBuffers[i]._texture;
-            VKAttachmentTexture *attachmentTexture = nullptr;
-            if (texture) {
-                auto gpuTexture = syncGPUObject(*texture);
-                if (gpuTexture) {
-                    attachmentTexture = dynamic_cast<VKAttachmentTexture*>(gpuTexture);
-                    Q_ASSERT(attachmentTexture);
-                }
+            if (!texture) {
+                // Last texture of the key can be depth stencil attachment
+                Q_ASSERT(i + 1 == key.size());
+                texture = framebuffer->getDepthStencilBuffer();
             }
+            Q_ASSERT(texture);
+            VKAttachmentTexture *attachmentTexture = nullptr;
+            //if (texture) {
+            auto gpuTexture = syncGPUObject(*texture);
+            Q_ASSERT(gpuTexture);
+            attachmentTexture = dynamic_cast<VKAttachmentTexture*>(gpuTexture);
+            Q_ASSERT(attachmentTexture);
+            //}
             VkAttachmentDescription attachment{};
             attachment.format = formatAndLayout.first;
             attachment.storeOp = VK_ATTACHMENT_STORE_OP_STORE;
@@ -2685,35 +2695,33 @@ void VKBackend::do_clearFramebuffer(const Batch& batch, size_t paramOffset) {
             }
             //attachment.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
             attachment.samples = VK_SAMPLE_COUNT_1_BIT;
-            if (isDepthStencilFormat(formatAndLayout.first)) {
+            if (texture->isDepthStencilRenderTarget()) {
                 clearValues.push_back(VkClearValue{.depthStencil = VkClearDepthStencilValue{ depth, (uint32_t)stencil }});
                 if (masks & Framebuffer::BUFFER_DEPTH) {
                     attachment.loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
+                    if (!formatHasStencil(attachment.format)) {
+                        attachment.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
+                    }
                 } else {
                     attachment.loadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
                 }
                 // Texture state needs to be updated
-                auto depthStencil = framebuffer->getDepthStencilBuffer();
-                Q_ASSERT(depthStencil);
-                auto gpuDepthStencil = syncGPUObject(*depthStencil);
-                Q_ASSERT(gpuDepthStencil);
-                auto depthStencilAttachmentTexture = dynamic_cast<VKAttachmentTexture*>(gpuDepthStencil);
-                Q_ASSERT(depthStencilAttachmentTexture);
-
-                if ((masks & Framebuffer::BUFFER_DEPTH) && (masks & Framebuffer::BUFFER_STENCIL)) {
+
+                if (((masks & Framebuffer::BUFFER_DEPTH) && (masks & Framebuffer::BUFFER_STENCIL))
+                    || ((masks & Framebuffer::BUFFER_DEPTH) && !formatHasStencil(attachment.format))) {
                     attachment.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
                     attachment.finalLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
-                    depthStencilAttachmentTexture->_vkImageLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
+                    attachmentTexture->_vkImageLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
                     depthReference.layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
                 } else {
-                    if (depthStencilAttachmentTexture->_vkImageLayout == VK_IMAGE_LAYOUT_GENERAL) {
+                    if (attachmentTexture->_vkImageLayout == VK_IMAGE_LAYOUT_GENERAL) {
                         attachment.initialLayout = VK_IMAGE_LAYOUT_GENERAL;
                         attachment.finalLayout = VK_IMAGE_LAYOUT_GENERAL;
                         depthReference.layout = VK_IMAGE_LAYOUT_GENERAL;
                     } else {
                         attachment.initialLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
                         attachment.finalLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
-                        depthStencilAttachmentTexture->_vkImageLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
+                        attachmentTexture->_vkImageLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
                         depthReference.layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
                     }
                 }
@@ -2782,7 +2790,80 @@ void VKBackend::do_clearFramebuffer(const Batch& batch, size_t paramOffset) {
     vkCmdBeginRenderPass(_currentCommandBuffer, &beginInfo, VK_SUBPASS_CONTENTS_INLINE);
     vkCmdEndRenderPass(_currentCommandBuffer);
     _currentFrame->_renderPasses.push_back(renderPass); // To be deleted after frame rendering is complete
+    // VKTODO: some sort of barrier is needed?
+    for (auto &renderBuffer : renderBuffers) {
+        auto texture = renderBuffer._texture;
+        if (!texture) {
+            continue;
+        }
+        auto gpuTexture = syncGPUObject(*texture);
+        Q_ASSERT(gpuTexture);
+        auto *attachmentTexture = dynamic_cast<VKAttachmentTexture*>(gpuTexture);
+        Q_ASSERT(attachmentTexture);
+        if (masks & Framebuffer::BUFFER_COLORS) {
+            if (attachmentTexture->_gpuObject.isDepthStencilRenderTarget()) {
+                // VKTODO: Check if the same depth render target is used as one of the inputs, if so then don't update it here
+                VkImageSubresourceRange mipSubRange = {};
+                mipSubRange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
+                mipSubRange.baseMipLevel = 0;
+                mipSubRange.levelCount = 1;
+                mipSubRange.layerCount = 1;
+                vks::tools::insertImageMemoryBarrier(_currentCommandBuffer, attachmentTexture->_vkImage,
+                                                     VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT,
+                                                     VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT,
+                                                     VK_IMAGE_LAYOUT_UNDEFINED,
+                                                     VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, // VKTODO: should be
+                                                     VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, // VKTODO
+                                                     VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, mipSubRange); // VKTODO: what stage mask for depth stencil?
+                attachmentTexture->_vkImageLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
+            } else {
+                VkImageSubresourceRange mipSubRange = {};
+                mipSubRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+                mipSubRange.baseMipLevel = 0;
+                mipSubRange.levelCount = 1;
+                mipSubRange.layerCount = 1;
+                vks::tools::insertImageMemoryBarrier(_currentCommandBuffer, attachmentTexture->_vkImage,
+                                                     VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
+                                                     VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
+                                                     VK_IMAGE_LAYOUT_UNDEFINED,
+                                                     VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, // VKTODO: should be
+                                                     VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, // VKTODO
+                                                     VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, mipSubRange);
+                attachmentTexture->_vkImageLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+            }
+        }
+    }
+
+    if (masks & Framebuffer::BUFFER_STENCIL || masks & Framebuffer::BUFFER_DEPTH) {
+        auto texture = framebuffer->getDepthStencilBuffer();
+        Q_ASSERT(texture);
+        auto gpuTexture = syncGPUObject(*texture);
+        Q_ASSERT(gpuTexture);
+        auto *attachmentTexture = dynamic_cast<VKAttachmentTexture*>(gpuTexture);
+        Q_ASSERT(attachmentTexture);
+
+        if ( attachmentTexture->_vkImage == (VkImage)(0x260000000026UL)) {
+            printf("0x260000000026UL");
+        }
+
+        VkImageSubresourceRange mipSubRange = {};
+        mipSubRange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
+        if (formatHasStencil(evalTexelFormatInternal(texture->getTexelFormat()))) {
+            mipSubRange.aspectMask |= VK_IMAGE_ASPECT_STENCIL_BIT;
+        }
+        mipSubRange.baseMipLevel = 0;
+        mipSubRange.levelCount = 1;
+        mipSubRange.layerCount = 1;
+        vks::tools::insertImageMemoryBarrier(_currentCommandBuffer, attachmentTexture->_vkImage,
+                                             VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT,
+                                             VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT,
+                                             VK_IMAGE_LAYOUT_UNDEFINED,
+                                             VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, // VKTODO: should be
+                                             VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, // VKTODO
+                                             VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, mipSubRange); // VKTODO: what stage mask for depth stencil?
+        attachmentTexture->_vkImageLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
+    }
     /*VKuint glmask = 0;
     if (masks & Framebuffer::BUFFER_STENCIL) {
diff --git a/libraries/gpu-vk/src/gpu/vk/VKPipelineCache.cpp b/libraries/gpu-vk/src/gpu/vk/VKPipelineCache.cpp
index de3c3163203..0df38081752 100644
--- a/libraries/gpu-vk/src/gpu/vk/VKPipelineCache.cpp
+++ b/libraries/gpu-vk/src/gpu/vk/VKPipelineCache.cpp
@@ -150,7 +150,8 @@ Cache::Pipeline::PipelineLayout Cache::Pipeline::getPipelineAndDescriptorLayout(
 Cache::Pipeline::RenderpassKey Cache::Pipeline::getRenderPassKey(gpu::Framebuffer* framebuffer) const {
     RenderpassKey result;
     if (!framebuffer) {
-        result.emplace_back(VK_FORMAT_R8G8B8A8_SRGB, VK_IMAGE_LAYOUT_UNDEFINED);
+        result.emplace_back(VK_FORMAT_R8G8B8A8_SRGB, VK_IMAGE_LAYOUT_UNDEFINED); // VKTODO: this is definitely wrong, why is it that way?
+        Q_ASSERT(false);
     } else {
         for (const auto& attachment : framebuffer->getRenderBuffers()) {
             if (attachment.isValid()) {
@@ -191,19 +192,19 @@ VkRenderPass Cache::Pipeline::getRenderPass(const vks::Context& context) {
     auto itr = _renderPassMap.find(key);
     if (itr == _renderPassMap.end()) {
         auto &renderBuffers = framebuffer->getRenderBuffers();
-        size_t renderBufferIndex = 0;
         std::vector<VkAttachmentDescription> attachments;
         attachments.reserve(key.size());
         std::vector<VkAttachmentReference> colorAttachmentReferences;
         VkAttachmentReference depthReference{};
-        for (const auto& formatAndLayout : key) {
-            Q_ASSERT(renderBufferIndex < renderBuffers.size());
-            std::shared_ptr<Texture> texture = renderBuffers[renderBufferIndex]._texture;
-            if (isDepthStencilFormat(formatAndLayout.first)) {
+        for (size_t i = 0; i < key.size(); i++) {
+            Q_ASSERT(i < renderBuffers.size());
+            std::shared_ptr<Texture> texture = renderBuffers[i]._texture;
+            if (!texture) {
+                // Last texture of the key can be depth stencil attachment
+                Q_ASSERT(i + 1 == key.size());
                 texture = framebuffer->getDepthStencilBuffer();
             } else {
-                texture = renderBuffers[renderBufferIndex]._texture;
-                renderBufferIndex++;
+                texture = renderBuffers[i]._texture;
             }
             VKAttachmentTexture *attachmentTexture = nullptr;
             if (texture) {
@@ -213,11 +214,11 @@ VkRenderPass Cache::Pipeline::getRenderPass(const vks::Context& context) {
                 }
             }
             VkAttachmentDescription attachment{};
-            attachment.format = formatAndLayout.first;
+            attachment.format = key[i].first;
             // Framebuffers are always cleared with a separate command in the renderer/
             if (!attachmentTexture || attachmentTexture->getVkImageLayout() == VK_IMAGE_LAYOUT_UNDEFINED) {
-                attachment.loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
-                attachment.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
+                attachment.loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
+                attachment.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
             } else {
                 attachment.loadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
                 attachment.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
@@ -226,10 +227,11 @@ VkRenderPass Cache::Pipeline::getRenderPass(const vks::Context& context) {
             attachment.stencilStoreOp = VK_ATTACHMENT_STORE_OP_STORE;
             //attachment.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
             attachment.samples = VK_SAMPLE_COUNT_1_BIT;
-            if (isDepthStencilFormat(formatAndLayout.first)) {
+            if (texture->isDepthStencilRenderTarget()) {
                 if (!attachmentTexture || attachmentTexture->getVkImageLayout() == VK_IMAGE_LAYOUT_UNDEFINED) {
                     attachment.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
                     attachment.finalLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
+                    depthReference.layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
                 } else {
                     if (attachmentTexture->getVkImageLayout() == VK_IMAGE_LAYOUT_GENERAL) {
                         attachment.initialLayout = VK_IMAGE_LAYOUT_GENERAL;
@@ -263,7 +265,9 @@ VkRenderPass Cache::Pipeline::getRenderPass(const vks::Context& context) {
         {
             VkSubpassDescription subpass{};
             subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
-            if (depthReference.layout != VK_IMAGE_LAYOUT_UNDEFINED) {
+            //if (depthReference.layout != VK_IMAGE_LAYOUT_UNDEFINED) {
+            if (framebuffer->getDepthStencilBuffer()) {
+                Q_ASSERT(depthReference.layout != VK_IMAGE_LAYOUT_UNDEFINED);
                 subpass.pDepthStencilAttachment = &depthReference;
             }
             subpass.colorAttachmentCount = (uint32_t)colorAttachmentReferences.size();
diff --git a/libraries/gpu-vk/src/gpu/vk/VKShared.cpp b/libraries/gpu-vk/src/gpu/vk/VKShared.cpp
index 2d561ce1dad..d3a7ed98fe9 100644
--- a/libraries/gpu-vk/src/gpu/vk/VKShared.cpp
+++ b/libraries/gpu-vk/src/gpu/vk/VKShared.cpp
@@ -428,7 +428,7 @@ VkFormat gpu::vk::evalTexelFormatInternal(const gpu::Element& dstFormat) {
     return result;
 }

-bool gpu::vk::isDepthStencilFormat(VkFormat format) {
+/*bool gpu::vk::isDepthStencilFormat(VkFormat format) {
     switch (format) {
         case VK_FORMAT_D16_UNORM:
         case VK_FORMAT_X8_D24_UNORM_PACK32:
@@ -442,6 +442,18 @@ bool gpu::vk::isDepthStencilFormat(VkFormat format) {
             break;
     }
     return false;
+}*/
+bool gpu::vk::formatHasStencil(VkFormat format) {
+    switch (format) {
+        case VK_FORMAT_S8_UINT:
+        case VK_FORMAT_D16_UNORM_S8_UINT:
+        case VK_FORMAT_D24_UNORM_S8_UINT:
+        case VK_FORMAT_D32_SFLOAT_S8_UINT:
+            return true;
+        default:
+            break;
+    }
+    return false;
 }

 VkColorComponentFlags gpu::vk::colorMaskToVk(const gpu::State::ColorMask &mask) {
diff --git a/libraries/gpu-vk/src/gpu/vk/VKShared.h b/libraries/gpu-vk/src/gpu/vk/VKShared.h
index 83dc6623875..3c2b4e4f31b 100644
--- a/libraries/gpu-vk/src/gpu/vk/VKShared.h
+++ b/libraries/gpu-vk/src/gpu/vk/VKShared.h
@@ -34,7 +34,8 @@ struct ShaderObject {

 VkFormat evalTexelFormatInternal(const Element& dstFormat);

-bool isDepthStencilFormat(VkFormat format);
+//bool isDepthStencilFormat(VkFormat format);
+bool formatHasStencil(VkFormat format);

 VkColorComponentFlags colorMaskToVk(const gpu::State::ColorMask &mask);

diff --git a/libraries/gpu-vk/src/gpu/vk/VKTexture.cpp b/libraries/gpu-vk/src/gpu/vk/VKTexture.cpp
index 62b1bed807d..de5435ccf3a 100644
--- a/libraries/gpu-vk/src/gpu/vk/VKTexture.cpp
+++ b/libraries/gpu-vk/src/gpu/vk/VKTexture.cpp
@@ -346,7 +346,7 @@ void VKStrictResourceTexture::createTexture(VKBackend &backend) {
     if (_gpuObject.getType() == Texture::TEX_CUBE) {
         Q_ASSERT(_gpuObject.getNumFaces() == 6);
         face_count = 6;
-    }else{
+    } else {
         Q_ASSERT(_gpuObject.getNumFaces() == 1);
     }

@@ -355,8 +355,8 @@ void VKStrictResourceTexture::createTexture(VKBackend &backend) {
     bool needsBGRToRGB = false;
     auto storedFormat = _gpuObject.getStoredMipFormat();
     auto texelFormat = _gpuObject.getTexelFormat();
-    if ((storedFormat.getSemantic() == gpu::BGRA || storedFormat.getSemantic() == gpu::SBGRA)
-        && !(texelFormat.getSemantic() == gpu::BGRA || texelFormat.getSemantic() == gpu::SBGRA)) {
+    if ((storedFormat.getSemantic() == gpu::BGRA || storedFormat.getSemantic() == gpu::SBGRA) &&
+        !(texelFormat.getSemantic() == gpu::BGRA || texelFormat.getSemantic() == gpu::SBGRA)) {
         needsBGRToRGB = true;
     }
     auto storedVkFormat = evalTexelFormatInternal(_gpuObject.getStoredMipFormat());
@@ -364,8 +364,8 @@ void VKStrictResourceTexture::createTexture(VKBackend &backend) {
     if (storedFormat.getDimension() != texelFormat.getDimension()) {
         if (storedFormat.getDimension() == gpu::VEC3 && texelFormat.getDimension() == gpu::VEC4) {
            // It's best to make sure that this is not happening in unexpected cases and causing bugs
-            Q_ASSERT((storedVkFormat == VK_FORMAT_R8G8B8_UNORM && texelVkFormat == VK_FORMAT_R8G8B8A8_UNORM)
-                || (storedVkFormat == VK_FORMAT_R8G8B8_UNORM && texelVkFormat == VK_FORMAT_R8G8B8A8_SRGB));
+            Q_ASSERT((storedVkFormat == VK_FORMAT_R8G8B8_UNORM && texelVkFormat == VK_FORMAT_R8G8B8A8_UNORM) ||
+                     (storedVkFormat == VK_FORMAT_R8G8B8_UNORM && texelVkFormat == VK_FORMAT_R8G8B8A8_SRGB));
             needsAddingAlpha = true;
         } else {
             qDebug() << "Format mismatch, stored: " << storedVkFormat << " texel: " << texelVkFormat;
@@ -408,6 +408,9 @@ void VKStrictResourceTexture::createTexture(VKBackend &backend) {
     allocationCI.usage = VMA_MEMORY_USAGE_GPU_ONLY;
     qDebug() << "storedSize: " << _gpuObject.getStoredSize();
     VK_CHECK_RESULT(vmaCreateImage(vks::Allocation::getAllocator(), &imageCI, &allocationCI, &_vkImage, &_vmaAllocation, nullptr));
+    /*if ( _vkImage != (VkImage)(0x260000000026UL)) {
+        printf("0x260000000026UL");
+    }*/
 }

 void VKStrictResourceTexture::transfer(VKBackend &backend) {
diff --git a/libraries/vk/src/vk/Renderpasses.h b/libraries/vk/src/vk/Renderpasses.h
index 7d9560e3500..6f70f09beec 100644
--- a/libraries/vk/src/vk/Renderpasses.h
+++ b/libraries/vk/src/vk/Renderpasses.h
@@ -1,151 +1 @@
 #pragma once
-
-#include "Config.h"
-#include "VulkanTools.h"
-
-namespace vks { namespace renderpasses {
-
-    struct AttachmentDescription : public VkAttachmentDescription {
-        AttachmentDescription& withFlags(VkAttachmentDescriptionFlags flags) {
-            this->flags = flags; return *this;
-        }
-        AttachmentDescription& withFormat(VkFormat format) {
-            this->format = format; return *this;
-        }
-        AttachmentDescription& withInitialLayout(VkImageLayout layout) {
-            this->initialLayout = layout; return *this;
-        }
-        AttachmentDescription& withFinalLayout(VkImageLayout layout) {
-            this->finalLayout = layout; return *this;
-        }
-        AttachmentDescription& withSampleCount(VkSampleCountFlagBits samples) {
-            this->samples = samples; return *this;
-        }
-        AttachmentDescription& withLoadOp(VkAttachmentLoadOp loadOp) {
-            this->loadOp = loadOp; return *this;
-        }
-        AttachmentDescription& withStoreOp(VkAttachmentStoreOp storeOp) {
-            this->storeOp = storeOp; return *this;
-        }
-        AttachmentDescription& withStencilLoadOp(VkAttachmentLoadOp loadOp) {
-            this->stencilLoadOp = loadOp; return *this;
-        }
-        AttachmentDescription& withStencilStoreOp(VkAttachmentStoreOp storeOp) {
-            this->stencilStoreOp = storeOp; return *this;
-        }
-    };
-
-    struct SubpassDescription : public VkSubpassDescription {
-        std::vector<VkAttachmentReference> colorAttachments;
-        std::vector<VkAttachmentReference> inputAttachments;
-        std::vector<VkAttachmentReference> resolveAttachments;
-        VkAttachmentReference depthStencilAttachment;
-        std::vector<uint32_t> preserveAttachments;
-
-        void update() {
-            this->colorAttachmentCount = (uint32_t)colorAttachments.size();
-            this->pColorAttachments = colorAttachments.data();
-            this->inputAttachmentCount = (uint32_t)inputAttachments.size();
-            this->pInputAttachments = inputAttachments.data();
-            this->pResolveAttachments = resolveAttachments.data();
-            this->pDepthStencilAttachment = &depthStencilAttachment;
-            this->preserveAttachmentCount = (uint32_t)preserveAttachments.size();
-            this->pPreserveAttachments = preserveAttachments.data();
-        }
-    };
-
-    struct RenderPassBuilder {
-        std::vector<VkAttachmentDescription> attachments;
-        std::vector<VkSubpassDependency> subpassDependencies;
-        std::vector<SubpassDescription> subpasses;
-
-        size_t addAttachment(const VkAttachmentDescription& attachment) {
-            attachments.push_back(attachment);
-            return attachments.size() - 1;
-        }
-
-        VkRenderPass build(const VkDevice& device) {
-            for (auto& subpass : subpasses) {
-                subpass.update();
-            }
-
-            VkRenderPassCreateInfo renderPassInfo{};
-            renderPassInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
-            renderPassInfo.attachmentCount = (uint32_t)attachments.size();
-            renderPassInfo.pAttachments = attachments.data();
-            renderPassInfo.subpassCount = (uint32_t)subpasses.size();
-            renderPassInfo.pSubpasses = subpasses.data();
-            renderPassInfo.dependencyCount = (uint32_t)subpassDependencies.size();
-            renderPassInfo.pDependencies = subpassDependencies.data();
-            VkRenderPass renderPass;
-            VK_CHECK_RESULT(vkCreateRenderPass(device, &renderPassInfo, nullptr, &renderPass));
-            return renderPass;
-        }
-    };
-
-#if 0
-subpass.pipelineBindPoint = vk::PipelineBindPoint::eGraphics;
-attachments.resize(colorFormats.size());
-colorAttachmentReferences.resize(attachments.size());
-// Color attachment
-for (uint32_t i = 0; i < attachments.size(); ++i) {
-    attachments[i].format = colorFormats[i];
-    attachments[i].loadOp = vk::AttachmentLoadOp::eClear;
-    attachments[i].storeOp = colorFinalLayout == vk::ImageLayout::eColorAttachmentOptimal ? vk::AttachmentStoreOp::eDontCare : vk::AttachmentStoreOp::eStore;
-    attachments[i].initialLayout = vk::ImageLayout::eUndefined;
-    attachments[i].finalLayout = colorFinalLayout;
-
-    vk::AttachmentReference& attachmentReference = colorAttachmentReferences[i];
-    attachmentReference.attachment = i;
-    attachmentReference.layout = vk::ImageLayout::eColorAttachmentOptimal;
-
-    subpass.colorAttachmentCount = (uint32_t)colorAttachmentReferences.size();
-    subpass.pColorAttachments = colorAttachmentReferences.data();
-}
-
-// Do we have a depth format?
-vk::AttachmentReference depthAttachmentReference;
-if (depthFormat != vk::Format::eUndefined) {
-    vk::AttachmentDescription depthAttachment;
-    depthAttachment.format = depthFormat;
-    depthAttachment.loadOp = vk::AttachmentLoadOp::eClear;
-    // We might be using the depth attacment for something, so preserve it if it's final layout is not undefined
-    depthAttachment.storeOp =
-        depthFinalLayout == vk::ImageLayout::eDepthStencilAttachmentOptimal ? vk::AttachmentStoreOp::eDontCare : vk::AttachmentStoreOp::eStore;
-    depthAttachment.initialLayout = vk::ImageLayout::eUndefined;
-    depthAttachment.finalLayout = depthFinalLayout;
-    attachments.push_back(depthAttachment);
-    depthAttachmentReference.attachment = (uint32_t)attachments.size() - 1;
-    depthAttachmentReference.layout = vk::ImageLayout::eDepthStencilAttachmentOptimal;
-    subpass.pDepthStencilAttachment = &depthAttachmentReference;
-}
-
-{
-    if ((colorFinalLayout != vk::ImageLayout::eColorAttachmentOptimal) && (colorFinalLayout != vk::ImageLayout::eUndefined)) {
-        // Implicit transition
-        vk::SubpassDependency dependency;
-        dependency.srcSubpass = 0;
-        dependency.srcAccessMask = vk::AccessFlagBits::eColorAttachmentWrite;
-        dependency.srcStageMask = vk::PipelineStageFlagBits::eColorAttachmentOutput;
-
-        dependency.dstSubpass = VK_SUBPASS_EXTERNAL;
-        dependency.dstAccessMask = vks::util::accessFlagsForLayout(colorFinalLayout);
-        dependency.dstStageMask = vk::PipelineStageFlagBits::eBottomOfPipe;
-        subpassDependencies.push_back(dependency);
-    }
-
-    if ((depthFinalLayout != vk::ImageLayout::eColorAttachmentOptimal) && (depthFinalLayout != vk::ImageLayout::eUndefined)) {
-        // Implicit transition
-        vk::SubpassDependency dependency;
-        dependency.srcSubpass = 0;
-        dependency.srcAccessMask = vk::AccessFlagBits::eDepthStencilAttachmentWrite;
-        dependency.srcStageMask = vk::PipelineStageFlagBits::eBottomOfPipe;
-
-        dependency.dstSubpass = VK_SUBPASS_EXTERNAL;
-        dependency.dstAccessMask = vks::util::accessFlagsForLayout(depthFinalLayout);
-        dependency.dstStageMask = vk::PipelineStageFlagBits::eBottomOfPipe;
-        subpassDependencies.push_back(dependency);
-    }
-#endif
-
-}} // namespace vks::renderpasses
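
A minimal standalone sketch of the idea behind the barriers added in do_clearFramebuffer above: the image aspect mask must be derived from the attachment's texel format, and the new gpu::vk::formatHasStencil() helper keeps depth-only formats from requesting a stencil aspect. Only formatHasStencil() mirrors code from this patch; depthStencilSubresourceRange() and the surrounding setup are illustrative names, not part of the tree.

// Sketch only: not part of the patch. formatHasStencil() mirrors the helper
// added in VKShared.cpp; depthStencilSubresourceRange() is a hypothetical helper.
#include <vulkan/vulkan.h>

static bool formatHasStencil(VkFormat format) {
    switch (format) {
        case VK_FORMAT_S8_UINT:
        case VK_FORMAT_D16_UNORM_S8_UINT:
        case VK_FORMAT_D24_UNORM_S8_UINT:
        case VK_FORMAT_D32_SFLOAT_S8_UINT:
            return true;
        default:
            return false;
    }
}

// Builds the subresource range for a whole-image depth/stencil barrier.
static VkImageSubresourceRange depthStencilSubresourceRange(VkFormat format) {
    VkImageSubresourceRange range{};
    range.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
    if (formatHasStencil(format)) {
        // Including the stencil aspect for a depth-only format would trigger
        // validation errors, so it is added only when the format carries stencil.
        range.aspectMask |= VK_IMAGE_ASPECT_STENCIL_BIT;
    }
    range.baseMipLevel = 0;
    range.levelCount = 1;
    range.baseArrayLayer = 0;
    range.layerCount = 1;
    return range;
}

With the format-dependent aspect mask, the same range builder works for both D32_SFLOAT and D24_UNORM_S8_UINT attachments.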