| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright 2015 Google Inc. | 2 * Copyright 2015 Google Inc. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license that can be | 4 * Use of this source code is governed by a BSD-style license that can be |
| 5 * found in the LICENSE file. | 5 * found in the LICENSE file. |
| 6 */ | 6 */ |
| 7 | 7 |
| 8 #include "GrVkGpu.h" | 8 #include "GrVkGpu.h" |
| 9 #include "GrVkImage.h" | 9 #include "GrVkImage.h" |
| 10 #include "GrVkMemory.h" | 10 #include "GrVkMemory.h" |
| (...skipping 13 matching lines...) |
| 24 return VK_IMAGE_ASPECT_COLOR_BIT; | 24 return VK_IMAGE_ASPECT_COLOR_BIT; |
| 25 } | 25 } |
| 26 } | 26 } |
| 27 | 27 |
| 28 void GrVkImage::setImageLayout(const GrVkGpu* gpu, VkImageLayout newLayout, | 28 void GrVkImage::setImageLayout(const GrVkGpu* gpu, VkImageLayout newLayout, |
| 29 VkAccessFlags dstAccessMask, | 29 VkAccessFlags dstAccessMask, |
| 30 VkPipelineStageFlags dstStageMask, | 30 VkPipelineStageFlags dstStageMask, |
| 31 bool byRegion) { | 31 bool byRegion) { |
| 32 SkASSERT(VK_IMAGE_LAYOUT_UNDEFINED != newLayout && | 32 SkASSERT(VK_IMAGE_LAYOUT_UNDEFINED != newLayout && |
| 33 VK_IMAGE_LAYOUT_PREINITIALIZED != newLayout); | 33 VK_IMAGE_LAYOUT_PREINITIALIZED != newLayout); |
| | 34 VkImageLayout currentLayout = this->currentLayout(); |
| 34 // Is this reasonable? Could someone want to keep the same layout but use the masks to force | 35 // Is this reasonable? Could someone want to keep the same layout but use the masks to force |
| 35 // a barrier on certain things? | 36 // a barrier on certain things? |
| 36 if (newLayout == fCurrentLayout) { | 37 if (newLayout == currentLayout) { |
| 37 return; | 38 return; |
| 38 } | 39 } |
| 39 | 40 |
| 40 VkAccessFlags srcAccessMask = GrVkMemory::LayoutToSrcAccessMask(fCurrentLayout); | 41 VkAccessFlags srcAccessMask = GrVkMemory::LayoutToSrcAccessMask(currentLayout); |
| 41 VkPipelineStageFlags srcStageMask = GrVkMemory::LayoutToPipelineStageFlags(fCurrentLayout); | 42 VkPipelineStageFlags srcStageMask = GrVkMemory::LayoutToPipelineStageFlags(currentLayout); |
| 42 | 43 |
| 43 VkImageAspectFlags aspectFlags = vk_format_to_aspect_flags(fResource->fFormat); | 44 VkImageAspectFlags aspectFlags = vk_format_to_aspect_flags(fInfo.fFormat); |
| 44 VkImageMemoryBarrier imageMemoryBarrier = { | 45 VkImageMemoryBarrier imageMemoryBarrier = { |
| 45 VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER, // sType | 46 VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER, // sType |
| 46 NULL, // pNext | 47 NULL, // pNext |
| 47 srcAccessMask, // outputMask | 48 srcAccessMask, // outputMask |
| 48 dstAccessMask, // inputMask | 49 dstAccessMask, // inputMask |
| 49 fCurrentLayout, // oldLayout | 50 currentLayout, // oldLayout |
| 50 newLayout, // newLayout | 51 newLayout, // newLayout |
| 51 VK_QUEUE_FAMILY_IGNORED, // srcQueueFamilyIndex | 52 VK_QUEUE_FAMILY_IGNORED, // srcQueueFamilyIndex |
| 52 VK_QUEUE_FAMILY_IGNORED, // dstQueueFamilyIndex | 53 VK_QUEUE_FAMILY_IGNORED, // dstQueueFamilyIndex |
| 53 fResource->fImage, // image | 54 fInfo.fImage, // image |
| 54 { aspectFlags, 0, fResource->fLevelCount, 0, 1 } // subresourceRange | 55 { aspectFlags, 0, fInfo.fLevelCount, 0, 1 } // subresourceRange |
| 55 }; | 56 }; |
| 56 | 57 |
| 57 gpu->addImageMemoryBarrier(srcStageMask, dstStageMask, byRegion, &imageMemoryBarrier); | 58 gpu->addImageMemoryBarrier(srcStageMask, dstStageMask, byRegion, &imageMemoryBarrier); |
| 58 | 59 |
| 59 fCurrentLayout = newLayout; | 60 fInfo.fImageLayout = newLayout; |
| 60 } | 61 } |
| 61 | 62 |
| 62 const GrVkImage::Resource* GrVkImage::CreateResource(const GrVkGpu* gpu, | 63 bool GrVkImage::GetImageInfo(const GrVkGpu* gpu, const ImageDesc& imageDesc, GrVkImageInfo* info) { |
| 63 const ImageDesc& imageDesc) { | |
| 64 VkImage image = 0; | 64 VkImage image = 0; |
| 65 VkDeviceMemory alloc; | 65 VkDeviceMemory alloc; |
| 66 | 66 |
| 67 VkImageLayout initialLayout = (VK_IMAGE_TILING_LINEAR == imageDesc.fImageTiling) | 67 VkImageLayout initialLayout = (VK_IMAGE_TILING_LINEAR == imageDesc.fImageTiling) |
| 68 ? VK_IMAGE_LAYOUT_PREINITIALIZED | 68 ? VK_IMAGE_LAYOUT_PREINITIALIZED |
| 69 : VK_IMAGE_LAYOUT_UNDEFINED; | 69 : VK_IMAGE_LAYOUT_UNDEFINED; |
| 70 | 70 |
| 71 // Create Image | 71 // Create Image |
| 72 VkSampleCountFlagBits vkSamples; | 72 VkSampleCountFlagBits vkSamples; |
| 73 if (!GrSampleCountToVkSampleCount(imageDesc.fSamples, &vkSamples)) { | 73 if (!GrSampleCountToVkSampleCount(imageDesc.fSamples, &vkSamples)) { |
| 74 return nullptr; | 74 return false; |
| 75 } | 75 } |
| 76 | 76 |
| 77 SkASSERT(VK_IMAGE_TILING_OPTIMAL == imageDesc.fImageTiling || | 77 SkASSERT(VK_IMAGE_TILING_OPTIMAL == imageDesc.fImageTiling || |
| 78 VK_SAMPLE_COUNT_1_BIT == vkSamples); | 78 VK_SAMPLE_COUNT_1_BIT == vkSamples); |
| 79 | 79 |
| 80 const VkImageCreateInfo imageCreateInfo = { | 80 const VkImageCreateInfo imageCreateInfo = { |
| 81 VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, // sType | 81 VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, // sType |
| 82 NULL, // pNext | 82 NULL, // pNext |
| 83 0, // VkImageCreateFlags | 83 0, // VkImageCreateFlags |
| 84 imageDesc.fImageType, // VkImageType | 84 imageDesc.fImageType, // VkImageType |
| 85 imageDesc.fFormat, // VkFormat | 85 imageDesc.fFormat, // VkFormat |
| 86 { imageDesc.fWidth, imageDesc.fHeight, 1 }, // VkExtent3D | 86 { imageDesc.fWidth, imageDesc.fHeight, 1 }, // VkExtent3D |
| 87 imageDesc.fLevels, // mipLevels | 87 imageDesc.fLevels, // mipLevels |
| 88 1, // arrayLayers | 88 1, // arrayLayers |
| 89 vkSamples, // samples | 89 vkSamples, // samples |
| 90 imageDesc.fImageTiling, // VkImageTiling | 90 imageDesc.fImageTiling, // VkImageTiling |
| 91 imageDesc.fUsageFlags, // VkImageUsageFlags | 91 imageDesc.fUsageFlags, // VkImageUsageFlags |
| 92 VK_SHARING_MODE_EXCLUSIVE, // VkSharingMode | 92 VK_SHARING_MODE_EXCLUSIVE, // VkSharingMode |
| 93 0, // queueFamilyCount | 93 0, // queueFamilyCount |
| 94 0, // pQueueFamilyIndices | 94 0, // pQueueFamilyIndices |
| 95 initialLayout // initialLayout | 95 initialLayout // initialLayout |
| 96 }; | 96 }; |
| 97 | 97 |
| 98 GR_VK_CALL_ERRCHECK(gpu->vkInterface(), CreateImage(gpu->device(), &imageCreateInfo, nullptr, &image)); | 98 GR_VK_CALL_ERRCHECK(gpu->vkInterface(), CreateImage(gpu->device(), &imageCreateInfo, nullptr, |
| | 99 &image)); |
| 99 | 100 |
| 100 if (!GrVkMemory::AllocAndBindImageMemory(gpu, image, imageDesc.fMemProps, &alloc)) { | 101 if (!GrVkMemory::AllocAndBindImageMemory(gpu, image, imageDesc.fMemProps, &alloc)) { |
| 101 VK_CALL(gpu, DestroyImage(gpu->device(), image, nullptr)); | 102 VK_CALL(gpu, DestroyImage(gpu->device(), image, nullptr)); |
| 102 return nullptr; | 103 return false; |
| 103 } | 104 } |
| 104 | 105 |
| 105 GrVkImage::Resource::Flags flags = | 106 info->fImage = image; |
| 106 (VK_IMAGE_TILING_LINEAR == imageDesc.fImageTiling) ? Resource::kLinearTiling_Flag | 107 info->fAlloc = alloc; |
| 107 : Resource::kNo_Flags; | 108 info->fImageTiling = imageDesc.fImageTiling; |
| | 109 info->fImageLayout = initialLayout; |
| | 110 info->fFormat = imageDesc.fFormat; |
| | 111 info->fLevelCount = imageDesc.fLevels; |
| | 112 return true; |
| | 113 } |
| 108 | 114 |
| 109 return (new GrVkImage::Resource(image, alloc, imageDesc.fFormat, imageDesc.fLevels, flags)); | 115 void GrVkImage::setNewResource(VkImage image, VkDeviceMemory alloc) { |
| | 116 fResource = new Resource(image, alloc); |
| 110 } | 117 } |
| 111 | 118 |
| 112 GrVkImage::~GrVkImage() { | 119 GrVkImage::~GrVkImage() { |
| 113 // should have been released or abandoned first | 120 // should have been released or abandoned first |
| 114 SkASSERT(!fResource); | 121 SkASSERT(!fResource); |
| 115 } | 122 } |
| 116 | 123 |
| 117 void GrVkImage::releaseImage(const GrVkGpu* gpu) { | 124 void GrVkImage::releaseImage(const GrVkGpu* gpu) { |
| 118 if (fResource) { | 125 if (fResource) { |
| 119 fResource->unref(gpu); | 126 fResource->unref(gpu); |
| 120 fResource = nullptr; | 127 fResource = nullptr; |
| 121 } | 128 } |
| 122 } | 129 } |
| 123 | 130 |
| 124 void GrVkImage::abandonImage() { | 131 void GrVkImage::abandonImage() { |
| 125 if (fResource) { | 132 if (fResource) { |
| 126 fResource->unrefAndAbandon(); | 133 fResource->unrefAndAbandon(); |
| 127 fResource = nullptr; | 134 fResource = nullptr; |
| 128 } | 135 } |
| 129 } | 136 } |
| 130 | 137 |
| 131 void GrVkImage::Resource::freeGPUData(const GrVkGpu* gpu) const { | 138 void GrVkImage::Resource::freeGPUData(const GrVkGpu* gpu) const { |
| 132 VK_CALL(gpu, DestroyImage(gpu->device(), fImage, nullptr)); | 139 VK_CALL(gpu, DestroyImage(gpu->device(), fImage, nullptr)); |
| 133 VK_CALL(gpu, FreeMemory(gpu->device(), fAlloc, nullptr)); | 140 VK_CALL(gpu, FreeMemory(gpu->device(), fAlloc, nullptr)); |
| 134 } | 141 } |
| 135 | 142 |
| 136 void GrVkImage::BorrowedResource::freeGPUData(const GrVkGpu* gpu) const { | 143 void GrVkImage::BorrowedResource::freeGPUData(const GrVkGpu* gpu) const { |
| 137 } | 144 } |
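Note: the right-hand side of this change replaces GrVkImage::CreateResource(), which returned a ref-counted Resource, with GrVkImage::GetImageInfo(), which fills a caller-owned GrVkImageInfo and reports success with a bool. The sketch below is a minimal illustration of that new path, not code from this change: the field values, the "gpu" pointer, and the surrounding function are assumptions, while the ImageDesc and GrVkImageInfo member names are taken from the diff above and their full declarations live in headers not shown here.

    // Illustrative sketch only. Assumes a valid GrVkGpu* named "gpu" and the
    // ImageDesc / GrVkImageInfo declarations from the relevant Skia headers.
    GrVkImage::ImageDesc imageDesc;
    imageDesc.fImageType   = VK_IMAGE_TYPE_2D;          // assumed 2D image
    imageDesc.fFormat      = VK_FORMAT_R8G8B8A8_UNORM;  // assumed format
    imageDesc.fWidth       = 256;                       // assumed dimensions
    imageDesc.fHeight      = 256;
    imageDesc.fLevels      = 1;                         // single mip level
    imageDesc.fSamples     = 1;                         // no multisampling
    imageDesc.fImageTiling = VK_IMAGE_TILING_OPTIMAL;
    imageDesc.fUsageFlags  = VK_IMAGE_USAGE_SAMPLED_BIT |
                             VK_IMAGE_USAGE_TRANSFER_DST_BIT;
    imageDesc.fMemProps    = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;

    GrVkImageInfo info;
    if (!GrVkImage::GetImageInfo(gpu, imageDesc, &info)) {
        // Image creation or memory binding failed; per the diff, any partially
        // created VkImage was already destroyed, so there is nothing to free.
        return;
    }
    // On success the caller owns info.fImage and info.fAlloc, and
    // info.fImageLayout holds the initial layout chosen from the tiling
    // (PREINITIALIZED for linear tiling, UNDEFINED for optimal tiling).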