Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 /* | 1 /* |
| 2 * Copyright 2015 Google Inc. | 2 * Copyright 2015 Google Inc. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license that can be | 4 * Use of this source code is governed by a BSD-style license that can be |
| 5 * found in the LICENSE file. | 5 * found in the LICENSE file. |
| 6 */ | 6 */ |
| 7 | 7 |
| 8 #include "GrVkMemory.h" | 8 #include "GrVkMemory.h" |
| 9 | 9 |
| 10 #include "GrVkGpu.h" | 10 #include "GrVkGpu.h" |
| (...skipping 43 matching lines...) | |
| 54 memory)); | 54 memory)); |
| 55 if (err) { | 55 if (err) { |
| 56 return false; | 56 return false; |
| 57 } | 57 } |
| 58 return true; | 58 return true; |
| 59 } | 59 } |
| 60 | 60 |
| 61 bool GrVkMemory::AllocAndBindBufferMemory(const GrVkGpu* gpu, | 61 bool GrVkMemory::AllocAndBindBufferMemory(const GrVkGpu* gpu, |
| 62 VkBuffer buffer, | 62 VkBuffer buffer, |
| 63 const VkMemoryPropertyFlags flags, | 63 const VkMemoryPropertyFlags flags, |
| 64 VkDeviceMemory* memory) { | 64 VkDeviceMemory* memory, |
| | 65 VkDeviceSize* offset) { |
| 65 const GrVkInterface* iface = gpu->vkInterface(); | 66 const GrVkInterface* iface = gpu->vkInterface(); |
| 66 VkDevice device = gpu->device(); | 67 VkDevice device = gpu->device(); |
| 67 | 68 |
| 68 VkMemoryRequirements memReqs; | 69 VkMemoryRequirements memReqs; |
| 69 GR_VK_CALL(iface, GetBufferMemoryRequirements(device, buffer, &memReqs)); | 70 GR_VK_CALL(iface, GetBufferMemoryRequirements(device, buffer, &memReqs)); |
| 70 | 71 |
| 71 if (!alloc_device_memory(gpu, &memReqs, flags, memory)) { | 72 if (!alloc_device_memory(gpu, &memReqs, flags, memory)) { |
| 72 return false; | 73 return false; |
| 73 } | 74 } |
| | 75 // for now, offset is always 0 |
| | 76 *offset = 0; |
| 74 | 77 |
| 75 // Bind Memory to queue | 78 // Bind Memory to device |
| 76 VkResult err = GR_VK_CALL(iface, BindBufferMemory(device, buffer, *memory, 0)); | 79 VkResult err = GR_VK_CALL(iface, BindBufferMemory(device, buffer, *memory, *offset)); |
| 77 if (err) { | 80 if (err) { |
| 78 GR_VK_CALL(iface, FreeMemory(device, *memory, nullptr)); | 81 GR_VK_CALL(iface, FreeMemory(device, *memory, nullptr)); |
| 79 return false; | 82 return false; |
| 80 } | 83 } |
| 81 return true; | 84 return true; |
| 82 } | 85 } |
| 83 | 86 |
| | 87 void GrVkMemory::FreeBufferMemory(const GrVkGpu* gpu, VkDeviceMemory memory, |

egdaniel (2016/05/31 13:22:14): Is the plan to treat buffer and image memory/freei
jvanverth1 (2016/05/31 22:05:57): At the very least they'll have different heaps. Bu

| | 88 VkDeviceSize offset) { |
| | 89 const GrVkInterface* iface = gpu->vkInterface(); |
| | 90 GR_VK_CALL(iface, FreeMemory(gpu->device(), memory, nullptr)); |
| | 91 } |
| | 92 |
| 84 bool GrVkMemory::AllocAndBindImageMemory(const GrVkGpu* gpu, | 93 bool GrVkMemory::AllocAndBindImageMemory(const GrVkGpu* gpu, |
| 85 VkImage image, | 94 VkImage image, |
| 86 const VkMemoryPropertyFlags flags, | 95 const VkMemoryPropertyFlags flags, |
| 87 VkDeviceMemory* memory) { | 96 VkDeviceMemory* memory, |
| | 97 VkDeviceSize* offset) { |
| 88 const GrVkInterface* iface = gpu->vkInterface(); | 98 const GrVkInterface* iface = gpu->vkInterface(); |
| 89 VkDevice device = gpu->device(); | 99 VkDevice device = gpu->device(); |
| 90 | 100 |
| 91 VkMemoryRequirements memReqs; | 101 VkMemoryRequirements memReqs; |
| 92 GR_VK_CALL(iface, GetImageMemoryRequirements(device, image, &memReqs)); | 102 GR_VK_CALL(iface, GetImageMemoryRequirements(device, image, &memReqs)); |
| 93 | 103 |
| 94 if (!alloc_device_memory(gpu, &memReqs, flags, memory)) { | 104 if (!alloc_device_memory(gpu, &memReqs, flags, memory)) { |
| 95 return false; | 105 return false; |
| 96 } | 106 } |
| | 107 // for now, offset is always 0 |
| | 108 *offset = 0; |
| 97 | 109 |
| 98 // Bind Memory to queue | 110 // Bind Memory to device |
| 99 VkResult err = GR_VK_CALL(iface, BindImageMemory(device, image, *memory, 0)); | 111 VkResult err = GR_VK_CALL(iface, BindImageMemory(device, image, *memory, *offset)); |
| 100 if (err) { | 112 if (err) { |
| 101 GR_VK_CALL(iface, FreeMemory(device, *memory, nullptr)); | 113 GR_VK_CALL(iface, FreeMemory(device, *memory, nullptr)); |
| 102 return false; | 114 return false; |
| 103 } | 115 } |
| 104 return true; | 116 return true; |
| 105 } | 117 } |
| 106 | 118 |
| | 119 void GrVkMemory::FreeImageMemory(const GrVkGpu* gpu, VkDeviceMemory memory, |
| | 120 VkDeviceSize offset) { |
| | 121 const GrVkInterface* iface = gpu->vkInterface(); |
| | 122 GR_VK_CALL(iface, FreeMemory(gpu->device(), memory, nullptr)); |
| | 123 } |
| | 124 |
| 107 VkPipelineStageFlags GrVkMemory::LayoutToPipelineStageFlags(const VkImageLayout layout) { | 125 VkPipelineStageFlags GrVkMemory::LayoutToPipelineStageFlags(const VkImageLayout layout) { |
| 108 if (VK_IMAGE_LAYOUT_GENERAL == layout) { | 126 if (VK_IMAGE_LAYOUT_GENERAL == layout) { |
| 109 return VK_PIPELINE_STAGE_ALL_COMMANDS_BIT; | 127 return VK_PIPELINE_STAGE_ALL_COMMANDS_BIT; |
| 110 } else if (VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL == layout || | 128 } else if (VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL == layout || |
| 111 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL == layout) { | 129 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL == layout) { |
| 112 return VK_PIPELINE_STAGE_TRANSFER_BIT; | 130 return VK_PIPELINE_STAGE_TRANSFER_BIT; |
| 113 } else if (VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL == layout || | 131 } else if (VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL == layout || |
| 114 VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL == layout || | 132 VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL == layout || |
| 115 VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL == layout || | 133 VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL == layout || |
| 116 VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL == layout) { | 134 VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL == layout) { |
| (...skipping 29 matching lines...) | |
| 146 flags = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT; | 164 flags = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT; |
| 147 } else if (VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL == layout) { | 165 } else if (VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL == layout) { |
| 148 flags = VK_ACCESS_TRANSFER_WRITE_BIT; | 166 flags = VK_ACCESS_TRANSFER_WRITE_BIT; |
| 149 } else if (VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL == layout) { | 167 } else if (VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL == layout) { |
| 150 flags = VK_ACCESS_TRANSFER_READ_BIT; | 168 flags = VK_ACCESS_TRANSFER_READ_BIT; |
| 151 } else if (VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL == layout) { | 169 } else if (VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL == layout) { |
| 152 flags = VK_ACCESS_SHADER_READ_BIT; | 170 flags = VK_ACCESS_SHADER_READ_BIT; |
| 153 } | 171 } |
| 154 return flags; | 172 return flags; |
| 155 } | 173 } |
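The LayoutToPipelineStageFlags helper above, together with the access-mask mapping visible at new lines 164-171 (its signature sits in the collapsed lines; it is assumed here to be GrVkMemory::LayoutToSrcAccessMask), is the kind of function a layout transition consumes when recording a barrier. Below is a minimal, hypothetical usage sketch against the raw Vulkan API; the free function, its parameters (cmdBuffer, image, the color-only subresource range), and the caller-supplied destination masks are illustrative assumptions, not part of this patch.

```cpp
// Hypothetical sketch only: derive the source half of an image barrier from
// the mapping helpers in this file. LayoutToSrcAccessMask is assumed from the
// collapsed lines; cmdBuffer, image, and the subresource range are placeholders.
static void transition_image_layout(const GrVkGpu* gpu,
                                    VkCommandBuffer cmdBuffer,
                                    VkImage image,
                                    VkImageLayout currentLayout,
                                    VkImageLayout newLayout,
                                    VkAccessFlags dstAccessMask,
                                    VkPipelineStageFlags dstStageMask) {
    const GrVkInterface* iface = gpu->vkInterface();

    VkImageMemoryBarrier barrier = {};
    barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
    barrier.srcAccessMask = GrVkMemory::LayoutToSrcAccessMask(currentLayout);
    barrier.dstAccessMask = dstAccessMask;
    barrier.oldLayout = currentLayout;
    barrier.newLayout = newLayout;
    barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    barrier.image = image;
    barrier.subresourceRange = { VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1 };

    // The source stage comes from the current layout; the destination stage
    // and access mask are supplied by whoever knows the upcoming use.
    VkPipelineStageFlags srcStageMask =
        GrVkMemory::LayoutToPipelineStageFlags(currentLayout);

    GR_VK_CALL(iface, CmdPipelineBarrier(cmdBuffer, srcStageMask, dstStageMask,
                                         0,             // dependencyFlags
                                         0, nullptr,    // global memory barriers
                                         0, nullptr,    // buffer memory barriers
                                         1, &barrier)); // image memory barrier
}
```

In Skia proper the command buffer is wrapped rather than passed around raw, so the real call site would differ; the sketch only shows where the two mapping functions plug in.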
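The new offset out-parameters (always 0 for now) and the FreeBufferMemory/FreeImageMemory entry points, which accept an offset they do not yet use, leave room for suballocating buffers and images out of larger device allocations, which is what the review exchange about separate heaps points toward. Below is a minimal sketch of how such a path could slot in, assuming a hypothetical GrVkHeap class that does not exist in this patch or in the current GrVkGpu API.

```cpp
// Hypothetical GrVkHeap interface, for illustration only.
class GrVkHeap {
public:
    // Carves a sub-range out of a larger VkDeviceMemory allocation that
    // satisfies the given requirements and property flags.
    bool alloc(const VkMemoryRequirements& memReqs, VkMemoryPropertyFlags flags,
               VkDeviceMemory* memory, VkDeviceSize* offset);
    // Returns the sub-range so the backing allocation can be reused.
    void free(VkDeviceMemory memory, VkDeviceSize offset);
};

// Sketch of a heap-backed buffer path; buffers and images would draw from
// separate heaps, per the review thread.
static bool alloc_and_bind_buffer_from_heap(const GrVkGpu* gpu,
                                            GrVkHeap* heap,
                                            VkBuffer buffer,
                                            VkMemoryPropertyFlags flags,
                                            VkDeviceMemory* memory,
                                            VkDeviceSize* offset) {
    const GrVkInterface* iface = gpu->vkInterface();
    VkDevice device = gpu->device();

    VkMemoryRequirements memReqs;
    GR_VK_CALL(iface, GetBufferMemoryRequirements(device, buffer, &memReqs));

    if (!heap->alloc(memReqs, flags, memory, offset)) {
        return false;
    }

    // Bind at the suballocated offset rather than always 0.
    VkResult err = GR_VK_CALL(iface,
                              BindBufferMemory(device, buffer, *memory, *offset));
    if (err) {
        heap->free(*memory, *offset);
        return false;
    }
    return true;
}
```

Plumbing the offset through Bind*Memory and the free path now means callers would not need further signature changes once allocations stop starting at offset 0.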