| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright 2015 Google Inc. | 2 * Copyright 2015 Google Inc. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license that can be | 4 * Use of this source code is governed by a BSD-style license that can be |
| 5 * found in the LICENSE file. | 5 * found in the LICENSE file. |
| 6 */ | 6 */ |
| 7 | 7 |
| 8 #include "GrVkMemory.h" | 8 #include "GrVkMemory.h" |
| 9 | 9 |
| 10 #include "GrVkGpu.h" | 10 #include "GrVkGpu.h" |
| (...skipping 196 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 207 VkPipelineStageFlags GrVkMemory::LayoutToPipelineStageFlags(const VkImageLayout
layout) { | 207 VkPipelineStageFlags GrVkMemory::LayoutToPipelineStageFlags(const VkImageLayout
layout) { |
| 208 if (VK_IMAGE_LAYOUT_GENERAL == layout) { | 208 if (VK_IMAGE_LAYOUT_GENERAL == layout) { |
| 209 return VK_PIPELINE_STAGE_ALL_COMMANDS_BIT; | 209 return VK_PIPELINE_STAGE_ALL_COMMANDS_BIT; |
| 210 } else if (VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL == layout || | 210 } else if (VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL == layout || |
| 211 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL == layout) { | 211 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL == layout) { |
| 212 return VK_PIPELINE_STAGE_TRANSFER_BIT; | 212 return VK_PIPELINE_STAGE_TRANSFER_BIT; |
| 213 } else if (VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL == layout || | 213 } else if (VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL == layout || |
| 214 VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL == layout || | 214 VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL == layout || |
| 215 VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL == layout || | 215 VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL == layout || |
| 216 VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL == layout) { | 216 VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL == layout) { |
| 217 return VK_PIPELINE_STAGE_ALL_COMMANDS_BIT; | 217 return VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT; |
| 218 } else if (VK_IMAGE_LAYOUT_PREINITIALIZED == layout) { | 218 } else if (VK_IMAGE_LAYOUT_PREINITIALIZED == layout) { |
| 219 return VK_PIPELINE_STAGE_HOST_BIT; | 219 return VK_PIPELINE_STAGE_HOST_BIT; |
| 220 } | 220 } |
| 221 | 221 |
| 222 SkASSERT(VK_IMAGE_LAYOUT_UNDEFINED == layout); | 222 SkASSERT(VK_IMAGE_LAYOUT_UNDEFINED == layout); |
| 223 return VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT; | 223 return VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT; |
| 224 } | 224 } |
| 225 | 225 |
| 226 VkAccessFlags GrVkMemory::LayoutToSrcAccessMask(const VkImageLayout layout) { | 226 VkAccessFlags GrVkMemory::LayoutToSrcAccessMask(const VkImageLayout layout) { |
| 227 // Currently we assume we will never be doing any explicit shader writes (
 this doesn't include | 227 // Currently we assume we will never be doing any explicit shader writes (
 this doesn't include |
| (...skipping 363 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 591 fSubHeaps[i]->free(alloc); | 591 fSubHeaps[i]->free(alloc); |
| 592 fUsedSize -= alloc.fSize; | 592 fUsedSize -= alloc.fSize; |
| 593 return true; | 593 return true; |
| 594 } | 594 } |
| 595 } | 595 } |
| 596 | 596 |
| 597 return false; | 597 return false; |
| 598 } | 598 } |
| 599 | 599 |
| 600 | 600 |
| OLD | NEW |