| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright 2015 Google Inc. | 2 * Copyright 2015 Google Inc. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license that can be | 4 * Use of this source code is governed by a BSD-style license that can be |
| 5 * found in the LICENSE file. | 5 * found in the LICENSE file. |
| 6 */ | 6 */ |
| 7 | 7 |
| 8 #ifndef GrVkCommandBuffer_DEFINED | 8 #ifndef GrVkCommandBuffer_DEFINED |
| 9 #define GrVkCommandBuffer_DEFINED | 9 #define GrVkCommandBuffer_DEFINED |
| 10 | 10 |
| (...skipping 23 matching lines...) |
| 34 | 34 |
| 35 void pipelineBarrier(const GrVkGpu* gpu, | 35 void pipelineBarrier(const GrVkGpu* gpu, |
| 36 VkPipelineStageFlags srcStageMask, | 36 VkPipelineStageFlags srcStageMask, |
| 37 VkPipelineStageFlags dstStageMask, | 37 VkPipelineStageFlags dstStageMask, |
| 38 bool byRegion, | 38 bool byRegion, |
| 39 BarrierType barrierType, | 39 BarrierType barrierType, |
| 40 void* barrier) const; | 40 void* barrier) const; |
| 41 | 41 |
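A note on how pipelineBarrier() is meant to be driven: the caller fills in one of the Vulkan barrier structs (VkMemoryBarrier, VkBufferMemoryBarrier or VkImageMemoryBarrier), tags it with the matching BarrierType, and passes it through the type-erased void* parameter; byRegion presumably becomes VK_DEPENDENCY_BY_REGION_BIT in the eventual vkCmdPipelineBarrier call. A minimal caller sketch, assuming the enumerator is spelled kImageMemory_BarrierType (the BarrierType enum itself sits in the lines skipped above):

    // Sketch only, not part of this CL: transition an image from transfer writes
    // to fragment-shader reads via pipelineBarrier(). "kImageMemory_BarrierType"
    // is an assumed enumerator name.
    static void transition_for_sampling(const GrVkGpu* gpu,
                                        GrVkCommandBuffer* cmdBuffer,
                                        VkImage image) {
        VkImageMemoryBarrier imageBarrier;
        memset(&imageBarrier, 0, sizeof(VkImageMemoryBarrier));
        imageBarrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
        imageBarrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
        imageBarrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
        imageBarrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
        imageBarrier.newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
        imageBarrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
        imageBarrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
        imageBarrier.image = image;  // VkImage backing the texture being sampled
        imageBarrier.subresourceRange = { VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1 };

        cmdBuffer->pipelineBarrier(gpu,
                                   VK_PIPELINE_STAGE_TRANSFER_BIT,
                                   VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
                                   false,  // byRegion
                                   GrVkCommandBuffer::kImageMemory_BarrierType,  // assumed name
                                   &imageBarrier);
    }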
| 42 void bindVertexBuffer(GrVkGpu* gpu, GrVkVertexBuffer* vbuffer) { | 42 void bindVertexBuffer(GrVkGpu* gpu, GrVkVertexBuffer* vbuffer) { |
| 43 VkBuffer vkBuffer = vbuffer->buffer(); | 43 VkBuffer vkBuffer = vbuffer->buffer(); |
| 44 // TODO: once vbuffer->offset() no longer always returns 0, we will need to track the offset | |
| 45 // to know if we can skip binding or not. | |
| 46 if (!fBoundVertexBufferIsValid || vkBuffer != fBoundVertexBuffer) { | 44 if (!fBoundVertexBufferIsValid || vkBuffer != fBoundVertexBuffer) { |
| 47 VkDeviceSize offset = vbuffer->offset(); | 45 VkDeviceSize offset = vbuffer->offset(); |
| 48 GR_VK_CALL(gpu->vkInterface(), CmdBindVertexBuffers(fCmdBuffer, | 46 GR_VK_CALL(gpu->vkInterface(), CmdBindVertexBuffers(fCmdBuffer, |
| 49 0, | 47 0, |
| 50 1, | 48 1, |
| 51 &vkBuffer, | 49 &vkBuffer, |
| 52 &offset)); | 50 &offset)); |
| 53 fBoundVertexBufferIsValid = true; | 51 fBoundVertexBufferIsValid = true; |
| 54 fBoundVertexBuffer = vkBuffer; | 52 fBoundVertexBuffer = vkBuffer; |
| 55 addResource(vbuffer->resource()); | 53 addResource(vbuffer->resource()); |
| 56 } | 54 } |
| 57 } | 55 } |
| 58 | 56 |
| 59 void bindIndexBuffer(GrVkGpu* gpu, GrVkIndexBuffer* ibuffer) { | 57 void bindIndexBuffer(GrVkGpu* gpu, GrVkIndexBuffer* ibuffer) { |
| 60 VkBuffer vkBuffer = ibuffer->buffer(); | 58 VkBuffer vkBuffer = ibuffer->buffer(); |
| 61 // TODO: once ibuffer->offset() no longer always returns 0, we will need to track the offset | |
| 62 // to know if we can skip binding or not. | |
| 63 if (!fBoundIndexBufferIsValid || vkBuffer != fBoundIndexBuffer) { | 59 if (!fBoundIndexBufferIsValid || vkBuffer != fBoundIndexBuffer) { |
| 64 GR_VK_CALL(gpu->vkInterface(), CmdBindIndexBuffer(fCmdBuffer, | 60 GR_VK_CALL(gpu->vkInterface(), CmdBindIndexBuffer(fCmdBuffer, |
| 65 vkBuffer, | 61 vkBuffer, |
| 66 ibuffer->offset(), | 62 ibuffer->offset(), |
| 67 VK_INDEX_TYPE_UINT16)); | 63 VK_INDEX_TYPE_UINT16)); |
| 68 fBoundIndexBufferIsValid = true; | 64 fBoundIndexBufferIsValid = true; |
| 69 fBoundIndexBuffer = vkBuffer; | 65 fBoundIndexBuffer = vkBuffer; |
| 70 addResource(ibuffer->resource()); | 66 addResource(ibuffer->resource()); |
| 71 } | 67 } |
| 72 } | 68 } |
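Both binds above share the same redundant-bind elision: the command buffer remembers the last VkBuffer handle it bound and only re-issues the Vulkan bind command when the handle changes. As the removed TODOs note, the cache keys on the buffer handle only, which is safe only while the offset is always 0. Stripped of the GrVk wrappers, the pattern looks roughly like the sketch below; names and the standalone helper are illustrative, not code from this CL:

    // Illustrative sketch of the caching pattern in bindVertexBuffer(), written
    // against the raw Vulkan API; struct and function names are hypothetical.
    struct VertexBindCache {
        VkBuffer fBound = VK_NULL_HANDLE;
        bool     fValid = false;
    };

    static void bind_vertex_buffer_cached(VkCommandBuffer cmd, VertexBindCache* cache,
                                          VkBuffer buffer, VkDeviceSize offset) {
        // Skip the bind if the same buffer is already bound. As in the header
        // above, the check ignores the offset, which only holds up while the
        // offset is always 0.
        if (!cache->fValid || buffer != cache->fBound) {
            vkCmdBindVertexBuffers(cmd, 0, 1, &buffer, &offset);
            cache->fValid = true;
            cache->fBound = buffer;
        }
    }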
| (...skipping 270 matching lines...) |
| 343 } | 339 } |
| 344 | 340 |
| 345 void onFreeGPUData(const GrVkGpu* gpu) const override {} | 341 void onFreeGPUData(const GrVkGpu* gpu) const override {} |
| 346 | 342 |
| 347 friend class GrVkPrimaryCommandBuffer; | 343 friend class GrVkPrimaryCommandBuffer; |
| 348 | 344 |
| 349 typedef GrVkCommandBuffer INHERITED; | 345 typedef GrVkCommandBuffer INHERITED; |
| 350 }; | 346 }; |
| 351 | 347 |
| 352 #endif | 348 #endif |