| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright 2015 Google Inc. | 2 * Copyright 2015 Google Inc. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license that can be | 4 * Use of this source code is governed by a BSD-style license that can be |
| 5 * found in the LICENSE file. | 5 * found in the LICENSE file. |
| 6 */ | 6 */ |
| 7 | 7 |
| 8 #ifndef GrVkCommandBuffer_DEFINED | 8 #ifndef GrVkCommandBuffer_DEFINED |
| 9 #define GrVkCommandBuffer_DEFINED | 9 #define GrVkCommandBuffer_DEFINED |
| 10 | 10 |
| (...skipping 23 matching lines...) |
| 34 | 34 |
| 35 void pipelineBarrier(const GrVkGpu* gpu, | 35 void pipelineBarrier(const GrVkGpu* gpu, |
| 36 VkPipelineStageFlags srcStageMask, | 36 VkPipelineStageFlags srcStageMask, |
| 37 VkPipelineStageFlags dstStageMask, | 37 VkPipelineStageFlags dstStageMask, |
| 38 bool byRegion, | 38 bool byRegion, |
| 39 BarrierType barrierType, | 39 BarrierType barrierType, |
| 40 void* barrier) const; | 40 void* barrier) const; |
| 41 | 41 |
| 42 void bindVertexBuffer(GrVkGpu* gpu, GrVkVertexBuffer* vbuffer) { | 42 void bindVertexBuffer(GrVkGpu* gpu, GrVkVertexBuffer* vbuffer) { |
| 43 VkBuffer vkBuffer = vbuffer->buffer(); | 43 VkBuffer vkBuffer = vbuffer->buffer(); |
| | 44 // TODO: once vbuffer->offset() no longer always returns 0, we will need to track the offset |
| | 45 // to know if we can skip binding or not. |
| 44 if (!fBoundVertexBufferIsValid || vkBuffer != fBoundVertexBuffer) { | 46 if (!fBoundVertexBufferIsValid || vkBuffer != fBoundVertexBuffer) { |
| 45 VkDeviceSize offset = vbuffer->offset(); | 47 VkDeviceSize offset = vbuffer->offset(); |
| 46 GR_VK_CALL(gpu->vkInterface(), CmdBindVertexBuffers(fCmdBuffer, | 48 GR_VK_CALL(gpu->vkInterface(), CmdBindVertexBuffers(fCmdBuffer, |
| 47 0, | 49 0, |
| 48 1, | 50 1, |
| 49 &vkBuffer, | 51 &vkBuffer, |
| 50 &offset)); | 52 &offset)); |
| 51 fBoundVertexBufferIsValid = true; | 53 fBoundVertexBufferIsValid = true; |
| 52 fBoundVertexBuffer = vkBuffer; | 54 fBoundVertexBuffer = vkBuffer; |
| 53 addResource(vbuffer->resource()); | 55 addResource(vbuffer->resource()); |
| 54 } | 56 } |
| 55 } | 57 } |
| 56 | 58 |
| 57 void bindIndexBuffer(GrVkGpu* gpu, GrVkIndexBuffer* ibuffer) { | 59 void bindIndexBuffer(GrVkGpu* gpu, GrVkIndexBuffer* ibuffer) { |
| 58 VkBuffer vkBuffer = ibuffer->buffer(); | 60 VkBuffer vkBuffer = ibuffer->buffer(); |
| | 61 // TODO: once ibuffer->offset() no longer always returns 0, we will need to track the offset |
| | 62 // to know if we can skip binding or not. |
| 59 if (!fBoundIndexBufferIsValid || vkBuffer != fBoundIndexBuffer) { | 63 if (!fBoundIndexBufferIsValid || vkBuffer != fBoundIndexBuffer) { |
| 60 GR_VK_CALL(gpu->vkInterface(), CmdBindIndexBuffer(fCmdBuffer, | 64 GR_VK_CALL(gpu->vkInterface(), CmdBindIndexBuffer(fCmdBuffer, |
| 61 vkBuffer, | 65 vkBuffer, |
| 62 ibuffer->offset(), | 66 ibuffer->offset(), |
| 63 VK_INDEX_TYPE_UINT16)); | 67 VK_INDEX_TYPE_UINT16)); |
| 64 fBoundIndexBufferIsValid = true; | 68 fBoundIndexBufferIsValid = true; |
| 65 fBoundIndexBuffer = vkBuffer; | 69 fBoundIndexBuffer = vkBuffer; |
| 66 addResource(ibuffer->resource()); | 70 addResource(ibuffer->resource()); |
| 67 } | 71 } |
| 68 } | 72 } |
| (...skipping 270 matching lines...) |
| 339 } | 343 } |
| 340 | 344 |
| 341 void onFreeGPUData(const GrVkGpu* gpu) const override {} | 345 void onFreeGPUData(const GrVkGpu* gpu) const override {} |
| 342 | 346 |
| 343 friend class GrVkPrimaryCommandBuffer; | 347 friend class GrVkPrimaryCommandBuffer; |
| 344 | 348 |
| 345 typedef GrVkCommandBuffer INHERITED; | 349 typedef GrVkCommandBuffer INHERITED; |
| 346 }; | 350 }; |
| 347 | 351 |
| 348 #endif | 352 #endif |
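Note on the TODOs added above: once vbuffer->offset()/ibuffer->offset() can return nonzero values, comparing only the VkBuffer handle is no longer enough to decide whether a bind can be skipped; the cached state would also have to include the offset. A minimal sketch of what that could look like for the vertex-buffer path, assuming a hypothetical fBoundVertexBufferOffset member that is not part of this change:

    void bindVertexBuffer(GrVkGpu* gpu, GrVkVertexBuffer* vbuffer) {
        VkBuffer vkBuffer = vbuffer->buffer();
        VkDeviceSize offset = vbuffer->offset();
        // Rebind only if the buffer handle or the offset differs from the last bind.
        if (!fBoundVertexBufferIsValid ||
            vkBuffer != fBoundVertexBuffer ||
            offset != fBoundVertexBufferOffset) {  // hypothetical cached offset
            GR_VK_CALL(gpu->vkInterface(), CmdBindVertexBuffers(fCmdBuffer, 0, 1,
                                                                &vkBuffer, &offset));
            fBoundVertexBufferIsValid = true;
            fBoundVertexBuffer = vkBuffer;
            fBoundVertexBufferOffset = offset;
            addResource(vbuffer->resource());
        }
    }

The index-buffer path would cache and compare its offset the same way.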