OLD | NEW |
1 /* | 1 /* |
2 * Copyright 2011 Google Inc. | 2 * Copyright 2011 Google Inc. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license that can be | 4 * Use of this source code is governed by a BSD-style license that can be |
5 * found in the LICENSE file. | 5 * found in the LICENSE file. |
6 */ | 6 */ |
7 | 7 |
8 #include "GrInOrderDrawBuffer.h" | 8 #include "GrInOrderDrawBuffer.h" |
9 | 9 |
10 #include "GrBufferAllocPool.h" | 10 #include "GrBufferAllocPool.h" |
(...skipping 229 matching lines...) |
240 draw->fType = add_trace_bit(draw->fType); | 240 draw->fType = add_trace_bit(draw->fType); |
241 } | 241 } |
242 } | 242 } |
243 | 243 |
244 return instancesToConcat; | 244 return instancesToConcat; |
245 } | 245 } |
246 | 246 |
247 void GrInOrderDrawBuffer::onDraw(const GrPipelineBuilder& pipelineBuilder, | 247 void GrInOrderDrawBuffer::onDraw(const GrPipelineBuilder& pipelineBuilder, |
248 const GrGeometryProcessor* gp, | 248 const GrGeometryProcessor* gp, |
249 const DrawInfo& info, | 249 const DrawInfo& info, |
250 const GrScissorState& scissorState) { | 250 const GrScissorState& scissorState, |
| 251 const GrDeviceCoordTexture* dstCopy) { |
251 SkASSERT(info.vertexBuffer() && (!info.isIndexed() || info.indexBuffer())); | 252 SkASSERT(info.vertexBuffer() && (!info.isIndexed() || info.indexBuffer())); |
252 | 253 |
253 // This closeBatch call is required because we may introduce new draws when we setup clip | 254 // This closeBatch call is required because we may introduce new draws when we setup clip |
254 this->closeBatch(); | 255 this->closeBatch(); |
255 | 256 |
256 if (!this->recordStateAndShouldDraw(pipelineBuilder, gp, scissorState, info.getDevBounds())) { | 257 if (!this->recordStateAndShouldDraw(pipelineBuilder, gp, scissorState, dstCopy)) { |
257 return; | 258 return; |
258 } | 259 } |
259 | 260 |
260 Draw* draw; | 261 Draw* draw; |
261 if (info.isInstanced()) { | 262 if (info.isInstanced()) { |
262 int instancesConcated = this->concatInstancedDraw(pipelineBuilder, info); | 263 int instancesConcated = this->concatInstancedDraw(pipelineBuilder, info); |
263 if (info.instanceCount() > instancesConcated) { | 264 if (info.instanceCount() > instancesConcated) { |
264 draw = GrNEW_APPEND_TO_RECORDER(fCmdBuffer, Draw, (info)); | 265 draw = GrNEW_APPEND_TO_RECORDER(fCmdBuffer, Draw, (info)); |
265 draw->fInfo.adjustInstanceCount(-instancesConcated); | 266 draw->fInfo.adjustInstanceCount(-instancesConcated); |
266 } else { | 267 } else { |
267 return; | 268 return; |
268 } | 269 } |
269 } else { | 270 } else { |
270 draw = GrNEW_APPEND_TO_RECORDER(fCmdBuffer, Draw, (info)); | 271 draw = GrNEW_APPEND_TO_RECORDER(fCmdBuffer, Draw, (info)); |
271 } | 272 } |
272 this->recordTraceMarkersIfNecessary(); | 273 this->recordTraceMarkersIfNecessary(); |
273 } | 274 } |
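For readers skimming the hunk above: an instanced draw first tries to fold into the previous Draw command via concatInstancedDraw, and only appends a new Draw for the instances that could not be absorbed. A minimal sketch of that bookkeeping, using simplified stand-in types rather than the real recorder classes:

```cpp
#include <cassert>
#include <vector>

// Simplified stand-in for the recorded Draw command (not the Skia class).
struct Draw {
    int instanceCount;
};

// 'concatenated' is how many instances the previous Draw absorbed; append a
// new Draw only for the remainder, mirroring adjustInstanceCount(-concatenated).
int appendInstancedDraw(std::vector<Draw>& cmds, int newInstances, int concatenated) {
    assert(concatenated <= newInstances);
    int remaining = newInstances - concatenated;
    if (remaining > 0) {
        cmds.push_back(Draw{remaining});
    }
    return remaining;
}

int main() {
    std::vector<Draw> cmds{Draw{4}};
    // Suppose the previous Draw could absorb 3 of 5 new instances.
    int remaining = appendInstancedDraw(cmds, /*newInstances=*/5, /*concatenated=*/3);
    assert(remaining == 2 && cmds.size() == 2);
    return 0;
}
```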
274 | 275 |
275 void GrInOrderDrawBuffer::onDrawBatch(GrBatch* batch, | 276 void GrInOrderDrawBuffer::onDrawBatch(GrBatch* batch, |
276 const GrPipelineBuilder& pipelineBuilder, | 277 const GrPipelineBuilder& pipelineBuilder, |
277 const GrScissorState& scissorState, | 278 const GrScissorState& scissorState, |
278 const SkRect* devBounds) { | 279 const GrDeviceCoordTexture* dstCopy) { |
279 if (!this->recordStateAndShouldDraw(batch, pipelineBuilder, scissorState, devBounds)) { | 280 if (!this->recordStateAndShouldDraw(batch, pipelineBuilder, scissorState, dstCopy)) { |
280 return; | 281 return; |
281 } | 282 } |
282 | 283 |
283 // Check if there is a Batch Draw we can batch with | 284 // Check if there is a Batch Draw we can batch with |
284 if (kDrawBatch_Cmd != strip_trace_bit(fCmdBuffer.back().fType)) { | 285 if (kDrawBatch_Cmd != strip_trace_bit(fCmdBuffer.back().fType)) { |
285 fDrawBatch = GrNEW_APPEND_TO_RECORDER(fCmdBuffer, DrawBatch, (batch)); | 286 fDrawBatch = GrNEW_APPEND_TO_RECORDER(fCmdBuffer, DrawBatch, (batch)); |
286 return; | 287 return; |
287 } | 288 } |
288 | 289 |
289 DrawBatch* draw = static_cast<DrawBatch*>(&fCmdBuffer.back()); | 290 DrawBatch* draw = static_cast<DrawBatch*>(&fCmdBuffer.back()); |
(...skipping 18 matching lines...) |
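The start of onDrawBatch (above, before the elided lines) appends a new DrawBatch only when the command at the back of fCmdBuffer is not already a kDrawBatch_Cmd; otherwise it reuses the one at the back so adjacent batches can be combined. A rough sketch of that back-of-buffer check with placeholder types (the real code also strips a trace bit before comparing):

```cpp
#include <vector>

// Placeholder command record; the real recorder stores polymorphic commands.
enum class CmdType { kSetState, kDraw, kDrawBatch };
struct Cmd {
    CmdType type;
};

// Append a DrawBatch command only if the last recorded command is not one
// already; otherwise return the existing one so the caller can try to merge.
Cmd* appendOrReuseDrawBatch(std::vector<Cmd>& cmdBuffer) {
    if (cmdBuffer.empty() || cmdBuffer.back().type != CmdType::kDrawBatch) {
        cmdBuffer.push_back(Cmd{CmdType::kDrawBatch});
    }
    return &cmdBuffer.back();
}
```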
308 sp->fViewMatrix = pathProc->viewMatrix(); | 309 sp->fViewMatrix = pathProc->viewMatrix(); |
309 sp->fStencil = stencilSettings; | 310 sp->fStencil = stencilSettings; |
310 this->recordTraceMarkersIfNecessary(); | 311 this->recordTraceMarkersIfNecessary(); |
311 } | 312 } |
312 | 313 |
313 void GrInOrderDrawBuffer::onDrawPath(const GrPipelineBuilder& pipelineBuilder, | 314 void GrInOrderDrawBuffer::onDrawPath(const GrPipelineBuilder& pipelineBuilder, |
314 const GrPathProcessor* pathProc, | 315 const GrPathProcessor* pathProc, |
315 const GrPath* path, | 316 const GrPath* path, |
316 const GrScissorState& scissorState, | 317 const GrScissorState& scissorState, |
317 const GrStencilSettings& stencilSettings, | 318 const GrStencilSettings& stencilSettings, |
318 const SkRect* devBounds) { | 319 const GrDeviceCoordTexture* dstCopy) { |
319 this->closeBatch(); | 320 this->closeBatch(); |
320 | 321 |
321 // TODO: Only compare the subset of GrPipelineBuilder relevant to path covering? | 322 // TODO: Only compare the subset of GrPipelineBuilder relevant to path covering? |
322 if (!this->recordStateAndShouldDraw(pipelineBuilder, pathProc, scissorState, devBounds)) { | 323 if (!this->recordStateAndShouldDraw(pipelineBuilder, pathProc, scissorState, dstCopy)) { |
323 return; | 324 return; |
324 } | 325 } |
325 DrawPath* dp = GrNEW_APPEND_TO_RECORDER(fCmdBuffer, DrawPath, (path)); | 326 DrawPath* dp = GrNEW_APPEND_TO_RECORDER(fCmdBuffer, DrawPath, (path)); |
326 dp->fStencilSettings = stencilSettings; | 327 dp->fStencilSettings = stencilSettings; |
327 this->recordTraceMarkersIfNecessary(); | 328 this->recordTraceMarkersIfNecessary(); |
328 } | 329 } |
329 | 330 |
330 void GrInOrderDrawBuffer::onDrawPaths(const GrPipelineBuilder& pipelineBuilder, | 331 void GrInOrderDrawBuffer::onDrawPaths(const GrPipelineBuilder& pipelineBuilder, |
331 const GrPathProcessor* pathProc, | 332 const GrPathProcessor* pathProc, |
332 const GrPathRange* pathRange, | 333 const GrPathRange* pathRange, |
333 const void* indices, | 334 const void* indices, |
334 PathIndexType indexType, | 335 PathIndexType indexType, |
335 const float transformValues[], | 336 const float transformValues[], |
336 PathTransformType transformType, | 337 PathTransformType transformType, |
337 int count, | 338 int count, |
338 const GrScissorState& scissorState, | 339 const GrScissorState& scissorState, |
339 const GrStencilSettings& stencilSettings, | 340 const GrStencilSettings& stencilSettings, |
340 const SkRect* devBounds) { | 341 const GrDeviceCoordTexture* dstCopy) { |
341 SkASSERT(pathRange); | 342 SkASSERT(pathRange); |
342 SkASSERT(indices); | 343 SkASSERT(indices); |
343 SkASSERT(transformValues); | 344 SkASSERT(transformValues); |
344 | 345 |
345 this->closeBatch(); | 346 this->closeBatch(); |
346 | 347 |
347 if (!this->recordStateAndShouldDraw(pipelineBuilder, pathProc, scissorState, devBounds)) { | 348 if (!this->recordStateAndShouldDraw(pipelineBuilder, pathProc, scissorState, dstCopy)) { |
348 return; | 349 return; |
349 } | 350 } |
350 | 351 |
351 int indexBytes = GrPathRange::PathIndexSizeInBytes(indexType); | 352 int indexBytes = GrPathRange::PathIndexSizeInBytes(indexType); |
352 if (int misalign = fPathIndexBuffer.count() % indexBytes) { | 353 if (int misalign = fPathIndexBuffer.count() % indexBytes) { |
353 // Add padding to the index buffer so the indices are aligned properly. | 354 // Add padding to the index buffer so the indices are aligned properly. |
354 fPathIndexBuffer.append(indexBytes - misalign); | 355 fPathIndexBuffer.append(indexBytes - misalign); |
355 } | 356 } |
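The padding above rounds the current byte count of fPathIndexBuffer up to a multiple of the index size so the appended indices stay naturally aligned. A standalone illustration of the same arithmetic, with a plain std::vector standing in for the Skia array type:

```cpp
#include <cassert>
#include <vector>

// Mirrors: if (int misalign = fPathIndexBuffer.count() % indexBytes)
//              fPathIndexBuffer.append(indexBytes - misalign);
void padToIndexAlignment(std::vector<char>& buffer, int indexBytes) {
    if (int misalign = static_cast<int>(buffer.size()) % indexBytes) {
        buffer.insert(buffer.end(), indexBytes - misalign, 0);
    }
}

int main() {
    std::vector<char> buffer(5);     // 5 bytes already recorded
    padToIndexAlignment(buffer, 4);  // pad to the next 4-byte boundary
    assert(buffer.size() == 8);      // 3 bytes of zero padding were added
    return 0;
}
```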
356 | 357 |
357 char* savedIndices = fPathIndexBuffer.append(count * indexBytes, | 358 char* savedIndices = fPathIndexBuffer.append(count * indexBytes, |
(...skipping 120 matching lines...) |
478 | 479 |
479 bool isSetState = kSetState_Cmd == strip_trace_bit(iter->fType); | 480 bool isSetState = kSetState_Cmd == strip_trace_bit(iter->fType); |
480 if (isSetState) { | 481 if (isSetState) { |
481 SetState* ss = reinterpret_cast<SetState*>(iter.get()); | 482 SetState* ss = reinterpret_cast<SetState*>(iter.get()); |
482 | 483 |
483 // TODO sometimes we have a prim proc, othertimes we have a GrBatch. Eventually we will | 484 // TODO sometimes we have a prim proc, othertimes we have a GrBatch. Eventually we will |
484 // only have GrBatch and we can delete this | 485 // only have GrBatch and we can delete this |
485 if (ss->fPrimitiveProcessor) { | 486 if (ss->fPrimitiveProcessor) { |
486 this->getGpu()->buildProgramDesc(&ss->fDesc, *ss->fPrimitiveProcessor, | 487 this->getGpu()->buildProgramDesc(&ss->fDesc, *ss->fPrimitiveProcessor, |
487 ss->fPipeline, | 488 ss->fPipeline, |
| 489 ss->fPipeline.descInfo(), |
488 ss->fBatchTracker); | 490 ss->fBatchTracker); |
489 } | 491 } |
490 currentState = ss; | 492 currentState = ss; |
491 } else { | 493 } else { |
492 iter->execute(this, currentState); | 494 iter->execute(this, currentState); |
493 } | 495 } |
494 | 496 |
495 if (cmd_has_trace_marker(iter->fType)) { | 497 if (cmd_has_trace_marker(iter->fType)) { |
496 this->getGpu()->removeGpuTraceMarker(&newMarker); | 498 this->getGpu()->removeGpuTraceMarker(&newMarker); |
497 } | 499 } |
(...skipping 74 matching lines...) |
572 cs->fDstPoint = dstPoint; | 574 cs->fDstPoint = dstPoint; |
573 this->recordTraceMarkersIfNecessary(); | 575 this->recordTraceMarkersIfNecessary(); |
574 return true; | 576 return true; |
575 } | 577 } |
576 return false; | 578 return false; |
577 } | 579 } |
578 | 580 |
579 bool GrInOrderDrawBuffer::recordStateAndShouldDraw(const GrPipelineBuilder& pipelineBuilder, | 581 bool GrInOrderDrawBuffer::recordStateAndShouldDraw(const GrPipelineBuilder& pipelineBuilder, |
580 const GrPrimitiveProcessor* primProc, | 582 const GrPrimitiveProcessor* primProc, |
581 const GrScissorState& scissor, | 583 const GrScissorState& scissor, |
582 const SkRect* devBounds) { | 584 const GrDeviceCoordTexture* dstCopy) { |
583 GrDeviceCoordTexture dstCopy; | |
584 if (!this->setupDstReadIfNecessary(pipelineBuilder, &dstCopy, devBounds)) { | |
585 return false; | |
586 } | |
587 SetState* ss = GrNEW_APPEND_TO_RECORDER(fCmdBuffer, SetState, | 585 SetState* ss = GrNEW_APPEND_TO_RECORDER(fCmdBuffer, SetState, |
588 (pipelineBuilder, primProc, *this->getGpu()->caps(), | 586 (pipelineBuilder, primProc, *this->getGpu()->caps(), |
589 scissor, &dstCopy)); | 587 scissor, dstCopy)); |
590 if (ss->fPipeline.mustSkip()) { | 588 if (ss->fPipeline.mustSkip()) { |
591 fCmdBuffer.pop_back(); | 589 fCmdBuffer.pop_back(); |
592 return false; | 590 return false; |
593 } | 591 } |
594 | 592 |
595 ss->fPrimitiveProcessor->initBatchTracker(&ss->fBatchTracker, | 593 ss->fPrimitiveProcessor->initBatchTracker(&ss->fBatchTracker, |
596 ss->fPipeline.getInitBatchTracker()); | 594 ss->fPipeline.getInitBatchTracker()); |
597 | 595 |
598 if (fPrevState && fPrevState->fPrimitiveProcessor.get() && | 596 if (fPrevState && fPrevState->fPrimitiveProcessor.get() && |
599 fPrevState->fPrimitiveProcessor->canMakeEqual(fPrevState->fBatchTracker, | 597 fPrevState->fPrimitiveProcessor->canMakeEqual(fPrevState->fBatchTracker, |
600 *ss->fPrimitiveProcessor, | 598 *ss->fPrimitiveProcessor, |
601 ss->fBatchTracker) && | 599 ss->fBatchTracker) && |
602 fPrevState->fPipeline.isEqual(ss->fPipeline)) { | 600 fPrevState->fPipeline.isEqual(ss->fPipeline)) { |
603 fCmdBuffer.pop_back(); | 601 fCmdBuffer.pop_back(); |
604 } else { | 602 } else { |
605 fPrevState = ss; | 603 fPrevState = ss; |
606 this->recordTraceMarkersIfNecessary(); | 604 this->recordTraceMarkersIfNecessary(); |
607 } | 605 } |
608 return true; | 606 return true; |
609 } | 607 } |
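recordStateAndShouldDraw speculatively appends a SetState and then pops it back off either when the pipeline must be skipped or when it is redundant with the previously recorded state. A compact sketch of that append-then-pop pattern, with the processor/pipeline equality checks collapsed into a single key comparison (names and types here are simplified assumptions, not the Skia API):

```cpp
#include <vector>

// Simplified stand-in for SetState: 'key' summarizes the pipeline and
// primitive-processor state that the real code compares field by field.
struct State {
    int key;
    bool mustSkip;
};

// Returns true if the caller should go ahead and record its draw.
// 'prevIndex' tracks the last state that stayed in the buffer (-1 if none),
// playing the role of fPrevState.
bool recordStateAndShouldDraw(std::vector<State>& cmds, int& prevIndex, State candidate) {
    cmds.push_back(candidate);
    if (cmds.back().mustSkip) {
        cmds.pop_back();            // nothing will be drawn with this state
        return false;
    }
    if (prevIndex >= 0 && cmds[prevIndex].key == candidate.key) {
        cmds.pop_back();            // redundant; keep drawing with the previous state
    } else {
        prevIndex = static_cast<int>(cmds.size()) - 1;
    }
    return true;
}
```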
610 | 608 |
611 bool GrInOrderDrawBuffer::recordStateAndShouldDraw(GrBatch* batch, | 609 bool GrInOrderDrawBuffer::recordStateAndShouldDraw(GrBatch* batch, |
612 const GrPipelineBuilder& pipelineBuilder, | 610 const GrPipelineBuilder& pipelineBuilder, |
613 const GrScissorState& scissor, | 611 const GrScissorState& scissor, |
614 const SkRect* devBounds) { | 612 const GrDeviceCoordTexture* dstCopy) { |
615 GrDeviceCoordTexture dstCopy; | |
616 if (!this->setupDstReadIfNecessary(pipelineBuilder, &dstCopy, devBounds)) { | |
617 return false; | |
618 } | |
619 // TODO this gets much simpler when we have batches everywhere. | 613 // TODO this gets much simpler when we have batches everywhere. |
620 // If the previous command is also a set state, then we check to see if it has a Batch. If so, | 614 // If the previous command is also a set state, then we check to see if it has a Batch. If so, |
621 // and we can make the two batches equal, and we can combine the states, then we make them equal | 615 // and we can make the two batches equal, and we can combine the states, then we make them equal |
622 SetState* ss = GrNEW_APPEND_TO_RECORDER(fCmdBuffer, SetState, | 616 SetState* ss = GrNEW_APPEND_TO_RECORDER(fCmdBuffer, SetState, |
623 (batch, pipelineBuilder, *this->getGpu()->caps(), scissor, | 617 (batch, pipelineBuilder, *this->getGpu()->caps(), scissor, |
624 &dstCopy)); | 618 dstCopy)); |
625 if (ss->fPipeline.mustSkip()) { | 619 if (ss->fPipeline.mustSkip()) { |
626 fCmdBuffer.pop_back(); | 620 fCmdBuffer.pop_back(); |
627 return false; | 621 return false; |
628 } | 622 } |
629 | 623 |
630 batch->initBatchTracker(ss->fPipeline.getInitBatchTracker()); | 624 batch->initBatchTracker(ss->fPipeline.getInitBatchTracker()); |
631 | 625 |
632 if (fPrevState && !fPrevState->fPrimitiveProcessor.get() && | 626 if (fPrevState && !fPrevState->fPrimitiveProcessor.get() && |
633 fPrevState->fPipeline.isEqual(ss->fPipeline)) { | 627 fPrevState->fPipeline.isEqual(ss->fPipeline)) { |
634 fCmdBuffer.pop_back(); | 628 fCmdBuffer.pop_back(); |
(...skipping 46 matching lines...) |
681 int vcount = vertexCount; | 675 int vcount = vertexCount; |
682 int icount = indexCount; | 676 int icount = indexCount; |
683 | 677 |
684 if (!insideGeoPush && | 678 if (!insideGeoPush && |
685 !unreleasedVertexSpace && | 679 !unreleasedVertexSpace && |
686 !unreleasedIndexSpace && | 680 !unreleasedIndexSpace && |
687 this->geometryHints(vertexStride, &vcount, &icount)) { | 681 this->geometryHints(vertexStride, &vcount, &icount)) { |
688 this->flush(); | 682 this->flush(); |
689 } | 683 } |
690 } | 684 } |
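The last hunk is the tail of the reserve-space path: it flushes the buffered commands early only when nothing is holding pushed or unreleased geometry and the size hints say a flush would help. A hedged sketch of that guard, with the hint query reduced to a callback (the surrounding function and these parameter names are assumptions, since only part of it is visible in this diff):

```cpp
#include <functional>

// Flush eagerly only when it is safe and the geometry hints recommend it.
// 'geometryHints' stands in for GrInOrderDrawBuffer::geometryHints, which may
// also adjust the vertex/index counts it is given.
void maybePreFlush(bool insideGeoPush,
                   bool unreleasedVertexSpace,
                   bool unreleasedIndexSpace,
                   int vertexCount,
                   int indexCount,
                   const std::function<bool(int*, int*)>& geometryHints,
                   const std::function<void()>& flush) {
    int vcount = vertexCount;
    int icount = indexCount;
    if (!insideGeoPush &&
        !unreleasedVertexSpace &&
        !unreleasedIndexSpace &&
        geometryHints(&vcount, &icount)) {
        flush();
    }
}
```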