OLD | NEW |
1 /* | 1 /* |
2 * Copyright 2011 Google Inc. | 2 * Copyright 2011 Google Inc. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license that can be | 4 * Use of this source code is governed by a BSD-style license that can be |
5 * found in the LICENSE file. | 5 * found in the LICENSE file. |
6 */ | 6 */ |
7 | 7 |
8 #include "GrInOrderDrawBuffer.h" | 8 #include "GrInOrderDrawBuffer.h" |
9 | 9 |
10 #include "GrDefaultGeoProcFactory.h" | 10 #include "GrDefaultGeoProcFactory.h" |
11 #include "GrDrawTargetCaps.h" | 11 #include "GrDrawTargetCaps.h" |
12 #include "GrGpu.h" | 12 #include "GrGpu.h" |
13 #include "GrTemplates.h" | 13 #include "GrTemplates.h" |
14 #include "GrFontCache.h" | 14 #include "GrFontCache.h" |
15 #include "GrTexture.h" | 15 #include "GrTexture.h" |
16 | 16 |
17 GrInOrderDrawBuffer::GrInOrderDrawBuffer(GrGpu* gpu, | 17 GrInOrderDrawBuffer::GrInOrderDrawBuffer(GrGpu* gpu, |
18 GrVertexBufferAllocPool* vertexPool, | 18 GrVertexBufferAllocPool* vertexPool, |
19 GrIndexBufferAllocPool* indexPool) | 19 GrIndexBufferAllocPool* indexPool) |
20 : INHERITED(gpu, vertexPool, indexPool) | 20 : INHERITED(gpu, vertexPool, indexPool) |
21 , fCmdBuffer(kCmdBufferInitialSizeInBytes) | 21 , fCmdBuffer(kCmdBufferInitialSizeInBytes) |
22 , fPrevState(NULL) | 22 , fPrevState(NULL) |
23 , fDrawID(0) { | 23 , fDrawID(0) |
| 24 , fBatchTarget(gpu, vertexPool, indexPool) { |
24 | 25 |
25 SkASSERT(vertexPool); | 26 SkASSERT(vertexPool); |
26 SkASSERT(indexPool); | 27 SkASSERT(indexPool); |
27 | 28 |
28 fPathIndexBuffer.setReserve(kPathIdxBufferMinReserve); | 29 fPathIndexBuffer.setReserve(kPathIdxBufferMinReserve); |
29 fPathTransformBuffer.setReserve(kPathXformBufferMinReserve); | 30 fPathTransformBuffer.setReserve(kPathXformBufferMinReserve); |
30 } | 31 } |
31 | 32 |
32 GrInOrderDrawBuffer::~GrInOrderDrawBuffer() { | 33 GrInOrderDrawBuffer::~GrInOrderDrawBuffer() { |
33 this->reset(); | 34 this->reset(); |
(...skipping 167 matching lines...)
201 } | 202 } |
202 // Check if there is a draw info that is compatible that uses the same VB from the pool and | 203 // Check if there is a draw info that is compatible that uses the same VB from the pool and |
203 // the same IB | 204 // the same IB |
204 if (kDraw_Cmd != strip_trace_bit(fCmdBuffer.back().fType)) { | 205 if (kDraw_Cmd != strip_trace_bit(fCmdBuffer.back().fType)) { |
205 return 0; | 206 return 0; |
206 } | 207 } |
207 | 208 |
208 Draw* draw = static_cast<Draw*>(&fCmdBuffer.back()); | 209 Draw* draw = static_cast<Draw*>(&fCmdBuffer.back()); |
209 | 210 |
210 if (!draw->fInfo.isInstanced() || | 211 if (!draw->fInfo.isInstanced() || |
| 212 draw->fInfo.primitiveType() != info.primitiveType() || |
211 draw->fInfo.verticesPerInstance() != info.verticesPerInstance() || | 213 draw->fInfo.verticesPerInstance() != info.verticesPerInstance() || |
212 draw->fInfo.indicesPerInstance() != info.indicesPerInstance() || | 214 draw->fInfo.indicesPerInstance() != info.indicesPerInstance() || |
213 draw->fInfo.vertexBuffer() != info.vertexBuffer() || | 215 draw->fInfo.vertexBuffer() != info.vertexBuffer() || |
214 draw->fInfo.indexBuffer() != geomSrc.fIndexBuffer) { | 216 draw->fInfo.indexBuffer() != geomSrc.fIndexBuffer) { |
215 return 0; | 217 return 0; |
216 } | 218 } |
217 if (draw->fInfo.startVertex() + draw->fInfo.vertexCount() != info.startVertex()) { | 219 if (draw->fInfo.startVertex() + draw->fInfo.vertexCount() != info.startVertex()) { |
218 return 0; | 220 return 0; |
219 } | 221 } |
220 | 222 |
(...skipping 36 matching lines...)
257 draw->fInfo.adjustInstanceCount(-instancesConcated); | 259 draw->fInfo.adjustInstanceCount(-instancesConcated); |
258 } else { | 260 } else { |
259 return; | 261 return; |
260 } | 262 } |
261 } else { | 263 } else { |
262 draw = GrNEW_APPEND_TO_RECORDER(fCmdBuffer, Draw, (info)); | 264 draw = GrNEW_APPEND_TO_RECORDER(fCmdBuffer, Draw, (info)); |
263 } | 265 } |
264 this->recordTraceMarkersIfNecessary(); | 266 this->recordTraceMarkersIfNecessary(); |
265 } | 267 } |
266 | 268 |
| 269 void GrInOrderDrawBuffer::onDrawBatch(GrBatch* batch, |
| 270 const GrDrawState& ds, |
| 271 GrPrimitiveType type, |
| 272 const GrScissorState& scissorState, |
| 273 const GrDeviceCoordTexture* dstCopy) { |
| 274 if (!this->recordStateAndShouldDraw(batch, ds, scissorState, dstCopy)) { |
| 275 return; |
| 276 } |
| 277 |
| 278 // Check if there is a Batch Draw we can batch with |
| 279 if (kBatchDraw != strip_trace_bit(fCmdBuffer.back().fType)) { |
| 280 GrNEW_APPEND_TO_RECORDER(fCmdBuffer, DrawBatch, (batch)); |
| 281 return; |
| 282 } |
| 283 |
| 284 DrawBatch* draw = static_cast<DrawBatch*>(&fCmdBuffer.back()); |
| 285 if (draw->fBatch->combineIfPossible(batch)) { |
| 286 return; |
| 287 } else { |
| 288 GrNEW_APPEND_TO_RECORDER(fCmdBuffer, DrawBatch, (batch)); |
| 289 } |
| 290 this->recordTraceMarkersIfNecessary(); |
| 291 } |
| 292 |
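onDrawBatch() above reuses a simple coalescing pattern: before appending a new DrawBatch command, it asks the most recently recorded command whether it can absorb the incoming batch, and only records a new command if it cannot. A minimal standalone sketch of that pattern follows; the names (Cmd, Recorder) are hypothetical stand-ins for illustration, not Skia's API.

    #include <memory>
    #include <vector>

    struct Cmd {
        virtual ~Cmd() {}
        // Returns true if 'other' was folded into this command.
        virtual bool combineIfPossible(const Cmd& other) { return false; }
    };

    struct Recorder {
        std::vector<std::unique_ptr<Cmd>> fCmds;

        // Append a command, first giving the most recent command a chance to absorb it.
        void record(std::unique_ptr<Cmd> cmd) {
            if (!fCmds.empty() && fCmds.back()->combineIfPossible(*cmd)) {
                return;  // merged into the previous command; nothing new to store
            }
            fCmds.push_back(std::move(cmd));
        }
    };
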
267 void GrInOrderDrawBuffer::onStencilPath(const GrDrawState& ds, | 293 void GrInOrderDrawBuffer::onStencilPath(const GrDrawState& ds, |
268 const GrPathProcessor* pathProc, | 294 const GrPathProcessor* pathProc, |
269 const GrPath* path, | 295 const GrPath* path, |
270 const GrScissorState& scissorState, | 296 const GrScissorState& scissorState, |
271 const GrStencilSettings& stencilSettings) { | 297 const GrStencilSettings& stencilSettings) { |
272 StencilPath* sp = GrNEW_APPEND_TO_RECORDER(fCmdBuffer, StencilPath, | 298 StencilPath* sp = GrNEW_APPEND_TO_RECORDER(fCmdBuffer, StencilPath, |
273 (path, ds.getRenderTarget())); | 299 (path, ds.getRenderTarget())); |
274 sp->fScissor = scissorState; | 300 sp->fScissor = scissorState; |
275 sp->fUseHWAA = ds.isHWAntialias(); | 301 sp->fUseHWAA = ds.isHWAntialias(); |
276 sp->fViewMatrix = pathProc->viewMatrix(); | 302 sp->fViewMatrix = pathProc->viewMatrix(); |
(...skipping 124 matching lines...)
401 reset_data_buffer(&fPathIndexBuffer, kPathIdxBufferMinReserve); | 427 reset_data_buffer(&fPathIndexBuffer, kPathIdxBufferMinReserve); |
402 reset_data_buffer(&fPathTransformBuffer, kPathXformBufferMinReserve); | 428 reset_data_buffer(&fPathTransformBuffer, kPathXformBufferMinReserve); |
403 fGpuCmdMarkers.reset(); | 429 fGpuCmdMarkers.reset(); |
404 } | 430 } |
405 | 431 |
406 void GrInOrderDrawBuffer::onFlush() { | 432 void GrInOrderDrawBuffer::onFlush() { |
407 if (fCmdBuffer.empty()) { | 433 if (fCmdBuffer.empty()) { |
408 return; | 434 return; |
409 } | 435 } |
410 | 436 |
411 | |
412 CmdBuffer::Iter iter(fCmdBuffer); | 437 CmdBuffer::Iter iter(fCmdBuffer); |
413 | 438 |
414 int currCmdMarker = 0; | 439 int currCmdMarker = 0; |
415 | 440 |
416 // Updated every time we find a set state cmd to reflect the current state in the playback | 441 // Updated every time we find a set state cmd to reflect the current state in the playback |
417 // stream. | 442 // stream. |
418 SetState* currentState = NULL; | 443 SetState* currentState = NULL; |
419 | 444 |
| 445 // TODO to prevent flushing the batch buffer too much, we only flush when wasBatch && !isBatch |
| 446 // In the long term we can delete this and just flush once at the end of all geometry generation |
| 447 bool wasBatch = false; |
| 448 |
420 while (iter.next()) { | 449 while (iter.next()) { |
421 GrGpuTraceMarker newMarker("", -1); | 450 GrGpuTraceMarker newMarker("", -1); |
422 SkString traceString; | 451 SkString traceString; |
423 if (cmd_has_trace_marker(iter->fType)) { | 452 if (cmd_has_trace_marker(iter->fType)) { |
424 traceString = fGpuCmdMarkers[currCmdMarker].toString(); | 453 traceString = fGpuCmdMarkers[currCmdMarker].toString(); |
425 newMarker.fMarker = traceString.c_str(); | 454 newMarker.fMarker = traceString.c_str(); |
426 this->getGpu()->addGpuTraceMarker(&newMarker); | 455 this->getGpu()->addGpuTraceMarker(&newMarker); |
427 ++currCmdMarker; | 456 ++currCmdMarker; |
428 } | 457 } |
429 | 458 |
430 if (kSetState_Cmd == strip_trace_bit(iter->fType)) { | 459 if (kSetState_Cmd == strip_trace_bit(iter->fType)) { |
431 SetState* ss = reinterpret_cast<SetState*>(iter.get()); | 460 SetState* ss = reinterpret_cast<SetState*>(iter.get()); |
432 | 461 |
433 this->getGpu()->buildProgramDesc(&ss->fDesc, *ss->fPrimitiveProcessor, ss->fState, | 462 // TODO sometimes we have a prim proc, other times we have a GrBatch. Eventually we will |
434 ss->fState.descInfo(), ss->fBatchTracker); | 463 // only have GrBatch and we can delete this |
435 currentState = ss; | 464 if (ss->fPrimitiveProcessor) { |
| 465 // TODO see note above, this gets deleted once everyone uses batch drawing |
| 466 if (wasBatch) { |
| 467 wasBatch = false; |
| 468 fBatchTarget.flush(); |
| 469 } |
436 | 470 |
| 471 this->getGpu()->buildProgramDesc(&ss->fDesc, *ss->fPrimitiveProcessor, ss->fState, |
| 472 ss->fState.descInfo(), |
| 473 ss->fBatchTracker); |
| 474 } else { |
| 475 wasBatch = true; |
| 476 } |
| 477 currentState = ss; |
437 } else { | 478 } else { |
438 iter->execute(this, currentState); | 479 iter->execute(this, currentState); |
439 } | 480 } |
440 | 481 |
441 if (cmd_has_trace_marker(iter->fType)) { | 482 if (cmd_has_trace_marker(iter->fType)) { |
442 this->getGpu()->removeGpuTraceMarker(&newMarker); | 483 this->getGpu()->removeGpuTraceMarker(&newMarker); |
443 } | 484 } |
444 } | 485 } |
445 | 486 |
| 487 // TODO see note above, one last catch |
| 488 if (wasBatch) { |
| 489 fBatchTarget.flush(); |
| 490 } |
| 491 |
446 SkASSERT(fGpuCmdMarkers.count() == currCmdMarker); | 492 SkASSERT(fGpuCmdMarkers.count() == currCmdMarker); |
447 ++fDrawID; | 493 ++fDrawID; |
448 } | 494 } |
449 | 495 |
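The wasBatch flag in onFlush() above defers fBatchTarget.flush() until playback leaves a run of batch-generated commands, with one catch-all flush after the loop. Below is a minimal sketch of that gating, assuming hypothetical Target and Command types; it only mirrors the control flow, not the real command playback.

    #include <vector>

    struct Target { void flush() { /* submit any deferred batch geometry */ } };
    struct Command { bool fIsBatch; };

    void playback(const std::vector<Command>& cmds, Target* target) {
        bool wasBatch = false;
        for (const Command& c : cmds) {
            if (wasBatch && !c.fIsBatch) {
                target->flush();   // leaving a run of batch commands
                wasBatch = false;
            }
            if (c.fIsBatch) {
                wasBatch = true;
            }
            // ... execute c against the GPU here ...
        }
        if (wasBatch) {
            target->flush();       // final catch, as in the code above
        }
    }
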
450 void GrInOrderDrawBuffer::Draw::execute(GrInOrderDrawBuffer* buf, const SetState* state) { | 496 void GrInOrderDrawBuffer::Draw::execute(GrInOrderDrawBuffer* buf, const SetState* state) { |
451 SkASSERT(state); | 497 SkASSERT(state); |
452 DrawArgs args(state->fPrimitiveProcessor.get(), &state->fState, &state->fDesc, | 498 DrawArgs args(state->fPrimitiveProcessor.get(), &state->fState, &state->fDesc, |
453 &state->fBatchTracker); | 499 &state->fBatchTracker); |
454 buf->getGpu()->draw(args, fInfo); | 500 buf->getGpu()->draw(args, fInfo); |
455 } | 501 } |
(...skipping 19 matching lines...)
475 void GrInOrderDrawBuffer::DrawPaths::execute(GrInOrderDrawBuffer* buf, const SetState* state) { | 521 void GrInOrderDrawBuffer::DrawPaths::execute(GrInOrderDrawBuffer* buf, const SetState* state) { |
476 SkASSERT(state); | 522 SkASSERT(state); |
477 DrawArgs args(state->fPrimitiveProcessor.get(), &state->fState, &state->fDesc, | 523 DrawArgs args(state->fPrimitiveProcessor.get(), &state->fState, &state->fDesc, |
478 &state->fBatchTracker); | 524 &state->fBatchTracker); |
479 buf->getGpu()->drawPaths(args, this->pathRange(), | 525 buf->getGpu()->drawPaths(args, this->pathRange(), |
480 &buf->fPathIndexBuffer[fIndicesLocation], fIndexType, | 526 &buf->fPathIndexBuffer[fIndicesLocation], fIndexType, |
481 &buf->fPathTransformBuffer[fTransformsLocation], fTransformType, | 527 &buf->fPathTransformBuffer[fTransformsLocation], fTransformType, |
482 fCount, fStencilSettings); | 528 fCount, fStencilSettings); |
483 } | 529 } |
484 | 530 |
| 531 void GrInOrderDrawBuffer::DrawBatch::execute(GrInOrderDrawBuffer* buf, const SetState* state) { |
| 532 SkASSERT(state); |
| 533 fBatch->generateGeometry(buf->getBatchTarget(), &state->fState); |
| 534 } |
| 535 |
485 void GrInOrderDrawBuffer::SetState::execute(GrInOrderDrawBuffer*, const SetState*) {} | 536 void GrInOrderDrawBuffer::SetState::execute(GrInOrderDrawBuffer*, const SetState*) {} |
486 | 537 |
487 void GrInOrderDrawBuffer::Clear::execute(GrInOrderDrawBuffer* buf, const SetState*) { | 538 void GrInOrderDrawBuffer::Clear::execute(GrInOrderDrawBuffer* buf, const SetState*) { |
488 if (GrColor_ILLEGAL == fColor) { | 539 if (GrColor_ILLEGAL == fColor) { |
489 buf->getGpu()->discard(this->renderTarget()); | 540 buf->getGpu()->discard(this->renderTarget()); |
490 } else { | 541 } else { |
491 buf->getGpu()->clear(&fRect, fColor, fCanIgnoreRect, this->renderTarget()); | 542 buf->getGpu()->clear(&fRect, fColor, fCanIgnoreRect, this->renderTarget()); |
492 } | 543 } |
493 } | 544 } |
494 | 545 |
(...skipping 27 matching lines...)
522 (ds, primProc, *this->getGpu()->caps(), scissor, | 573 (ds, primProc, *this->getGpu()->caps(), scissor, |
523 dstCopy)); | 574 dstCopy)); |
524 if (ss->fState.mustSkip()) { | 575 if (ss->fState.mustSkip()) { |
525 fCmdBuffer.pop_back(); | 576 fCmdBuffer.pop_back(); |
526 return false; | 577 return false; |
527 } | 578 } |
528 | 579 |
529 ss->fPrimitiveProcessor->initBatchTracker(&ss->fBatchTracker, | 580 ss->fPrimitiveProcessor->initBatchTracker(&ss->fBatchTracker, |
530 ss->fState.getInitBatchTracker()); | 581 ss->fState.getInitBatchTracker()); |
531 | 582 |
532 if (fPrevState && | 583 if (fPrevState && fPrevState->fPrimitiveProcessor.get() && |
533 fPrevState->fPrimitiveProcessor->canMakeEqual(fPrevState->fBatchTracker, | 584 fPrevState->fPrimitiveProcessor->canMakeEqual(fPrevState->fBatchTracker, |
534 *ss->fPrimitiveProcessor, | 585 *ss->fPrimitiveProcessor, |
535 ss->fBatchTracker) && | 586 ss->fBatchTracker) && |
536 fPrevState->fState.isEqual(ss->fState)) { | 587 fPrevState->fState.isEqual(ss->fState)) { |
537 fCmdBuffer.pop_back(); | 588 fCmdBuffer.pop_back(); |
538 } else { | 589 } else { |
539 fPrevState = ss; | 590 fPrevState = ss; |
540 this->recordTraceMarkersIfNecessary(); | 591 this->recordTraceMarkersIfNecessary(); |
541 } | 592 } |
542 return true; | 593 return true; |
543 } | 594 } |
544 | 595 |
| 596 bool GrInOrderDrawBuffer::recordStateAndShouldDraw(GrBatch* batch, |
| 597 const GrDrawState& ds, |
| 598 const GrScissorState& scissor, |
| 599 const GrDeviceCoordTexture* dstCopy) { |
| 600 // TODO this gets much simpler when we have batches everywhere. |
| 601 // If the previous command is also a set state, then we check to see if it has a Batch. If so, |
| 602 // and we can make the two batches equal, and we can combine the states, then we make them equal |
| 603 SetState* ss = GrNEW_APPEND_TO_RECORDER(fCmdBuffer, SetState, |
| 604 (batch, ds, *this->getGpu()->caps(), scissor, |
| 605 dstCopy)); |
| 606 if (ss->fState.mustSkip()) { |
| 607 fCmdBuffer.pop_back(); |
| 608 return false; |
| 609 } |
| 610 |
| 611 batch->initBatchTracker(ss->fState.getInitBatchTracker()); |
| 612 |
| 613 if (fPrevState && !fPrevState->fPrimitiveProcessor.get() && |
| 614 fPrevState->fState.isEqual(ss->fState)) { |
| 615 fCmdBuffer.pop_back(); |
| 616 } else { |
| 617 fPrevState = ss; |
| 618 this->recordTraceMarkersIfNecessary(); |
| 619 } |
| 620 return true; |
| 621 } |
| 622 |
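Both recordStateAndShouldDraw() overloads above share a record-then-pop shape: the new SetState is appended first, then popped again if the previously kept state already covers it (or if mustSkip() says the draw is a no-op). A standalone sketch of that shape follows, with hypothetical State/StateRecorder types and without the mustSkip() check.

    #include <vector>

    struct State {
        int fKey;
        bool operator==(const State& o) const { return fKey == o.fKey; }
    };

    struct StateRecorder {
        std::vector<State> fStates;
        int fPrevState = -1;   // index of the last state kept, -1 if none

        bool recordStateAndShouldDraw(const State& s) {
            fStates.push_back(s);
            if (fPrevState >= 0 && fStates[fPrevState] == fStates.back()) {
                fStates.pop_back();   // previous state still applies; drop the redundant one
            } else {
                fPrevState = (int)fStates.size() - 1;
            }
            return true;
        }
    };
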
545 void GrInOrderDrawBuffer::recordTraceMarkersIfNecessary() { | 623 void GrInOrderDrawBuffer::recordTraceMarkersIfNecessary() { |
546 SkASSERT(!fCmdBuffer.empty()); | 624 SkASSERT(!fCmdBuffer.empty()); |
547 SkASSERT(!cmd_has_trace_marker(fCmdBuffer.back().fType)); | 625 SkASSERT(!cmd_has_trace_marker(fCmdBuffer.back().fType)); |
548 const GrTraceMarkerSet& activeTraceMarkers = this->getActiveTraceMarkers(); | 626 const GrTraceMarkerSet& activeTraceMarkers = this->getActiveTraceMarkers(); |
549 if (activeTraceMarkers.count() > 0) { | 627 if (activeTraceMarkers.count() > 0) { |
550 fCmdBuffer.back().fType = add_trace_bit(fCmdBuffer.back().fType); | 628 fCmdBuffer.back().fType = add_trace_bit(fCmdBuffer.back().fType); |
551 fGpuCmdMarkers.push_back(activeTraceMarkers); | 629 fGpuCmdMarkers.push_back(activeTraceMarkers); |
552 } | 630 } |
553 } | 631 } |