OLD | NEW |
1 /* | 1 /* |
2 * Copyright 2011 Google Inc. | 2 * Copyright 2011 Google Inc. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license that can be | 4 * Use of this source code is governed by a BSD-style license that can be |
5 * found in the LICENSE file. | 5 * found in the LICENSE file. |
6 */ | 6 */ |
7 | 7 |
8 #include "GrInOrderDrawBuffer.h" | 8 #include "GrInOrderDrawBuffer.h" |
9 | 9 |
10 #include "GrBufferAllocPool.h" | 10 #include "GrBufferAllocPool.h" |
11 #include "GrDrawTargetCaps.h" | 11 #include "GrDrawTargetCaps.h" |
| 12 #include "GrGpu.h" |
| 13 #include "GrOptDrawState.h" |
| 14 #include "GrTemplates.h" |
12 #include "GrTextStrike.h" | 15 #include "GrTextStrike.h" |
13 #include "GrGpu.h" | |
14 #include "GrTemplates.h" | |
15 #include "GrTexture.h" | 16 #include "GrTexture.h" |
16 | 17 |
17 GrInOrderDrawBuffer::GrInOrderDrawBuffer(GrGpu* gpu, | 18 GrInOrderDrawBuffer::GrInOrderDrawBuffer(GrGpu* gpu, |
18 GrVertexBufferAllocPool* vertexPool, | 19 GrVertexBufferAllocPool* vertexPool, |
19 GrIndexBufferAllocPool* indexPool) | 20 GrIndexBufferAllocPool* indexPool) |
20 : INHERITED(gpu->getContext()) | 21 : INHERITED(gpu->getContext()) |
21 , fCmdBuffer(kCmdBufferInitialSizeInBytes) | 22 , fCmdBuffer(kCmdBufferInitialSizeInBytes) |
22 , fLastState(NULL) | 23 , fLastState(NULL) |
23 , fDstGpu(gpu) | 24 , fDstGpu(gpu) |
24 , fVertexPool(*vertexPool) | 25 , fVertexPool(*vertexPool) |
(...skipping 214 matching lines...) |
239 | 240 |
240 return instancesToConcat; | 241 return instancesToConcat; |
241 } | 242 } |
242 | 243 |
243 void GrInOrderDrawBuffer::onDraw(const DrawInfo& info, | 244 void GrInOrderDrawBuffer::onDraw(const DrawInfo& info, |
244 const GrClipMaskManager::ScissorState& scissorState) { | 245 const GrClipMaskManager::ScissorState& scissorState) { |
245 | 246 |
246 GeometryPoolState& poolState = fGeoPoolStateStack.back(); | 247 GeometryPoolState& poolState = fGeoPoolStateStack.back(); |
247 const GrDrawState& drawState = this->getDrawState(); | 248 const GrDrawState& drawState = this->getDrawState(); |
248 | 249 |
249 this->recordStateIfNecessary(); | 250 this->recordStateIfNecessary(GrGpu::PrimTypeToDrawType(info.primitiveType()), |
 | 251 info.getDstCopy()); |
250 | 252 |
251 const GrVertexBuffer* vb; | 253 const GrVertexBuffer* vb; |
252 if (kBuffer_GeometrySrcType == this->getGeomSrc().fVertexSrc) { | 254 if (kBuffer_GeometrySrcType == this->getGeomSrc().fVertexSrc) { |
253 vb = this->getGeomSrc().fVertexBuffer; | 255 vb = this->getGeomSrc().fVertexBuffer; |
254 } else { | 256 } else { |
255 vb = poolState.fPoolVertexBuffer; | 257 vb = poolState.fPoolVertexBuffer; |
256 } | 258 } |
257 | 259 |
258 const GrIndexBuffer* ib = NULL; | 260 const GrIndexBuffer* ib = NULL; |
259 if (info.isIndexed()) { | 261 if (info.isIndexed()) { |
(...skipping 30 matching lines...) |
290 size_t bytes = (info.indexCount() + info.startIndex()) * sizeof(uint16_t); | 292 size_t bytes = (info.indexCount() + info.startIndex()) * sizeof(uint16_t); |
291 poolState.fUsedPoolIndexBytes = SkTMax(poolState.fUsedPoolIndexBytes, bytes); | 293 poolState.fUsedPoolIndexBytes = SkTMax(poolState.fUsedPoolIndexBytes, bytes); |
292 draw->fInfo.adjustStartIndex(poolState.fPoolStartIndex); | 294 draw->fInfo.adjustStartIndex(poolState.fPoolStartIndex); |
293 } | 295 } |
294 } | 296 } |
295 | 297 |
296 void GrInOrderDrawBuffer::onStencilPath(const GrPath* path, | 298 void GrInOrderDrawBuffer::onStencilPath(const GrPath* path, |
297 const GrClipMaskManager::ScissorState& scissorState, | 299 const GrClipMaskManager::ScissorState& scissorState, |
298 const GrStencilSettings& stencilSettings) { | 300 const GrStencilSettings& stencilSettings) { |
299 // Only compare the subset of GrDrawState relevant to path stenciling? | 301 // Only compare the subset of GrDrawState relevant to path stenciling? |
300 this->recordStateIfNecessary(); | 302 this->recordStateIfNecessary(GrGpu::kStencilPath_DrawType, NULL); |
301 StencilPath* sp = GrNEW_APPEND_TO_RECORDER(fCmdBuffer, StencilPath, (path)); | 303 StencilPath* sp = GrNEW_APPEND_TO_RECORDER(fCmdBuffer, StencilPath, (path)); |
302 sp->fScissorState = scissorState; | 304 sp->fScissorState = scissorState; |
303 sp->fStencilSettings = stencilSettings; | 305 sp->fStencilSettings = stencilSettings; |
304 this->recordTraceMarkersIfNecessary(); | 306 this->recordTraceMarkersIfNecessary(); |
305 } | 307 } |
306 | 308 |
307 void GrInOrderDrawBuffer::onDrawPath(const GrPath* path, | 309 void GrInOrderDrawBuffer::onDrawPath(const GrPath* path, |
308 const GrClipMaskManager::ScissorState& scissorState, | 310 const GrClipMaskManager::ScissorState& scissorState, |
309 const GrStencilSettings& stencilSettings, | 311 const GrStencilSettings& stencilSettings, |
310 const GrDeviceCoordTexture* dstCopy) { | 312 const GrDeviceCoordTexture* dstCopy) { |
311 // TODO: Only compare the subset of GrDrawState relevant to path covering? | 313 // TODO: Only compare the subset of GrDrawState relevant to path covering? |
312 this->recordStateIfNecessary(); | 314 this->recordStateIfNecessary(GrGpu::kDrawPath_DrawType, dstCopy); |
313 DrawPath* dp = GrNEW_APPEND_TO_RECORDER(fCmdBuffer, DrawPath, (path)); | 315 DrawPath* dp = GrNEW_APPEND_TO_RECORDER(fCmdBuffer, DrawPath, (path)); |
314 if (dstCopy) { | 316 if (dstCopy) { |
315 dp->fDstCopy = *dstCopy; | 317 dp->fDstCopy = *dstCopy; |
316 } | 318 } |
317 dp->fScissorState = scissorState; | 319 dp->fScissorState = scissorState; |
318 dp->fStencilSettings = stencilSettings; | 320 dp->fStencilSettings = stencilSettings; |
319 this->recordTraceMarkersIfNecessary(); | 321 this->recordTraceMarkersIfNecessary(); |
320 } | 322 } |
321 | 323 |
322 void GrInOrderDrawBuffer::onDrawPaths(const GrPathRange* pathRange, | 324 void GrInOrderDrawBuffer::onDrawPaths(const GrPathRange* pathRange, |
323 const uint32_t indices[], | 325 const uint32_t indices[], |
324 int count, | 326 int count, |
325 const float transforms[], | 327 const float transforms[], |
326 PathTransformType transformsType, | 328 PathTransformType transformsType, |
327 const GrClipMaskManager::ScissorState& scissorState, | 329 const GrClipMaskManager::ScissorState& scissorState, |
328 const GrStencilSettings& stencilSettings, | 330 const GrStencilSettings& stencilSettings, |
329 const GrDeviceCoordTexture* dstCopy) { | 331 const GrDeviceCoordTexture* dstCopy) { |
330 SkASSERT(pathRange); | 332 SkASSERT(pathRange); |
331 SkASSERT(indices); | 333 SkASSERT(indices); |
332 SkASSERT(transforms); | 334 SkASSERT(transforms); |
333 | 335 |
334 this->recordStateIfNecessary(); | 336 this->recordStateIfNecessary(GrGpu::kDrawPaths_DrawType, dstCopy); |
335 | 337 |
336 int sizeOfIndices = sizeof(uint32_t) * count; | 338 int sizeOfIndices = sizeof(uint32_t) * count; |
337 int sizeOfTransforms = sizeof(float) * count * | 339 int sizeOfTransforms = sizeof(float) * count * |
338 GrPathRendering::PathTransformSize(transformsType); | 340 GrPathRendering::PathTransformSize(transformsType); |
339 | 341 |
340 DrawPaths* dp = GrNEW_APPEND_WITH_DATA_TO_RECORDER(fCmdBuffer, DrawPaths, (pathRange), | 342 DrawPaths* dp = GrNEW_APPEND_WITH_DATA_TO_RECORDER(fCmdBuffer, DrawPaths, (pathRange), |
341 sizeOfIndices + sizeOfTransforms); | 343 sizeOfIndices + sizeOfTransforms); |
342 memcpy(dp->indices(), indices, sizeOfIndices); | 344 memcpy(dp->indices(), indices, sizeOfIndices); |
343 dp->fCount = count; | 345 dp->fCount = count; |
344 memcpy(dp->transforms(), transforms, sizeOfTransforms); | 346 memcpy(dp->transforms(), transforms, sizeOfTransforms); |
(...skipping 77 matching lines...) |
422 if (fCmdBuffer.empty()) { | 424 if (fCmdBuffer.empty()) { |
423 return; | 425 return; |
424 } | 426 } |
425 | 427 |
426 GrAutoTRestore<bool> flushRestore(&fFlushing); | 428 GrAutoTRestore<bool> flushRestore(&fFlushing); |
427 fFlushing = true; | 429 fFlushing = true; |
428 | 430 |
429 fVertexPool.unmap(); | 431 fVertexPool.unmap(); |
430 fIndexPool.unmap(); | 432 fIndexPool.unmap(); |
431 | 433 |
432 GrDrawState* prevDrawState = SkRef(fDstGpu->drawState()); | |
433 | |
434 CmdBuffer::Iter iter(fCmdBuffer); | 434 CmdBuffer::Iter iter(fCmdBuffer); |
435 | 435 |
436 int currCmdMarker = 0; | 436 int currCmdMarker = 0; |
437 fDstGpu->saveActiveTraceMarkers(); | 437 fDstGpu->saveActiveTraceMarkers(); |
438 | 438 |
 | 439 // Gpu no longer maintains the current drawstate, so we track the setstate calls below. |
| 440 // NOTE: we always record a new drawstate at flush boundaries |
| 441 SkAutoTUnref<const GrOptDrawState> currentOptState; |
| 442 |
439 while (iter.next()) { | 443 while (iter.next()) { |
440 GrGpuTraceMarker newMarker("", -1); | 444 GrGpuTraceMarker newMarker("", -1); |
441 SkString traceString; | 445 SkString traceString; |
442 if (cmd_has_trace_marker(iter->fType)) { | 446 if (cmd_has_trace_marker(iter->fType)) { |
443 traceString = fGpuCmdMarkers[currCmdMarker].toString(); | 447 traceString = fGpuCmdMarkers[currCmdMarker].toString(); |
444 newMarker.fMarker = traceString.c_str(); | 448 newMarker.fMarker = traceString.c_str(); |
445 fDstGpu->addGpuTraceMarker(&newMarker); | 449 fDstGpu->addGpuTraceMarker(&newMarker); |
446 ++currCmdMarker; | 450 ++currCmdMarker; |
447 } | 451 } |
448 | 452 |
449 SkDEBUGCODE(bool isDraw = kDraw_Cmd == strip_trace_bit(iter->fType) || | 453 if (kSetState_Cmd == strip_trace_bit(iter->fType)) { |
450 kStencilPath_Cmd == strip_trace_bit(iter->fType) || | 454 const SetState* ss = reinterpret_cast<const SetState*>(iter.get()); |
451 kDrawPath_Cmd == strip_trace_bit(iter->fType) || | 455 currentOptState.reset(GrOptDrawState::Create(ss->fState, |
452 kDrawPaths_Cmd == strip_trace_bit(iter->fType)); | 456 fDstGpu, |
453 SkASSERT(!isDraw || fDstGpu->drawState() != prevDrawState); | 457 &ss->fDstCopy, |
454 | 458 ss->fDrawType)); |
455 iter->execute(fDstGpu); | 459 } else { |
| 460 iter->execute(fDstGpu, currentOptState.get()); |
| 461 } |
456 | 462 |
457 if (cmd_has_trace_marker(iter->fType)) { | 463 if (cmd_has_trace_marker(iter->fType)) { |
458 fDstGpu->removeGpuTraceMarker(&newMarker); | 464 fDstGpu->removeGpuTraceMarker(&newMarker); |
459 } | 465 } |
460 } | 466 } |
461 | 467 |
462 fDstGpu->restoreActiveTraceMarkers(); | 468 fDstGpu->restoreActiveTraceMarkers(); |
463 SkASSERT(fGpuCmdMarkers.count() == currCmdMarker); | 469 SkASSERT(fGpuCmdMarkers.count() == currCmdMarker); |
464 | 470 |
465 fDstGpu->setDrawState(prevDrawState); | |
466 prevDrawState->unref(); | |
467 this->reset(); | 471 this->reset(); |
468 ++fDrawID; | 472 ++fDrawID; |
469 } | 473 } |
470 | 474 |
471 void GrInOrderDrawBuffer::Draw::execute(GrGpu* gpu) { | 475 void GrInOrderDrawBuffer::Draw::execute(GrGpu* gpu, const GrOptDrawState* optState) { |
472 gpu->setVertexSourceToBuffer(this->vertexBuffer()); | 476 if (!optState) { |
| 477 return; |
| 478 } |
 | 479 gpu->setVertexSourceToBuffer(this->vertexBuffer(), optState->getVertexStride()); |
473 if (fInfo.isIndexed()) { | 480 if (fInfo.isIndexed()) { |
474 gpu->setIndexSourceToBuffer(this->indexBuffer()); | 481 gpu->setIndexSourceToBuffer(this->indexBuffer()); |
475 } | 482 } |
476 gpu->draw(fInfo, fScissorState); | 483 gpu->draw(*optState, fInfo, fScissorState); |
477 } | 484 } |
478 | 485 |
479 void GrInOrderDrawBuffer::StencilPath::execute(GrGpu* gpu) { | 486 void GrInOrderDrawBuffer::StencilPath::execute(GrGpu* gpu, const GrOptDrawState* optState) { |
480 gpu->stencilPath(this->path(), fScissorState, fStencilSettings); | 487 if (!optState) { |
| 488 return; |
| 489 } |
| 490 gpu->stencilPath(*optState, this->path(), fScissorState, fStencilSettings); |
481 } | 491 } |
482 | 492 |
483 void GrInOrderDrawBuffer::DrawPath::execute(GrGpu* gpu) { | 493 void GrInOrderDrawBuffer::DrawPath::execute(GrGpu* gpu, const GrOptDrawState* optState) { |
484 gpu->drawPath(this->path(), fScissorState, fStencilSettings, | 494 if (!optState) { |
485 fDstCopy.texture() ? &fDstCopy : NULL); | 495 return; |
| 496 } |
| 497 gpu->drawPath(*optState, this->path(), fScissorState, fStencilSettings, |
| 498 fDstCopy.texture() ? &fDstCopy : NULL); |
486 } | 499 } |
487 | 500 |
488 void GrInOrderDrawBuffer::DrawPaths::execute(GrGpu* gpu) { | 501 void GrInOrderDrawBuffer::DrawPaths::execute(GrGpu* gpu, const GrOptDrawState* optState) { |
489 gpu->drawPaths(this->pathRange(), this->indices(), fCount, this->transforms(), | 502 if (!optState) { |
490 fTransformsType, fScissorState, fStencilSettings, | 503 return; |
491 fDstCopy.texture() ? &fDstCopy : NULL); | 504 } |
 | 505 gpu->drawPaths(*optState, this->pathRange(), this->indices(), fCount, this->transforms(), |
| 506 fTransformsType, fScissorState, fStencilSettings, |
| 507 fDstCopy.texture() ? &fDstCopy : NULL); |
492 } | 508 } |
493 | 509 |
494 void GrInOrderDrawBuffer::SetState::execute(GrGpu* gpu) { | 510 void GrInOrderDrawBuffer::SetState::execute(GrGpu* gpu, const GrOptDrawState*) { |
495 gpu->setDrawState(&fState); | |
496 } | 511 } |
497 | 512 |
498 void GrInOrderDrawBuffer::Clear::execute(GrGpu* gpu) { | 513 void GrInOrderDrawBuffer::Clear::execute(GrGpu* gpu, const GrOptDrawState*) { |
499 if (GrColor_ILLEGAL == fColor) { | 514 if (GrColor_ILLEGAL == fColor) { |
500 gpu->discard(this->renderTarget()); | 515 gpu->discard(this->renderTarget()); |
501 } else { | 516 } else { |
502 gpu->clear(&fRect, fColor, fCanIgnoreRect, this->renderTarget()); | 517 gpu->clear(&fRect, fColor, fCanIgnoreRect, this->renderTarget()); |
503 } | 518 } |
504 } | 519 } |
505 | 520 |
506 void GrInOrderDrawBuffer::ClearStencilClip::execute(GrGpu* gpu) { | 521 void GrInOrderDrawBuffer::ClearStencilClip::execute(GrGpu* gpu, const GrOptDrawState*) { |
507 gpu->clearStencilClip(fRect, fInsideClip, this->renderTarget()); | 522 gpu->clearStencilClip(fRect, fInsideClip, this->renderTarget()); |
508 } | 523 } |
509 | 524 |
510 void GrInOrderDrawBuffer::CopySurface::execute(GrGpu* gpu) { | 525 void GrInOrderDrawBuffer::CopySurface::execute(GrGpu* gpu, const GrOptDrawState*){ |
511 gpu->copySurface(this->dst(), this->src(), fSrcRect, fDstPoint); | 526 gpu->copySurface(this->dst(), this->src(), fSrcRect, fDstPoint); |
512 } | 527 } |
513 | 528 |
514 bool GrInOrderDrawBuffer::copySurface(GrSurface* dst, | 529 bool GrInOrderDrawBuffer::copySurface(GrSurface* dst, |
515 GrSurface* src, | 530 GrSurface* src, |
516 const SkIRect& srcRect, | 531 const SkIRect& srcRect, |
517 const SkIPoint& dstPoint) { | 532 const SkIPoint& dstPoint) { |
518 if (fDstGpu->canCopySurface(dst, src, srcRect, dstPoint)) { | 533 if (fDstGpu->canCopySurface(dst, src, srcRect, dstPoint)) { |
519 CopySurface* cs = GrNEW_APPEND_TO_RECORDER(fCmdBuffer, CopySurface, (dst, src)); | 534 CopySurface* cs = GrNEW_APPEND_TO_RECORDER(fCmdBuffer, CopySurface, (dst, src)); |
520 cs->fSrcRect = srcRect; | 535 cs->fSrcRect = srcRect; |
(...skipping 112 matching lines...) |
633 | 648 |
634 // If we get a release vertex space call then our current source should either be reserved | 649 // If we get a release vertex space call then our current source should either be reserved |
635 // or array (which we copied into reserved space). | 650 // or array (which we copied into reserved space). |
636 SkASSERT(kReserved_GeometrySrcType == geoSrc.fVertexSrc); | 651 SkASSERT(kReserved_GeometrySrcType == geoSrc.fVertexSrc); |
637 | 652 |
638 // When the caller reserved vertex buffer space we gave it back a pointer | 653 // When the caller reserved vertex buffer space we gave it back a pointer |
639 // provided by the vertex buffer pool. At each draw we tracked the largest | 654 // provided by the vertex buffer pool. At each draw we tracked the largest |
640 // offset into the pool's pointer that was referenced. Now we return to the | 655 // offset into the pool's pointer that was referenced. Now we return to the |
641 // pool any portion at the tail of the allocation that no draw referenced. | 656 // pool any portion at the tail of the allocation that no draw referenced. |
642 size_t reservedVertexBytes = geoSrc.fVertexSize * geoSrc.fVertexCount; | 657 size_t reservedVertexBytes = geoSrc.fVertexSize * geoSrc.fVertexCount; |
643 fVertexPool.putBack(reservedVertexBytes - | 658 fVertexPool.putBack(reservedVertexBytes - poolState.fUsedPoolVertexBytes); |
644 poolState.fUsedPoolVertexBytes); | |
645 poolState.fUsedPoolVertexBytes = 0; | 659 poolState.fUsedPoolVertexBytes = 0; |
646 poolState.fPoolVertexBuffer = NULL; | 660 poolState.fPoolVertexBuffer = NULL; |
647 poolState.fPoolStartVertex = 0; | 661 poolState.fPoolStartVertex = 0; |
648 } | 662 } |
649 | 663 |
650 void GrInOrderDrawBuffer::releaseReservedIndexSpace() { | 664 void GrInOrderDrawBuffer::releaseReservedIndexSpace() { |
651 GeometryPoolState& poolState = fGeoPoolStateStack.back(); | 665 GeometryPoolState& poolState = fGeoPoolStateStack.back(); |
652 const GeometrySrcState& geoSrc = this->getGeomSrc(); | 666 const GeometrySrcState& geoSrc = this->getGeomSrc(); |
653 | 667 |
654 // If we get a release index space call then our current source should either be reserved | 668 // If we get a release index space call then our current source should either be reserved |
(...skipping 25 matching lines...) |
680 SkASSERT(fGeoPoolStateStack.count() > 1); | 694 SkASSERT(fGeoPoolStateStack.count() > 1); |
681 fGeoPoolStateStack.pop_back(); | 695 fGeoPoolStateStack.pop_back(); |
682 GeometryPoolState& poolState = fGeoPoolStateStack.back(); | 696 GeometryPoolState& poolState = fGeoPoolStateStack.back(); |
683 // we have to assume that any slack we had in our vertex/index data | 697 // we have to assume that any slack we had in our vertex/index data |
684 // is now unreleasable because data may have been appended later in the | 698 // is now unreleasable because data may have been appended later in the |
685 // pool. | 699 // pool. |
686 if (kReserved_GeometrySrcType == restoredState.fVertexSrc) { | 700 if (kReserved_GeometrySrcType == restoredState.fVertexSrc) { |
687 poolState.fUsedPoolVertexBytes = restoredState.fVertexSize * restoredState.fVertexCount; | 701 poolState.fUsedPoolVertexBytes = restoredState.fVertexSize * restoredState.fVertexCount; |
688 } | 702 } |
689 if (kReserved_GeometrySrcType == restoredState.fIndexSrc) { | 703 if (kReserved_GeometrySrcType == restoredState.fIndexSrc) { |
690 poolState.fUsedPoolIndexBytes = sizeof(uint16_t) * | 704 poolState.fUsedPoolIndexBytes = sizeof(uint16_t) * restoredState.fIndexCount; |
691 restoredState.fIndexCount; | |
692 } | 705 } |
693 } | 706 } |
694 | 707 |
695 void GrInOrderDrawBuffer::recordStateIfNecessary() { | 708 void GrInOrderDrawBuffer::recordStateIfNecessary(GrGpu::DrawType drawType, |
 | 709 const GrDeviceCoordTexture* dstCopy) { |
696 if (!fLastState) { | 710 if (!fLastState) { |
697 SetState* ss = GrNEW_APPEND_TO_RECORDER(fCmdBuffer, SetState, (this->getDrawState())); | 711 SetState* ss = GrNEW_APPEND_TO_RECORDER(fCmdBuffer, SetState, (this->getDrawState())); |
698 fLastState = &ss->fState; | 712 fLastState = &ss->fState; |
| 713 if (dstCopy) { |
| 714 ss->fDstCopy = *dstCopy; |
| 715 } |
| 716 ss->fDrawType = drawType; |
699 this->convertDrawStateToPendingExec(fLastState); | 717 this->convertDrawStateToPendingExec(fLastState); |
700 this->recordTraceMarkersIfNecessary(); | 718 this->recordTraceMarkersIfNecessary(); |
701 return; | 719 return; |
702 } | 720 } |
703 const GrDrawState& curr = this->getDrawState(); | 721 const GrDrawState& curr = this->getDrawState(); |
704 switch (GrDrawState::CombineIfPossible(*fLastState, curr, *this->caps())) { | 722 switch (GrDrawState::CombineIfPossible(*fLastState, curr, *this->caps())) { |
705 case GrDrawState::kIncompatible_CombinedState: | 723 case GrDrawState::kIncompatible_CombinedState: { |
706 fLastState = &GrNEW_APPEND_TO_RECORDER(fCmdBuffer, SetState, (curr))->fState; | 724 SetState* ss = GrNEW_APPEND_TO_RECORDER(fCmdBuffer, SetState, (curr)); |
| 725 fLastState = &ss->fState; |
| 726 if (dstCopy) { |
| 727 ss->fDstCopy = *dstCopy; |
| 728 } |
| 729 ss->fDrawType = drawType; |
707 this->convertDrawStateToPendingExec(fLastState); | 730 this->convertDrawStateToPendingExec(fLastState); |
708 this->recordTraceMarkersIfNecessary(); | 731 this->recordTraceMarkersIfNecessary(); |
709 break; | 732 break; |
| 733 } |
710 case GrDrawState::kA_CombinedState: | 734 case GrDrawState::kA_CombinedState: |
711 case GrDrawState::kAOrB_CombinedState: // Treat the same as kA. | 735 case GrDrawState::kAOrB_CombinedState: // Treat the same as kA. |
712 break; | 736 break; |
713 case GrDrawState::kB_CombinedState: | 737 case GrDrawState::kB_CombinedState: |
714 // prev has already been converted to pending execution. That is a one-way ticket. | 738 // prev has already been converted to pending execution. That is a one-way ticket. |
715 // So here we just destruct the previous state and reinit with a new copy of curr. | 739 // So here we just destruct the previous state and reinit with a new copy of curr. |
716 // Note that this goes away when we move GrIODB over to taking optimized snapshots | 740 // Note that this goes away when we move GrIODB over to taking optimized snapshots |
717 // of draw states. | 741 // of draw states. |
718 fLastState->~GrDrawState(); | 742 fLastState->~GrDrawState(); |
719 SkNEW_PLACEMENT_ARGS(fLastState, GrDrawState, (curr)); | 743 SkNEW_PLACEMENT_ARGS(fLastState, GrDrawState, (curr)); |
720 this->convertDrawStateToPendingExec(fLastState); | 744 this->convertDrawStateToPendingExec(fLastState); |
721 break; | 745 break; |
722 } | 746 } |
723 } | 747 } |
724 | 748 |
725 void GrInOrderDrawBuffer::recordTraceMarkersIfNecessary() { | 749 void GrInOrderDrawBuffer::recordTraceMarkersIfNecessary() { |
726 SkASSERT(!fCmdBuffer.empty()); | 750 SkASSERT(!fCmdBuffer.empty()); |
727 SkASSERT(!cmd_has_trace_marker(fCmdBuffer.back().fType)); | 751 SkASSERT(!cmd_has_trace_marker(fCmdBuffer.back().fType)); |
728 const GrTraceMarkerSet& activeTraceMarkers = this->getActiveTraceMarkers(); | 752 const GrTraceMarkerSet& activeTraceMarkers = this->getActiveTraceMarkers(); |
729 if (activeTraceMarkers.count() > 0) { | 753 if (activeTraceMarkers.count() > 0) { |
730 fCmdBuffer.back().fType = add_trace_bit(fCmdBuffer.back().fType); | 754 fCmdBuffer.back().fType = add_trace_bit(fCmdBuffer.back().fType); |
731 fGpuCmdMarkers.push_back(activeTraceMarkers); | 755 fGpuCmdMarkers.push_back(activeTraceMarkers); |
732 } | 756 } |
733 } | 757 } |