Chromium Code Reviews
| OLD | NEW |
|---|---|
| 1 /* | 1 /* |
| 2 * Copyright 2014 Google Inc. | 2 * Copyright 2014 Google Inc. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license that can be | 4 * Use of this source code is governed by a BSD-style license that can be |
| 5 * found in the LICENSE file. | 5 * found in the LICENSE file. |
| 6 */ | 6 */ |
| 7 | 7 |
| 8 #include "SkRecordDraw.h" | 8 #include "SkRecordDraw.h" |
| 9 #include "SkPatchUtils.h" | 9 #include "SkPatchUtils.h" |
| 10 | 10 |
| 11 #if SK_SUPPORT_GPU | |
| 12 #include "GrPictureUtils.h" | |
| 13 #endif | |
| 14 | |
| 11 void SkRecordDraw(const SkRecord& record, | 15 void SkRecordDraw(const SkRecord& record, |
| 12 SkCanvas* canvas, | 16 SkCanvas* canvas, |
| 13 const SkBBoxHierarchy* bbh, | 17 const SkBBoxHierarchy* bbh, |
| 14 SkDrawPictureCallback* callback) { | 18 SkDrawPictureCallback* callback) { |
| 15 SkAutoCanvasRestore saveRestore(canvas, true /*save now, restore at exit*/); | 19 SkAutoCanvasRestore saveRestore(canvas, true /*save now, restore at exit*/); |
| 16 | 20 |
| 17 if (bbh) { | 21 if (bbh) { |
| 18 // Draw only ops that affect pixels in the canvas's current clip. | 22 // Draw only ops that affect pixels in the canvas's current clip. |
| 19 // The SkRecord and BBH were recorded in identity space. This canvas | 23 // The SkRecord and BBH were recorded in identity space. This canvas |
| 20 // is not necessarily in that same space. getClipBounds() returns us | 24 // is not necessarily in that same space. getClipBounds() returns us |
| (...skipping 114 matching lines...) | |
| 135 // non-drawing ("control") ops inside are exactly the union of the bounds of | 139 // non-drawing ("control") ops inside are exactly the union of the bounds of |
| 136 // the drawing ops inside that block. | 140 // the drawing ops inside that block. |
| 137 // | 141 // |
| 138 // To implement this, we keep a stack of active Save blocks. As we consume ops | 142 // To implement this, we keep a stack of active Save blocks. As we consume ops |
| 139 // inside the Save/Restore block, drawing ops are unioned with the bounds of | 143 // inside the Save/Restore block, drawing ops are unioned with the bounds of |
| 140 // the block, and control ops are stashed away for later. When we finish the | 144 // the block, and control ops are stashed away for later. When we finish the |
| 141 // block with a Restore, our bounds are complete, and we go back and fill them | 145 // block with a Restore, our bounds are complete, and we go back and fill them |
| 142 // in for all the control ops we stashed away. | 146 // in for all the control ops we stashed away. |
| 143 class FillBounds : SkNoncopyable { | 147 class FillBounds : SkNoncopyable { |
| 144 public: | 148 public: |
| 145 FillBounds(const SkRect& cullRect, const SkRecord& record, SkBBoxHierarchy* bbh) | 149 FillBounds(const SkRect& cullRect, const SkRecord& record, SkBBoxHierarchy* bbh) |
|
mtklein
2014/11/11 21:17:30
If we can get away with it, let's just pass bbh as
robertphillips
2014/11/12 13:46:26
Done.
| |
| 146 : fCullRect(cullRect) | 150 : fNumRecords(record.count()) |
| 151 , fBBH(bbh) | |
| 152 , fCullRect(cullRect) | |
| 147 , fBounds(record.count()) { | 153 , fBounds(record.count()) { |
| 148 // Calculate bounds for all ops. This won't go quite in order, so we'll need | 154 // Calculate bounds for all ops. This won't go quite in order, so we'll need |
| 149 // to store the bounds separately then feed them in to the BBH later in order. | 155 // to store the bounds separately then feed them in to the BBH later in order. |
| 150 fCTM = &SkMatrix::I(); | 156 fCTM = &SkMatrix::I(); |
| 151 fCurrentClipBounds = fCullRect; | 157 fCurrentClipBounds = fCullRect; |
| 152 for (fCurrentOp = 0; fCurrentOp < record.count(); fCurrentOp++) { | 158 } |
| 153 record.visit<void>(fCurrentOp, *this); | |
| 154 } | |
| 155 | 159 |
| 160 void setCurrentOp(unsigned currentOp) { fCurrentOp = currentOp; } | |
| 161 | |
| 162 void cleanUp() { | |
| 156 // If we have any lingering unpaired Saves, simulate restores to make | 163 // If we have any lingering unpaired Saves, simulate restores to make |
| 157 // sure all ops in those Save blocks have their bounds calculated. | 164 // sure all ops in those Save blocks have their bounds calculated. |
| 158 while (!fSaveStack.isEmpty()) { | 165 while (!fSaveStack.isEmpty()) { |
| 159 this->popSaveBlock(); | 166 this->popSaveBlock(); |
| 160 } | 167 } |
| 161 | 168 |
| 162 // Any control ops not part of any Save/Restore block draw everywhere. | 169 // Any control ops not part of any Save/Restore block draw everywhere. |
| 163 while (!fControlIndices.isEmpty()) { | 170 while (!fControlIndices.isEmpty()) { |
| 164 this->popControl(fCullRect); | 171 this->popControl(fCullRect); |
| 165 } | 172 } |
| 166 | 173 |
| 167 // Finally feed all stored bounds into the BBH. They'll be returned in this order. | 174 // Finally feed all stored bounds into the BBH. They'll be returned in this order. |
| 168 SkASSERT(bbh); | 175 SkASSERT(fBBH); |
| 169 bbh->insert(&fBounds, record.count()); | 176 fBBH->insert(&fBounds, fNumRecords); |
| 170 } | 177 } |
| 171 | 178 |
| 172 template <typename T> void operator()(const T& op) { | 179 template <typename T> void operator()(const T& op) { |
| 173 this->updateCTM(op); | 180 this->updateCTM(op); |
| 174 this->updateClipBounds(op); | 181 this->updateClipBounds(op); |
| 175 this->trackBounds(op); | 182 this->trackBounds(op); |
| 176 } | 183 } |
| 177 | 184 |
| 178 private: | |
| 179 // In this file, SkRect are in local coordinates, Bounds are translated back to identity space. | 185 // In this file, SkRect are in local coordinates, Bounds are translated back to identity space. |
| 180 typedef SkRect Bounds; | 186 typedef SkRect Bounds; |
| 181 | 187 |
| 188 unsigned currentOp() const { return fCurrentOp; } | |
| 189 const SkMatrix& ctm() const { return *fCTM; } | |
| 190 const Bounds& currentClipBounds() const { return fCurrentClipBounds; } | |
| 191 const Bounds& getBounds(unsigned index) const { return fBounds[index]; } | |
| 192 | |
| 193 // Adjust rect for all paints that may affect its geometry, then map it to identity space. | |
| 194 Bounds adjustAndMap(SkRect rect, const SkPaint* paint) const { | |
| 195 // Inverted rectangles really confuse our BBHs. | |
| 196 rect.sort(); | |
| 197 | |
| 198 // Adjust the rect for its own paint. | |
| 199 if (!AdjustForPaint(paint, &rect)) { | |
| 200 // The paint could do anything to our bounds. The only safe answer is the current clip. | |
| 201 return fCurrentClipBounds; | |
| 202 } | |
| 203 | |
| 204 // Adjust rect for all the paints from the SaveLayers we're inside. | |
| 205 if (!this->adjustForSaveLayerPaints(&rect)) { | |
| 206 // Same deal as above. | |
| 207 return fCurrentClipBounds; | |
| 208 } | |
| 209 | |
| 210 // Map the rect back to identity space. | |
| 211 fCTM->mapRect(&rect); | |
| 212 | |
| 213 // Nothing can draw outside the current clip. | |
| 214 // (Only bounded ops call into this method, so oddballs like Clear don't matter here.) | |
| 215 rect.intersect(fCurrentClipBounds); | |
| 216 return rect; | |
| 217 } | |
| 218 | |
| 219 private: | |
| 182 struct SaveBounds { | 220 struct SaveBounds { |
| 183 int controlOps; // Number of control ops in this Save block, including the Save. | 221 int controlOps; // Number of control ops in this Save block, including the Save. |
| 184 Bounds bounds; // Bounds of everything in the block. | 222 Bounds bounds; // Bounds of everything in the block. |
| 185 const SkPaint* paint; // Unowned. If set, adjusts the bounds of all ops in this block. | 223 const SkPaint* paint; // Unowned. If set, adjusts the bounds of all ops in this block. |
| 186 }; | 224 }; |
| 187 | 225 |
| 188 // Only Restore and SetMatrix change the CTM. | 226 // Only Restore and SetMatrix change the CTM. |
| 189 template <typename T> void updateCTM(const T&) {} | 227 template <typename T> void updateCTM(const T&) {} |
| 190 void updateCTM(const Restore& op) { fCTM = &op.matrix; } | 228 void updateCTM(const Restore& op) { fCTM = &op.matrix; } |
| 191 void updateCTM(const SetMatrix& op) { fCTM = &op.matrix; } | 229 void updateCTM(const SetMatrix& op) { fCTM = &op.matrix; } |
| (...skipping 315 matching lines...) | |
| 507 | 545 |
| 508 bool adjustForSaveLayerPaints(SkRect* rect, int savesToIgnore = 0) const { | 546 bool adjustForSaveLayerPaints(SkRect* rect, int savesToIgnore = 0) const { |
| 509 for (int i = fSaveStack.count() - 1 - savesToIgnore; i >= 0; i--) { | 547 for (int i = fSaveStack.count() - 1 - savesToIgnore; i >= 0; i--) { |
| 510 if (!AdjustForPaint(fSaveStack[i].paint, rect)) { | 548 if (!AdjustForPaint(fSaveStack[i].paint, rect)) { |
| 511 return false; | 549 return false; |
| 512 } | 550 } |
| 513 } | 551 } |
| 514 return true; | 552 return true; |
| 515 } | 553 } |
| 516 | 554 |
| 517 // Adjust rect for all paints that may affect its geometry, then map it to identity space. | 555 const unsigned int fNumRecords; |
|
mtklein
2014/11/11 21:17:30
We usually just write unsigned.
robertphillips
2014/11/12 13:46:26
Done.
| |
| 518 Bounds adjustAndMap(SkRect rect, const SkPaint* paint) const { | |
| 519 // Inverted rectangles really confuse our BBHs. | |
| 520 rect.sort(); | |
| 521 | 556 |
| 522 // Adjust the rect for its own paint. | 557 // The BBH being filled in |
| 523 if (!AdjustForPaint(paint, &rect)) { | 558 SkBBoxHierarchy* fBBH; |
| 524 // The paint could do anything to our bounds. The only safe answer is the current clip. | |
| 525 return fCurrentClipBounds; | |
| 526 } | |
| 527 | |
| 528 // Adjust rect for all the paints from the SaveLayers we're inside. | |
| 529 if (!this->adjustForSaveLayerPaints(&rect)) { | |
| 530 // Same deal as above. | |
| 531 return fCurrentClipBounds; | |
| 532 } | |
| 533 | |
| 534 // Map the rect back to identity space. | |
| 535 fCTM->mapRect(&rect); | |
| 536 | |
| 537 // Nothing can draw outside the current clip. | |
| 538 // (Only bounded ops call into this method, so oddballs like Clear don't matter here.) | |
| 539 rect.intersect(fCurrentClipBounds); | |
| 540 return rect; | |
| 541 } | |
| 542 | 559 |
| 543 // We do not guarantee anything for operations outside of the cull rect | 560 // We do not guarantee anything for operations outside of the cull rect |
| 544 const SkRect fCullRect; | 561 const SkRect fCullRect; |
| 545 | 562 |
| 546 // Conservative identity-space bounds for each op in the SkRecord. | 563 // Conservative identity-space bounds for each op in the SkRecord. |
| 547 SkAutoTMalloc<Bounds> fBounds; | 564 SkAutoTMalloc<Bounds> fBounds; |
| 548 | 565 |
| 549 // We walk fCurrentOp through the SkRecord, as we go using updateCTM() | 566 // We walk fCurrentOp through the SkRecord, as we go using updateCTM() |
| 550 // and updateClipBounds() to maintain the exact CTM (fCTM) and conservative | 567 // and updateClipBounds() to maintain the exact CTM (fCTM) and conservative |
| 551 // identity-space bounds of the current clip (fCurrentClipBounds). | 568 // identity-space bounds of the current clip (fCurrentClipBounds). |
| 552 unsigned fCurrentOp; | 569 unsigned fCurrentOp; |
| 553 const SkMatrix* fCTM; | 570 const SkMatrix* fCTM; |
| 554 Bounds fCurrentClipBounds; | 571 Bounds fCurrentClipBounds; |
| 555 | 572 |
| 556 // Used to track the bounds of Save/Restore blocks and the control ops inside them. | 573 // Used to track the bounds of Save/Restore blocks and the control ops inside them. |
| 557 SkTDArray<SaveBounds> fSaveStack; | 574 SkTDArray<SaveBounds> fSaveStack; |
| 558 SkTDArray<unsigned> fControlIndices; | 575 SkTDArray<unsigned> fControlIndices; |
| 559 }; | 576 }; |
| 560 | 577 |
| 578 #if SK_SUPPORT_GPU | |
| 579 // SkRecord visitor to gather saveLayer/restore information. | |
| 580 class CollectLayers : SkNoncopyable { | |
| 581 public: | |
| 582 CollectLayers(const SkRect& cullRect, const SkRecord& record, | |
| 583 SkBBoxHierarchy* bbh, GrAccelData* accelData) | |
| 584 : fSaveLayersInStack(0) | |
| 585 , fAccelData(accelData) | |
| 586 , fFillBounds(cullRect, record, bbh) { | |
| 587 } | |
| 588 | |
| 589 void setCurrentOp(unsigned currentOp) { fFillBounds.setCurrentOp(currentOp); } | |
| 590 | |
| 591 void cleanUp() { | |
| 592 fFillBounds.cleanUp(); | |
|
mtklein
2014/11/11 21:17:30
Does the ordering of cleanUp() and the popSaveLaye
robertphillips
2014/11/12 13:46:26
Done.
| |
| 593 | |
| 594 while (!fSaveLayerStack.isEmpty()) { | |
| 595 this->popSaveLayerInfo(); | |
| 596 } | |
| 597 } | |
| 598 | |
| 599 template <typename T> void operator()(const T& op) { | |
| 600 fFillBounds(op); | |
| 601 this->trackSaveLayers(op); | |
| 602 } | |
| 603 | |
| 604 private: | |
| 605 class SaveLayerInfo { | |
|
mtklein
2014/11/11 21:17:30
some might rewrite "class ... { public: " -> "stru
robertphillips
2014/11/12 13:46:26
Done.
| |
| 606 public: | |
| 607 SaveLayerInfo() { } | |
| 608 SaveLayerInfo(int opIndex, bool isSaveLayer, const SkPaint* paint, const FillBounds::Bounds& clipBound) | |
| 609 : fStartIndex(opIndex) | |
| 610 , fIsSaveLayer(isSaveLayer) | |
| 611 , fHasNestedSaveLayer(false) | |
| 612 , fPaint(paint) | |
| 613 , fClipBound(clipBound) { | |
| 614 } | |
| 615 | |
| 616 int fStartIndex; | |
| 617 bool fIsSaveLayer; | |
| 618 bool fHasNestedSaveLayer; | |
| 619 const SkPaint* fPaint; | |
| 620 FillBounds::Bounds fClipBound; | |
| 621 }; | |
| 622 | |
| 623 template <typename T> void trackSaveLayers(const T& op) { | |
| 624 /* most ops aren't involved in saveLayers */ | |
| 625 } | |
| 626 void trackSaveLayers(const Save& s) { this->pushSaveLayerInfo(false, NULL); } | |
| 627 void trackSaveLayers(const SaveLayer& sl) { this->pushSaveLayerInfo(true, sl.paint); } | |
| 628 void trackSaveLayers(const Restore& r) { this->popSaveLayerInfo(); } | |
| 629 | |
| 630 void trackSaveLayers(const DrawPicture& dp) { | |
| 631 // For sub-pictures, we wrap their layer information within the parent | |
| 632 // picture's rendering hierarchy | |
| 633 SkPicture::AccelData::Key key = GrAccelData::ComputeAccelDataKey(); | |
|
mtklein
2014/11/11 21:17:30
Just out of curiosity, how much of GrAccelData is
robertphillips
2014/11/12 13:46:25
I think that's a good idea but would prefer to del
| |
| 634 | |
| 635 const GrAccelData* childData = | |
| 636 static_cast<const GrAccelData*>(dp.picture->EXPERIMENTAL_getAccelData(key)); | |
| 637 if (!childData) { | |
| 638 // If the child layer hasn't been generated with saveLayer data we | |
| 639 // assume the worst (i.e., that it does contain layers which nest | |
| 640 // inside existing layers). Layers within sub-pictures that don't | |
| 641 // have saveLayer data cannot be hoisted. | |
| 642 // TODO: could the analysis data be use to fine tune this? | |
| 643 this->updateStackForSaveLayer(); | |
| 644 return; | |
| 645 } | |
| 646 | |
| 647 for (int i = 0; i < childData->numSaveLayers(); ++i) { | |
| 648 const GrAccelData::SaveLayerInfo& src = childData->saveLayerInfo(i); | |
| 649 | |
| 650 FillBounds::Bounds newClip(fFillBounds.currentClipBounds()); | |
| 651 | |
| 652 if (!newClip.intersect(fFillBounds.adjustAndMap(src.fBounds, dp.paint))) { | |
| 653 continue; | |
| 654 } | |
| 655 | |
| 656 this->updateStackForSaveLayer(); | |
| 657 | |
| 658 GrAccelData::SaveLayerInfo& dst = fAccelData->addSaveLayerInfo(); | |
| 659 | |
| 660 // If src.fPicture is NULL the layer is in dp.picture; otherwise | |
| 661 // it belongs to a sub-picture. | |
| 662 dst.fPicture = src.fPicture ? src.fPicture : static_cast<const SkPicture*>(dp.picture); | |
| 663 dst.fPicture->ref(); | |
| 664 dst.fBounds = newClip; | |
| 665 dst.fLocalMat = src.fLocalMat; | |
| 666 dst.fPreMat = src.fPreMat; | |
| 667 dst.fPreMat.postConcat(fFillBounds.ctm()); | |
| 668 if (src.fPaint) { | |
| 669 dst.fPaint = SkNEW_ARGS(SkPaint, (*src.fPaint)); | |
| 670 } | |
| 671 dst.fSaveLayerOpID = src.fSaveLayerOpID; | |
| 672 dst.fRestoreOpID = src.fRestoreOpID; | |
| 673 dst.fHasNestedLayers = src.fHasNestedLayers; | |
| 674 dst.fIsNested = fSaveLayersInStack > 0 || src.fIsNested; | |
| 675 } | |
| 676 } | |
| 677 | |
| 678 // Inform all the saveLayers already on the stack that they now have a | |
| 679 // nested saveLayer inside them | |
| 680 void updateStackForSaveLayer() { | |
| 681 for (int index = fSaveLayerStack.count() - 1; index >= 0; --index) { | |
| 682 if (fSaveLayerStack[index].fHasNestedSaveLayer) { | |
| 683 break; | |
| 684 } | |
| 685 fSaveLayerStack[index].fHasNestedSaveLayer = true; | |
| 686 if (fSaveLayerStack[index].fIsSaveLayer) { | |
| 687 break; | |
| 688 } | |
| 689 } | |
| 690 } | |
| 691 | |
| 692 void pushSaveLayerInfo(bool isSaveLayer, const SkPaint* paint) { | |
| 693 if (isSaveLayer) { | |
| 694 this->updateStackForSaveLayer(); | |
| 695 ++fSaveLayersInStack; | |
| 696 } | |
| 697 | |
| 698 fSaveLayerStack.push(SaveLayerInfo(fFillBounds.currentOp(), isSaveLayer, paint, | |
| 699 fFillBounds.currentClipBounds())); | |
| 700 } | |
| 701 | |
| 702 void popSaveLayerInfo() { | |
| 703 if (fSaveLayerStack.count() <= 0) { | |
| 704 SkASSERT(false); | |
| 705 return; | |
| 706 } | |
| 707 | |
| 708 SaveLayerInfo sli; | |
| 709 fSaveLayerStack.pop(&sli); | |
| 710 | |
| 711 if (!sli.fIsSaveLayer) { | |
| 712 return; | |
| 713 } | |
| 714 | |
| 715 --fSaveLayersInStack; | |
| 716 | |
| 717 GrAccelData::SaveLayerInfo& slInfo = fAccelData->addSaveLayerInfo(); | |
| 718 | |
| 719 SkASSERT(NULL == slInfo.fPicture); // This layer is in the top-most picture | |
| 720 | |
| 721 slInfo.fBounds = fFillBounds.getBounds(sli.fStartIndex); | |
| 722 slInfo.fBounds.intersect(sli.fClipBound); | |
| 723 slInfo.fLocalMat = fFillBounds.ctm(); | |
| 724 slInfo.fPreMat = SkMatrix::I(); | |
| 725 if (sli.fPaint) { | |
| 726 slInfo.fPaint = SkNEW_ARGS(SkPaint, (*sli.fPaint)); | |
| 727 } | |
| 728 slInfo.fSaveLayerOpID = sli.fStartIndex; | |
| 729 slInfo.fRestoreOpID = fFillBounds.currentOp(); | |
| 730 slInfo.fHasNestedLayers = sli.fHasNestedSaveLayer; | |
| 731 slInfo.fIsNested = fSaveLayersInStack > 0; | |
| 732 } | |
| 733 | |
| 734 // Used to collect saveLayer information for layer hoisting | |
| 735 int fSaveLayersInStack; | |
| 736 SkTDArray<SaveLayerInfo> fSaveLayerStack; | |
| 737 GrAccelData* fAccelData; | |
| 738 | |
| 739 SkRecords::FillBounds fFillBounds; | |
| 740 }; | |
| 741 #endif | |
| 742 | |
| 561 } // namespace SkRecords | 743 } // namespace SkRecords |
| 562 | 744 |
| 563 void SkRecordFillBounds(const SkRect& cullRect, const SkRecord& record, SkBBoxHierarchy* bbh) { | 745 void SkRecordFillBounds(const SkRect& cullRect, const SkRecord& record, SkBBoxHierarchy* bbh) { |
| 564 SkRecords::FillBounds(cullRect, record, bbh); | 746 SkRecords::FillBounds visitor(cullRect, record, bbh); |
| 747 | |
| 748 for (unsigned curOp = 0; curOp < record.count(); curOp++) { | |
| 749 visitor.setCurrentOp(curOp); | |
| 750 record.visit<void>(curOp, visitor); | |
| 751 } | |
| 752 | |
| 753 visitor.cleanUp(); | |
| 565 } | 754 } |
| 755 | |
| 756 #if SK_SUPPORT_GPU | |
| 757 void SkRecordComputeLayers(const SkRect& cullRect, const SkRecord& record, | |
| 758 SkBBoxHierarchy* bbh, GrAccelData* data) { | |
| 759 SkRecords::CollectLayers visitor(cullRect, record, bbh, data); | |
| 760 | |
| 761 for (unsigned curOp = 0; curOp < record.count(); curOp++) { | |
| 762 visitor.setCurrentOp(curOp); | |
| 763 record.visit<void>(curOp, visitor); | |
| 764 } | |
| 765 | |
| 766 visitor.cleanUp(); | |
| 767 } | |
| 768 #endif | |
| 769 | |
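
Both new entry points above drive their visitor the same way: construct it, step it op by op with setCurrentOp() plus record.visit<void>(), then call cleanUp(). The helper below is a hypothetical consolidation of that loop, not part of this CL, assuming only the interfaces shown in the diff:

```cpp
// Hypothetical helper (not in this CL): drives any visitor that exposes
// setCurrentOp() and cleanUp() across every op in a record, mirroring the
// loops in SkRecordFillBounds() and SkRecordComputeLayers() above.
template <typename Visitor>
static void run_record_visitor(const SkRecord& record, Visitor* visitor) {
    for (unsigned curOp = 0; curOp < record.count(); curOp++) {
        visitor->setCurrentOp(curOp);
        record.visit<void>(curOp, *visitor);
    }
    visitor->cleanUp();
}
```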