| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright 2014 Google Inc. | 2 * Copyright 2014 Google Inc. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license that can be | 4 * Use of this source code is governed by a BSD-style license that can be |
| 5 * found in the LICENSE file. | 5 * found in the LICENSE file. |
| 6 */ | 6 */ |
| 7 | 7 |
| 8 #include "GrAtlas.h" | 8 #include "GrAtlas.h" |
| 9 #include "GrGpu.h" | 9 #include "GrGpu.h" |
| 10 #include "GrLayerCache.h" | 10 #include "GrLayerCache.h" |
| (...skipping 99 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 110 | 110 |
| 111 // The atlas only lets go of its texture when the atlas is deleted. | 111 // The atlas only lets go of its texture when the atlas is deleted. |
| 112 fAtlas.free(); | 112 fAtlas.free(); |
| 113 // GrLayerCache always assumes an atlas exists so recreate it. The atlas | 113 // GrLayerCache always assumes an atlas exists so recreate it. The atlas |
| 114 // lazily allocates a replacement texture so reallocating a new | 114 // lazily allocates a replacement texture so reallocating a new |
| 115 // atlas here won't disrupt a GrContext::contextDestroyed or freeGpuResource
s. | 115 // atlas here won't disrupt a GrContext::contextDestroyed or freeGpuResource
s. |
| 116 // TODO: Make GrLayerCache lazily allocate the atlas manager? | 116 // TODO: Make GrLayerCache lazily allocate the atlas manager? |
| 117 this->initAtlas(); | 117 this->initAtlas(); |
| 118 } | 118 } |
| 119 | 119 |
| 120 GrCachedLayer* GrLayerCache::createLayer(const SkPicture* picture, | 120 GrCachedLayer* GrLayerCache::createLayer(uint32_t pictureID, |
| 121 int start, int stop, | 121 int start, int stop, |
| 122 const SkMatrix& ctm) { | 122 const SkMatrix& ctm) { |
| 123 SkASSERT(picture->uniqueID() != SK_InvalidGenID && start > 0 && stop > 0); | 123 SkASSERT(pictureID != SK_InvalidGenID && start > 0 && stop > 0); |
| 124 | 124 |
| 125 GrCachedLayer* layer = SkNEW_ARGS(GrCachedLayer, (picture->uniqueID(), start
, stop, ctm)); | 125 GrCachedLayer* layer = SkNEW_ARGS(GrCachedLayer, (pictureID, start, stop, ct
m)); |
| 126 fLayerHash.add(layer); | 126 fLayerHash.add(layer); |
| 127 return layer; | 127 return layer; |
| 128 } | 128 } |
| 129 | 129 |
| 130 GrCachedLayer* GrLayerCache::findLayer(const SkPicture* picture, | 130 GrCachedLayer* GrLayerCache::findLayer(uint32_t pictureID, |
| 131 int start, int stop, | 131 int start, int stop, |
| 132 const SkMatrix& ctm) { | 132 const SkMatrix& ctm) { |
| 133 SkASSERT(picture->uniqueID() != SK_InvalidGenID && start > 0 && stop > 0); | 133 SkASSERT(pictureID != SK_InvalidGenID && start > 0 && stop > 0); |
| 134 return fLayerHash.find(GrCachedLayer::Key(picture->uniqueID(), start, stop,
ctm)); | 134 return fLayerHash.find(GrCachedLayer::Key(pictureID, start, stop, ctm)); |
| 135 } | 135 } |
| 136 | 136 |
| 137 GrCachedLayer* GrLayerCache::findLayerOrCreate(const SkPicture* picture, | 137 GrCachedLayer* GrLayerCache::findLayerOrCreate(uint32_t pictureID, |
| 138 int start, int stop, | 138 int start, int stop, |
| 139 const SkMatrix& ctm) { | 139 const SkMatrix& ctm) { |
| 140 SkASSERT(picture->uniqueID() != SK_InvalidGenID && start > 0 && stop > 0); | 140 SkASSERT(pictureID != SK_InvalidGenID && start > 0 && stop > 0); |
| 141 GrCachedLayer* layer = fLayerHash.find(GrCachedLayer::Key(picture->uniqueID(
), | 141 GrCachedLayer* layer = fLayerHash.find(GrCachedLayer::Key(pictureID, start,
stop, ctm)); |
| 142 start, stop, ctm))
; | |
| 143 if (NULL == layer) { | 142 if (NULL == layer) { |
| 144 layer = this->createLayer(picture, start, stop, ctm); | 143 layer = this->createLayer(pictureID, start, stop, ctm); |
| 145 } | 144 } |
| 146 | 145 |
| 147 return layer; | 146 return layer; |
| 148 } | 147 } |
| 149 | 148 |
// Give 'layer' backing texture space and mark it locked. Atlased storage
// is preferred unless 'dontAtlas' is set or the layer is implausibly big;
// otherwise the layer gets its own approx-match scratch texture.
// Returns true when the layer was newly given texture space (so the
// caller presumably must render its contents) and false when it was
// already locked or still resident in the atlas - TODO confirm the
// intended true/false contract against callers.
bool GrLayerCache::lock(GrCachedLayer* layer, const GrTextureDesc& desc, bool dontAtlas) {
    SkDEBUGCODE(GrAutoValidateLayer avl(fAtlas->getTexture(), layer);)

    if (layer->locked()) {
        // This layer is already locked
#ifdef SK_DEBUG
        if (layer->isAtlased()) {
            // It claims to be atlased
            SkASSERT(!dontAtlas);
            SkASSERT(layer->rect().width() == desc.fWidth);
            SkASSERT(layer->rect().height() == desc.fHeight);
        }
#endif
        return false;
    }

    if (layer->isAtlased()) {
        // Hooray it is still in the atlas - make sure it stays there
        SkASSERT(!dontAtlas);
        layer->setLocked(true);
        // Pin the plot so it can't be purged while this layer is locked.
        fPlotLocks[layer->plot()->id()]++;
        return false;
    } else if (!dontAtlas && PlausiblyAtlasable(desc.fWidth, desc.fHeight)) {
        // Not in the atlas - will it fit?
        GrPictureInfo* pictInfo = fPictureHash.find(layer->pictureID());
        if (NULL == pictInfo) {
            // First atlased layer for this picture - start tracking its
            // plot usage.
            pictInfo = SkNEW_ARGS(GrPictureInfo, (layer->pictureID()));
            fPictureHash.add(pictInfo);
        }

        SkIPoint16 loc;
        for (int i = 0; i < 2; ++i) { // extra pass in case we fail to add but are able to purge
            GrPlot* plot = fAtlas->addToAtlas(&pictInfo->fPlotUsage,
                                              desc.fWidth, desc.fHeight,
                                              NULL, &loc);
            // addToAtlas can allocate the backing texture
            SkDEBUGCODE(avl.setBackingTexture(fAtlas->getTexture()));
            if (NULL != plot) {
                // The layer was successfully added to the atlas
                GrIRect16 bounds = GrIRect16::MakeXYWH(loc.fX, loc.fY,
                                                       SkToS16(desc.fWidth),
                                                       SkToS16(desc.fHeight));
                layer->setTexture(fAtlas->getTexture(), bounds);
                layer->setPlot(plot);
                layer->setLocked(true);
                fPlotLocks[layer->plot()->id()]++;
                return true;
            }

            // The layer was rejected by the atlas (even though we know it is
            // plausibly atlas-able). See if a plot can be purged and try again.
            if (!this->purgePlot()) {
                break;  // We weren't able to purge any plots
            }
        }
    }

    // The texture wouldn't fit in the cache - give it its own texture.
    // This path always uses a new scratch texture and (thus) doesn't cache anything.
    // This can yield a lot of re-rendering
    SkAutoTUnref<GrTexture> tex(fContext->lockAndRefScratchTexture(desc,
                                        GrContext::kApprox_ScratchTexMatch));

    layer->setTexture(tex, GrIRect16::MakeWH(SkToS16(desc.fWidth), SkToS16(desc.fHeight)));
    layer->setLocked(true);
    return true;
}
| 215 | 216 |
| 216 void GrLayerCache::unlock(GrCachedLayer* layer) { | 217 void GrLayerCache::unlock(GrCachedLayer* layer) { |
| 217 SkDEBUGCODE(GrAutoValidateLayer avl(fAtlas->getTexture(), layer);) | 218 SkDEBUGCODE(GrAutoValidateLayer avl(fAtlas->getTexture(), layer);) |
| 218 | 219 |
| 219 if (NULL == layer || !layer->locked()) { | 220 if (NULL == layer || !layer->locked()) { |
| 220 // invalid or not locked | 221 // invalid or not locked |
| 221 return; | 222 return; |
| 222 } | 223 } |
| 223 | 224 |
| 224 if (layer->isAtlased()) { | 225 if (layer->isAtlased()) { |
| 225 const int plotID = layer->plot()->id(); | 226 const int plotID = layer->plot()->id(); |
| 226 | 227 |
| 227 SkASSERT(fPlotLocks[plotID] > 0); | 228 SkASSERT(fPlotLocks[plotID] > 0); |
| 228 fPlotLocks[plotID]--; | 229 fPlotLocks[plotID]--; |
| 229 // At this point we could aggressively clear out un-locked plots but | 230 // At this point we could aggressively clear out un-locked plots but |
| 230 // by delaying we may be able to reuse some of the atlased layers later. | 231 // by delaying we may be able to reuse some of the atlased layers later. |
| 231 #if 0 | 232 #if DISABLE_CACHING |
| 232 // This testing code aggressively removes the atlased layers. This | 233 // This testing code aggressively removes the atlased layers. This |
| 233 // can be used to separate the performance contribution of less | 234 // can be used to separate the performance contribution of less |
| 234 // render target pingponging from that due to the re-use of cached layer
s | 235 // render target pingponging from that due to the re-use of cached layer
s |
| 235 GrPictureInfo* pictInfo = fPictureHash.find(layer->pictureID()); | 236 GrPictureInfo* pictInfo = fPictureHash.find(layer->pictureID()); |
| 236 SkASSERT(NULL != pictInfo); | 237 SkASSERT(NULL != pictInfo); |
| 237 | 238 |
| 238 GrAtlas::RemovePlot(&pictInfo->fPlotUsage, layer->plot()); | 239 GrAtlas::RemovePlot(&pictInfo->fPlotUsage, layer->plot()); |
| 239 | 240 |
| 240 layer->setPlot(NULL); | 241 layer->setPlot(NULL); |
| 241 layer->setTexture(NULL, GrIRect16::MakeEmpty()); | 242 layer->setTexture(NULL, GrIRect16::MakeEmpty()); |
| (...skipping 15 matching lines...) Expand all Loading... |
| 257 SkTDynamicHash<GrCachedLayer, GrCachedLayer::Key>::ConstIter iter(&fLayerHas
h); | 258 SkTDynamicHash<GrCachedLayer, GrCachedLayer::Key>::ConstIter iter(&fLayerHas
h); |
| 258 for (; !iter.done(); ++iter) { | 259 for (; !iter.done(); ++iter) { |
| 259 const GrCachedLayer* layer = &(*iter); | 260 const GrCachedLayer* layer = &(*iter); |
| 260 | 261 |
| 261 layer->validate(fAtlas->getTexture()); | 262 layer->validate(fAtlas->getTexture()); |
| 262 | 263 |
| 263 const GrPictureInfo* pictInfo = fPictureHash.find(layer->pictureID()); | 264 const GrPictureInfo* pictInfo = fPictureHash.find(layer->pictureID()); |
| 264 if (NULL != pictInfo) { | 265 if (NULL != pictInfo) { |
| 265 // In aggressive cleanup mode a picture info should only exist if | 266 // In aggressive cleanup mode a picture info should only exist if |
| 266 // it has some atlased layers | 267 // it has some atlased layers |
| 268 #if !DISABLE_CACHING |
| 267 SkASSERT(!pictInfo->fPlotUsage.isEmpty()); | 269 SkASSERT(!pictInfo->fPlotUsage.isEmpty()); |
| 270 #endif |
| 268 } else { | 271 } else { |
| 269 // If there is no picture info for this layer then all of its | 272 // If there is no picture info for this layer then all of its |
| 270 // layers should be non-atlased. | 273 // layers should be non-atlased. |
| 271 SkASSERT(!layer->isAtlased()); | 274 SkASSERT(!layer->isAtlased()); |
| 272 } | 275 } |
| 273 | 276 |
| 274 if (NULL != layer->plot()) { | 277 if (NULL != layer->plot()) { |
| 275 SkASSERT(NULL != pictInfo); | 278 SkASSERT(NULL != pictInfo); |
| 276 SkASSERT(pictInfo->fPictureID == layer->pictureID()); | 279 SkASSERT(pictInfo->fPictureID == layer->pictureID()); |
| 277 | 280 |
| (...skipping 56 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 334 | 337 |
| 335 GrAtlas::PlotIter iter; | 338 GrAtlas::PlotIter iter; |
| 336 GrPlot* plot; | 339 GrPlot* plot; |
| 337 for (plot = fAtlas->iterInit(&iter, GrAtlas::kLRUFirst_IterOrder); | 340 for (plot = fAtlas->iterInit(&iter, GrAtlas::kLRUFirst_IterOrder); |
| 338 NULL != plot; | 341 NULL != plot; |
| 339 plot = iter.prev()) { | 342 plot = iter.prev()) { |
| 340 if (fPlotLocks[plot->id()] > 0) { | 343 if (fPlotLocks[plot->id()] > 0) { |
| 341 continue; | 344 continue; |
| 342 } | 345 } |
| 343 | 346 |
| 344 // We need to find all the layers in 'plot' and remove them. | 347 this->purgePlot(plot); |
| 345 SkTDArray<GrCachedLayer*> toBeRemoved; | |
| 346 | |
| 347 SkTDynamicHash<GrCachedLayer, GrCachedLayer::Key>::Iter iter(&fLayerHash
); | |
| 348 for (; !iter.done(); ++iter) { | |
| 349 if (plot == (*iter).plot()) { | |
| 350 *toBeRemoved.append() = &(*iter); | |
| 351 } | |
| 352 } | |
| 353 | |
| 354 for (int i = 0; i < toBeRemoved.count(); ++i) { | |
| 355 SkASSERT(!toBeRemoved[i]->locked()); | |
| 356 | |
| 357 GrPictureInfo* pictInfo = fPictureHash.find(toBeRemoved[i]->pictureI
D()); | |
| 358 SkASSERT(NULL != pictInfo); | |
| 359 | |
| 360 GrAtlas::RemovePlot(&pictInfo->fPlotUsage, plot); | |
| 361 | |
| 362 // Aggressively remove layers and, if now totally uncached, picture
info | |
| 363 fLayerHash.remove(GrCachedLayer::GetKey(*toBeRemoved[i])); | |
| 364 SkDELETE(toBeRemoved[i]); | |
| 365 | |
| 366 if (pictInfo->fPlotUsage.isEmpty()) { | |
| 367 fPictureHash.remove(pictInfo->fPictureID); | |
| 368 SkDELETE(pictInfo); | |
| 369 } | |
| 370 } | |
| 371 | |
| 372 plot->resetRects(); | |
| 373 return true; | 348 return true; |
| 374 } | 349 } |
| 375 | 350 |
| 376 return false; | 351 return false; |
| 377 } | 352 } |
| 378 | 353 |
// Remove every layer that lives in 'plot' from the cache and, for any
// picture left with no atlased layers afterwards, drop its GrPictureInfo
// too. The plot must be completely unlocked before it can be purged.
void GrLayerCache::purgePlot(GrPlot* plot) {
    SkASSERT(0 == fPlotLocks[plot->id()]);

    // We need to find all the layers in 'plot' and remove them.
    // Collect first, delete second: removing entries while the hash
    // iterator is live would invalidate the iteration.
    SkTDArray<GrCachedLayer*> toBeRemoved;

    SkTDynamicHash<GrCachedLayer, GrCachedLayer::Key>::Iter iter(&fLayerHash);
    for (; !iter.done(); ++iter) {
        if (plot == (*iter).plot()) {
            *toBeRemoved.append() = &(*iter);
        }
    }

    for (int i = 0; i < toBeRemoved.count(); ++i) {
        SkASSERT(!toBeRemoved[i]->locked());

        GrPictureInfo* pictInfo = fPictureHash.find(toBeRemoved[i]->pictureID());
        SkASSERT(NULL != pictInfo);

        GrAtlas::RemovePlot(&pictInfo->fPlotUsage, plot);

        // Aggressively remove layers and, if now totally uncached, picture info
        fLayerHash.remove(GrCachedLayer::GetKey(*toBeRemoved[i]));
        SkDELETE(toBeRemoved[i]);

        if (pictInfo->fPlotUsage.isEmpty()) {
            // NOTE(review): if two collected layers share a pictureID and the
            // pictInfo is deleted here on the first one, the next iteration's
            // find() would return NULL and trip the assert above - confirm
            // RemovePlot/isEmpty semantics make that impossible.
            fPictureHash.remove(pictInfo->fPictureID);
            SkDELETE(pictInfo);
        }
    }

    // Reclaim the plot's rectangle space for future atlas insertions.
    plot->resetRects();
}
| 387 |
| 388 void GrLayerCache::purgeAll() { |
| 389 GrAtlas::PlotIter iter; |
| 390 GrPlot* plot; |
| 391 for (plot = fAtlas->iterInit(&iter, GrAtlas::kLRUFirst_IterOrder); |
| 392 NULL != plot; |
| 393 plot = iter.prev()) { |
| 394 SkASSERT(0 == fPlotLocks[plot->id()]); |
| 395 |
| 396 this->purgePlot(plot); |
| 397 } |
| 398 } |
| 399 |
// Forwards SkPicture deletion notifications onto the message bus; each
// GrLayerCache drains these messages in processDeletedPictures().
class GrPictureDeletionListener : public SkPicture::DeletionListener {
    virtual void onDeletion(uint32_t pictureID) SK_OVERRIDE {
        // Post a message rather than purging directly - NOTE(review): this
        // presumably decouples the deleting context/thread from the cache;
        // confirm against SkMessageBus usage conventions.
        const GrPictureDeletedMessage message = { pictureID };
        SkMessageBus<GrPictureDeletedMessage>::Post(message);
    }
};
| 385 | 406 |
| 386 void GrLayerCache::trackPicture(const SkPicture* picture) { | 407 void GrLayerCache::trackPicture(const SkPicture* picture) { |
| 387 if (NULL == fDeletionListener) { | 408 if (NULL == fDeletionListener) { |
| 388 fDeletionListener.reset(SkNEW(GrPictureDeletionListener)); | 409 fDeletionListener.reset(SkNEW(GrPictureDeletionListener)); |
| 389 } | 410 } |
| 390 | 411 |
| 391 picture->addDeletionListener(fDeletionListener); | 412 picture->addDeletionListener(fDeletionListener); |
| 392 } | 413 } |
| 393 | 414 |
| 394 void GrLayerCache::processDeletedPictures() { | 415 void GrLayerCache::processDeletedPictures() { |
| 395 SkTDArray<GrPictureDeletedMessage> deletedPictures; | 416 SkTDArray<GrPictureDeletedMessage> deletedPictures; |
| 396 fPictDeletionInbox.poll(&deletedPictures); | 417 fPictDeletionInbox.poll(&deletedPictures); |
| 397 | 418 |
| 398 for (int i = 0; i < deletedPictures.count(); i++) { | 419 for (int i = 0; i < deletedPictures.count(); i++) { |
| 399 this->purge(deletedPictures[i].pictureID); | 420 this->purge(deletedPictures[i].pictureID); |
| 400 } | 421 } |
| 401 } | 422 } |
| 402 | 423 |
| OLD | NEW |