| OLD | NEW |
| 1 | 1 |
| 2 /* | 2 /* |
| 3 * Copyright 2011 Google Inc. | 3 * Copyright 2011 Google Inc. |
| 4 * | 4 * |
| 5 * Use of this source code is governed by a BSD-style license that can be | 5 * Use of this source code is governed by a BSD-style license that can be |
| 6 * found in the LICENSE file. | 6 * found in the LICENSE file. |
| 7 */ | 7 */ |
| 8 | 8 |
| 9 | 9 |
| 10 #include "GrContext.h" | 10 #include "GrContext.h" |
| (...skipping 429 matching lines...) |
| 440 GrTexture* GrContext::lockAndRefScratchTexture(const GrTextureDesc& inDesc, ScratchTexMatch match) { | 440 GrTexture* GrContext::lockAndRefScratchTexture(const GrTextureDesc& inDesc, ScratchTexMatch match) { |
| 441 | 441 |
| 442 SkASSERT((inDesc.fFlags & kRenderTarget_GrTextureFlagBit) || | 442 SkASSERT((inDesc.fFlags & kRenderTarget_GrTextureFlagBit) || |
| 443 !(inDesc.fFlags & kNoStencil_GrTextureFlagBit)); | 443 !(inDesc.fFlags & kNoStencil_GrTextureFlagBit)); |
| 444 | 444 |
| 445 // Renderable A8 targets are not universally supported (e.g., not on ANGLE) | 445 // Renderable A8 targets are not universally supported (e.g., not on ANGLE) |
| 446 SkASSERT(this->isConfigRenderable(kAlpha_8_GrPixelConfig, inDesc.fSampleCnt > 0) || | 446 SkASSERT(this->isConfigRenderable(kAlpha_8_GrPixelConfig, inDesc.fSampleCnt > 0) || |
| 447 !(inDesc.fFlags & kRenderTarget_GrTextureFlagBit) || | 447 !(inDesc.fFlags & kRenderTarget_GrTextureFlagBit) || |
| 448 (inDesc.fConfig != kAlpha_8_GrPixelConfig)); | 448 (inDesc.fConfig != kAlpha_8_GrPixelConfig)); |
| 449 | 449 |
| 450 if (!fGpu->caps()->reuseScratchTextures()) { | 450 if (!fGpu->caps()->reuseScratchTextures() && |
| 451 // If we're never recycling scratch textures we can | 451 !(inDesc.fFlags & kRenderTarget_GrTextureFlagBit)) { |
| 452 // always make them the right size | 452 // If we're never recycling this texture we can always make it the right size |
| 453 return create_scratch_texture(fGpu, fTextureCache, inDesc); | 453 return create_scratch_texture(fGpu, fTextureCache, inDesc); |
| 454 } | 454 } |
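The NEW condition at rows 450-451 narrows the exact-size fast path: it now applies only to scratch textures that are not render targets, since render-target scratch textures are recycled even when reuseScratchTextures() is false (see the asRenderTarget() checks at rows 517 and 545 below). A minimal sketch of the revised predicate, using simplified hypothetical names rather than the actual Skia signatures:

    // Sketch of the early-out decision; names are illustrative, not Skia's API.
    bool takesExactSizePath(bool reuseScratchTextures, bool isRenderTarget) {
        // OLD condition: !reuseScratchTextures
        // NEW condition: additionally require a non-render-target texture,
        // because render-target scratch textures are now always recycled.
        return !reuseScratchTextures && !isRenderTarget;
    }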
| 455 | 455 |
| 456 GrTextureDesc desc = inDesc; | 456 GrTextureDesc desc = inDesc; |
| 457 | 457 |
| 458 if (kApprox_ScratchTexMatch == match) { | 458 if (kApprox_ScratchTexMatch == match) { |
| 459 // bin by pow2 with a reasonable min | 459 // bin by pow2 with a reasonable min |
| 460 static const int MIN_SIZE = 16; | 460 static const int MIN_SIZE = 16; |
| 461 desc.fWidth = GrMax(MIN_SIZE, GrNextPow2(desc.fWidth)); | 461 desc.fWidth = GrMax(MIN_SIZE, GrNextPow2(desc.fWidth)); |
| 462 desc.fHeight = GrMax(MIN_SIZE, GrNextPow2(desc.fHeight)); | 462 desc.fHeight = GrMax(MIN_SIZE, GrNextPow2(desc.fHeight)); |
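For kApprox_ScratchTexMatch, the requested dimensions are rounded up to the next power of two with a 16-pixel floor, so differently sized requests fall into a small number of bins and are more likely to hit an existing cache entry. A worked sketch of the binning, assuming GrNextPow2 rounds up to the next power of two (the helper below is an illustrative stand-in, not Skia's implementation):

    #include <cstdint>

    // Illustrative stand-in for GrNextPow2: round n up to the next power of 2.
    static uint32_t nextPow2(uint32_t n) {
        uint32_t p = 1;
        while (p < n) { p <<= 1; }
        return p;
    }

    // Binning as in kApprox_ScratchTexMatch: a 100x60 request becomes a
    // 128x64 scratch texture; a 10x5 request is clamped up to 16x16.
    static const uint32_t MIN_SIZE = 16;
    uint32_t binDim(uint32_t d) {
        uint32_t p = nextPow2(d);
        return p < MIN_SIZE ? MIN_SIZE : p;
    }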
| (...skipping 44 matching lines...) |
| 507 } | 507 } |
| 508 | 508 |
| 509 // This texture should already have a cache entry since it was once | 509 // This texture should already have a cache entry since it was once |
| 510 // attached | 510 // attached |
| 511 SkASSERT(NULL != texture->getCacheEntry()); | 511 SkASSERT(NULL != texture->getCacheEntry()); |
| 512 | 512 |
| 513 // Conceptually, the cache entry is going to assume responsibility | 513 // Conceptually, the cache entry is going to assume responsibility |
| 514 // for the creation ref. Assert refcnt == 1. | 514 // for the creation ref. Assert refcnt == 1. |
| 515 SkASSERT(texture->unique()); | 515 SkASSERT(texture->unique()); |
| 516 | 516 |
| 517 if (fGpu->caps()->reuseScratchTextures()) { | 517 if (fGpu->caps()->reuseScratchTextures() || NULL != texture->asRenderTarget()) { |
| 518 // Since this texture came from an AutoScratchTexture it should | 518 // Since this texture came from an AutoScratchTexture it should |
| 519 // still be in the exclusive pile. Recycle it. | 519 // still be in the exclusive pile. Recycle it. |
| 520 fTextureCache->makeNonExclusive(texture->getCacheEntry()); | 520 fTextureCache->makeNonExclusive(texture->getCacheEntry()); |
| 521 this->purgeCache(); | 521 this->purgeCache(); |
| 522 } else if (texture->getDeferredRefCount() <= 0) { | 522 } else if (texture->getDeferredRefCount() <= 0) { |
| 523 // When we aren't reusing textures we know this scratch texture | 523 // When we aren't reusing textures we know this scratch texture |
| 524 // will never be reused and would be just wasting time in the cache | 524 // will never be reused and would be just wasting time in the cache |
| 525 fTextureCache->makeNonExclusive(texture->getCacheEntry()); | 525 fTextureCache->makeNonExclusive(texture->getCacheEntry()); |
| 526 fTextureCache->deleteResource(texture->getCacheEntry()); | 526 fTextureCache->deleteResource(texture->getCacheEntry()); |
| 527 } else { | 527 } else { |
| 528 // In this case (fDeferredRefCount > 0) the cache is the only | 528 // In this case (fDeferredRefCount > 0) the cache is the only |
| 529 // one holding a real ref. Mark the object so when the deferred | 529 // one holding a real ref. Mark the object so when the deferred |
| 530 // ref count goes to 0 the texture will be deleted (remember | 530 // ref count goes to 0 the texture will be deleted (remember |
| 531 // in this code path scratch textures aren't getting reused). | 531 // in this code path scratch textures aren't getting reused). |
| 532 texture->setNeedsDeferredUnref(); | 532 texture->setNeedsDeferredUnref(); |
| 533 } | 533 } |
| 534 } | 534 } |
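Rows 517-533 amount to a three-way disposition for a scratch texture returning from exclusive use: recycle it, delete it immediately, or mark it for deletion once its deferred refs drain. A hedged summary of that control flow, with simplified hypothetical names rather than Skia's actual API:

    // Sketch of the disposition logic in rows 517-533; illustrative only.
    enum class Disposition { kRecycle, kDeleteNow, kDeferDelete };

    Disposition disposeScratch(bool reuseScratch, bool isRenderTarget,
                               int deferredRefCount) {
        if (reuseScratch || isRenderTarget) {
            // NEW: render targets are recycled even when reuse is disabled.
            return Disposition::kRecycle;    // makeNonExclusive + purgeCache
        }
        if (deferredRefCount <= 0) {
            return Disposition::kDeleteNow;  // makeNonExclusive + deleteResource
        }
        return Disposition::kDeferDelete;    // setNeedsDeferredUnref
    }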
| 535 | 535 |
| 536 | 536 |
| 537 void GrContext::unlockScratchTexture(GrTexture* texture) { | 537 void GrContext::unlockScratchTexture(GrTexture* texture) { |
| 538 ASSERT_OWNED_RESOURCE(texture); | 538 ASSERT_OWNED_RESOURCE(texture); |
| 539 SkASSERT(NULL != texture->getCacheEntry()); | 539 SkASSERT(NULL != texture->getCacheEntry()); |
| 540 | 540 |
| 541 // If this is a scratch texture we detached it from the cache | 541 // If this is a scratch texture we detached it from the cache |
| 542 // while it was locked (to avoid two callers simultaneously getting | 542 // while it was locked (to avoid two callers simultaneously getting |
| 543 // the same texture). | 543 // the same texture). |
| 544 if (texture->getCacheEntry()->key().isScratch()) { | 544 if (texture->getCacheEntry()->key().isScratch()) { |
| 545 if (fGpu->caps()->reuseScratchTextures()) { | 545 if (fGpu->caps()->reuseScratchTextures() || NULL != texture->asRenderTarget()) { |
| 546 fTextureCache->makeNonExclusive(texture->getCacheEntry()); | 546 fTextureCache->makeNonExclusive(texture->getCacheEntry()); |
| 547 this->purgeCache(); | 547 this->purgeCache(); |
| 548 } else if (texture->unique() && texture->getDeferredRefCount() <= 0) { | 548 } else if (texture->unique() && texture->getDeferredRefCount() <= 0) { |
| 549 // Only the cache now knows about this texture. Since we're never | 549 // Only the cache now knows about this texture. Since we're never |
| 550 // reusing scratch textures (in this code path) it would just be | 550 // reusing scratch textures (in this code path) it would just be |
| 551 // wasting time sitting in the cache. | 551 // wasting time sitting in the cache. |
| 552 fTextureCache->makeNonExclusive(texture->getCacheEntry()); | 552 fTextureCache->makeNonExclusive(texture->getCacheEntry()); |
| 553 fTextureCache->deleteResource(texture->getCacheEntry()); | 553 fTextureCache->deleteResource(texture->getCacheEntry()); |
| 554 } else { | 554 } else { |
| 555 // In this case (fRefCnt > 1 || defRefCnt > 0) but we don't really | 555 // In this case (fRefCnt > 1 || defRefCnt > 0) but we don't really |
| (...skipping 1253 matching lines...) |
| 1809 return NULL; | 1809 return NULL; |
| 1810 } | 1810 } |
| 1811 } | 1811 } |
| 1812 | 1812 |
| 1813 /////////////////////////////////////////////////////////////////////////////// | 1813 /////////////////////////////////////////////////////////////////////////////// |
| 1814 #if GR_CACHE_STATS | 1814 #if GR_CACHE_STATS |
| 1815 void GrContext::printCacheStats() const { | 1815 void GrContext::printCacheStats() const { |
| 1816 fTextureCache->printStats(); | 1816 fTextureCache->printStats(); |
| 1817 } | 1817 } |
| 1818 #endif | 1818 #endif |