OLD | NEW |
1 /* | 1 /* |
2 * Copyright 2014 Google Inc. | 2 * Copyright 2014 Google Inc. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license that can be | 4 * Use of this source code is governed by a BSD-style license that can be |
5 * found in the LICENSE file. | 5 * found in the LICENSE file. |
6 */ | 6 */ |
7 | 7 |
8 #if SK_SUPPORT_GPU | 8 #if SK_SUPPORT_GPU |
9 | 9 |
10 #include "GrContext.h" | 10 #include "GrContext.h" |
11 #include "GrContextFactory.h" | 11 #include "GrContextFactory.h" |
12 #include "GrLayerCache.h" | 12 #include "GrLayerCache.h" |
| 13 #include "GrResourceCache.h" |
13 #include "SkPictureRecorder.h" | 14 #include "SkPictureRecorder.h" |
14 #include "Test.h" | 15 #include "Test.h" |
15 | 16 |
16 class TestingAccess { | 17 class TestingAccess { |
17 public: | 18 public: |
| 19 static int NumPlots() { |
| 20 return GrLayerCache::kNumPlotsX * GrLayerCache::kNumPlotsY; |
| 21 } |
| 22 static SkISize PlotSize() { |
 | 23 return SkISize::Make(GrLayerCache::kAtlasTextureWidth / GrLayerCache::kNumPlotsX, |
 | 24 GrLayerCache::kAtlasTextureHeight / GrLayerCache::kNumPlotsY); |
| 25 } |
| 26 |
| 27 static GrTexture* GetBackingTexture(GrLayerCache* cache) { |
| 28 return cache->fAtlas->getTextureOrNull(); |
| 29 } |
| 30 |
18 static int NumLayers(GrLayerCache* cache) { | 31 static int NumLayers(GrLayerCache* cache) { |
19 return cache->numLayers(); | 32 return cache->numLayers(); |
20 } | 33 } |
21 static void Purge(GrLayerCache* cache, uint32_t pictureID) { | 34 static void Purge(GrLayerCache* cache, uint32_t pictureID) { |
22 cache->purge(pictureID); | 35 cache->purge(pictureID); |
23 } | 36 } |
24 static int Uses(GrCachedLayer* layer) { | 37 static int Uses(GrCachedLayer* layer) { |
25 return layer->uses(); | 38 return layer->uses(); |
26 } | 39 } |
27 static GrCachedLayer* Find(GrLayerCache* cache, uint32_t pictureID, | 40 static GrCachedLayer* Find(GrLayerCache* cache, uint32_t pictureID, |
28 const SkMatrix& initialMat, | 41 const SkMatrix& initialMat, |
29 const int* key, int keySize) { | 42 const int* key, int keySize) { |
30 return cache->findLayer(pictureID, initialMat, key, keySize); | 43 return cache->findLayer(pictureID, initialMat, key, keySize); |
31 } | 44 } |
32 }; | 45 }; |
33 | 46 |
34 // Add several layers to the cache | 47 // Add several layers to the cache |
35 static void create_layers(skiatest::Reporter* reporter, | 48 static void create_layers(skiatest::Reporter* reporter, |
36 GrLayerCache* cache, | 49 GrLayerCache* cache, |
37 const SkPicture& picture, | 50 const SkPicture& picture, |
38 int numToAdd, | 51 int numToAdd, |
39 int idOffset) { | 52 int idOffset) { |
40 | 53 |
41 for (int i = 0; i < numToAdd; ++i) { | 54 for (int i = 0; i < numToAdd; ++i) { |
42 int indices[1] = { idOffset+i+1 }; | 55 int key[1] = { idOffset+i+1 }; |
43 GrCachedLayer* layer = cache->findLayerOrCreate(picture.uniqueID(), | 56 GrCachedLayer* layer = cache->findLayerOrCreate(picture.uniqueID(), |
44 idOffset+i+1, idOffset+i+2, | 57 idOffset+i+1, idOffset+i+2, |
45 SkIRect::MakeEmpty(), | 58 SkIRect::MakeEmpty(), |
46 SkIRect::MakeEmpty(), | 59 SkIRect::MakeEmpty(), |
47 SkMatrix::I(), | 60 SkMatrix::I(), |
48 indices, 1, | 61 key, 1, |
49 nullptr); | 62 nullptr); |
50 REPORTER_ASSERT(reporter, layer); | 63 REPORTER_ASSERT(reporter, layer); |
51 GrCachedLayer* temp = TestingAccess::Find(cache, picture.uniqueID(), SkMatrix::I(), | 64 GrCachedLayer* temp = TestingAccess::Find(cache, picture.uniqueID(), SkMatrix::I(), |
52 indices, 1); | 65 key, 1); |
53 REPORTER_ASSERT(reporter, temp == layer); | 66 REPORTER_ASSERT(reporter, temp == layer); |
54 | 67 |
55 REPORTER_ASSERT(reporter, TestingAccess::NumLayers(cache) == idOffset + i + 1); | 68 REPORTER_ASSERT(reporter, TestingAccess::NumLayers(cache) == idOffset + i + 1); |
56 | 69 |
57 REPORTER_ASSERT(reporter, picture.uniqueID() == layer->pictureID()); | 70 REPORTER_ASSERT(reporter, picture.uniqueID() == layer->pictureID()); |
58 REPORTER_ASSERT(reporter, layer->start() == idOffset + i + 1); | 71 REPORTER_ASSERT(reporter, layer->start() == idOffset + i + 1); |
59 REPORTER_ASSERT(reporter, layer->stop() == idOffset + i + 2); | 72 REPORTER_ASSERT(reporter, layer->stop() == idOffset + i + 2); |
60 REPORTER_ASSERT(reporter, nullptr == layer->texture()); | 73 REPORTER_ASSERT(reporter, nullptr == layer->texture()); |
61 REPORTER_ASSERT(reporter, nullptr == layer->paint()); | 74 REPORTER_ASSERT(reporter, nullptr == layer->paint()); |
62 REPORTER_ASSERT(reporter, !layer->isAtlased()); | 75 REPORTER_ASSERT(reporter, !layer->isAtlased()); |
63 } | 76 } |
64 } | 77 } |
65 | 78 |
66 static void lock_layer(skiatest::Reporter* reporter, | 79 static void lock_layer(skiatest::Reporter* reporter, |
67 GrLayerCache* cache, | 80 GrLayerCache* cache, |
68 GrCachedLayer* layer) { | 81 GrCachedLayer* layer) { |
69 // Make the layer 512x512 (so it can be atlased) | 82 // Make each layer big enough to consume one whole plot in the atlas |
70 GrSurfaceDesc desc; | 83 GrSurfaceDesc desc; |
71 desc.fWidth = 512; | 84 desc.fFlags = kRenderTarget_GrSurfaceFlag; |
72 desc.fHeight = 512; | 85 desc.fWidth = TestingAccess::PlotSize().fWidth; |
| 86 desc.fHeight = TestingAccess::PlotSize().fHeight; |
73 desc.fConfig = kSkia8888_GrPixelConfig; | 87 desc.fConfig = kSkia8888_GrPixelConfig; |
74 | 88 |
75 bool needsRerendering; | 89 bool needsRerendering; |
76 bool inAtlas = cache->tryToAtlas(layer, desc, &needsRerendering); | 90 bool inAtlas = cache->tryToAtlas(layer, desc, &needsRerendering); |
77 if (!inAtlas) { | 91 if (!inAtlas) { |
78 cache->lock(layer, desc, &needsRerendering); | 92 cache->lock(layer, desc, &needsRerendering); |
79 } | 93 } |
80 REPORTER_ASSERT(reporter, needsRerendering); | 94 REPORTER_ASSERT(reporter, needsRerendering); |
81 | 95 |
82 cache->lock(layer, desc, &needsRerendering); | 96 cache->lock(layer, desc, &needsRerendering); |
83 REPORTER_ASSERT(reporter, !needsRerendering); | 97 REPORTER_ASSERT(reporter, !needsRerendering); |
84 | 98 |
85 REPORTER_ASSERT(reporter, layer->texture()); | 99 REPORTER_ASSERT(reporter, layer->texture()); |
86 REPORTER_ASSERT(reporter, layer->locked()); | 100 REPORTER_ASSERT(reporter, layer->locked()); |
87 | 101 |
88 cache->addUse(layer); | 102 cache->addUse(layer); |
89 | 103 |
90 REPORTER_ASSERT(reporter, 1 == TestingAccess::Uses(layer)); | 104 REPORTER_ASSERT(reporter, 1 == TestingAccess::Uses(layer)); |
91 } | 105 } |
92 | 106 |
93 // This test case exercises the public API of the GrLayerCache class. | 107 // This test case exercises the public API of the GrLayerCache class. |
94 // In particular it checks its interaction with the resource cache (w.r.t. | 108 // In particular it checks its interaction with the resource cache (w.r.t. |
95 // locking & unlocking textures). | 109 // locking & unlocking textures). |
96 // TODO: need to add checks on VRAM usage! | 110 // TODO: need to add checks on VRAM usage! |
97 DEF_GPUTEST(GpuLayerCache, reporter, factory) { | 111 DEF_GPUTEST(GpuLayerCache, reporter, factory) { |
98 static const int kInitialNumLayers = 5; | 112 // Add one more layer than can fit in the atlas |
| 113 static const int kInitialNumLayers = TestingAccess::NumPlots() + 1; |
99 | 114 |
100 for (int i= 0; i < GrContextFactory::kGLContextTypeCnt; ++i) { | 115 #if GR_CACHE_STATS |
| 116 GrResourceCache::Stats stats; |
| 117 #endif |
| 118 |
| 119 for (int i = 0; i < GrContextFactory::kGLContextTypeCnt; ++i) { |
101 GrContextFactory::GLContextType glCtxType = (GrContextFactory::GLContextType) i; | 120 GrContextFactory::GLContextType glCtxType = (GrContextFactory::GLContextType) i; |
102 | 121 |
103 if (!GrContextFactory::IsRenderingGLContext(glCtxType)) { | 122 if (!GrContextFactory::IsRenderingGLContext(glCtxType)) { |
104 continue; | 123 continue; |
105 } | 124 } |
106 | 125 |
107 GrContext* context = factory->get(glCtxType); | 126 GrContext* context = factory->get(glCtxType); |
108 | 127 |
109 if (nullptr == context) { | 128 if (nullptr == context) { |
110 continue; | 129 continue; |
111 } | 130 } |
112 | 131 |
113 SkPictureRecorder recorder; | 132 SkAutoTUnref<const SkPicture> picture; |
114 SkCanvas* c = recorder.beginRecording(1, 1); | 133 |
115 // Draw something, anything, to prevent an empty-picture optimization, | 134 { |
116 // which is a singleton and never purged. | 135 SkPictureRecorder recorder; |
117 c->drawRect(SkRect::MakeWH(1,1), SkPaint()); | 136 SkCanvas* c = recorder.beginRecording(1, 1); |
118 SkAutoTUnref<const SkPicture> picture(recorder.endRecording()); | 137 // Draw something, anything, to prevent an empty-picture optimization, |
| 138 // which is a singleton and never purged. |
| 139 c->drawRect(SkRect::MakeWH(1,1), SkPaint()); |
| 140 picture.reset(recorder.endRecording()); |
| 141 } |
| 142 |
| 143 GrResourceCache* resourceCache = context->getResourceCache(); |
119 | 144 |
120 GrLayerCache cache(context); | 145 GrLayerCache cache(context); |
121 | 146 |
122 create_layers(reporter, &cache, *picture, kInitialNumLayers, 0); | 147 create_layers(reporter, &cache, *picture, kInitialNumLayers, 0); |
123 | 148 |
124 for (int i = 0; i < kInitialNumLayers; ++i) { | 149 for (int i = 0; i < kInitialNumLayers; ++i) { |
125 int indices[1] = { i + 1 }; | 150 int key[1] = { i + 1 }; |
126 GrCachedLayer* layer = TestingAccess::Find(&cache, picture->uniqueID(), SkMatrix::I(), | 151 GrCachedLayer* layer = TestingAccess::Find(&cache, picture->uniqueID(), SkMatrix::I(), |
127 indices, 1); | 152 key, 1); |
128 REPORTER_ASSERT(reporter, layer); | 153 REPORTER_ASSERT(reporter, layer); |
129 | 154 |
130 lock_layer(reporter, &cache, layer); | 155 lock_layer(reporter, &cache, layer); |
131 | 156 |
132 // The first 4 layers should be in the atlas (and thus have non-empty | 157 #if GR_CACHE_STATS |
133 // rects) | 158 resourceCache->getStats(&stats); |
134 if (i < 4) { | 159 #endif |
 | 160 |
 | 161 // The first 4 layers should be in the atlas (and thus have non-empty rects) |
 | 162 if (i < TestingAccess::NumPlots()) { |
135 REPORTER_ASSERT(reporter, layer->isAtlased()); | 163 REPORTER_ASSERT(reporter, layer->isAtlased()); |
| 164 #if GR_CACHE_STATS |
| 165 REPORTER_ASSERT(reporter, 1 == stats.fTotal); |
| 166 #endif |
136 } else { | 167 } else { |
137 // The 5th layer couldn't fit in the atlas | 168 // The 5th layer couldn't fit in the atlas |
138 REPORTER_ASSERT(reporter, !layer->isAtlased()); | 169 REPORTER_ASSERT(reporter, !layer->isAtlased()); |
| 170 #if GR_CACHE_STATS |
| 171 REPORTER_ASSERT(reporter, 2 == stats.fTotal); |
| 172 #endif |
139 } | 173 } |
140 } | 174 } |
141 | 175 |
142 // Unlock the textures | 176 // Unlock the textures |
143 for (int i = 0; i < kInitialNumLayers; ++i) { | 177 for (int i = 0; i < kInitialNumLayers; ++i) { |
144 int indices[1] = { i+1 }; | 178 int key[1] = { i+1 }; |
145 | 179 |
146 GrCachedLayer* layer = TestingAccess::Find(&cache, picture->uniqueID(), SkMatrix::I(), | 180 GrCachedLayer* layer = TestingAccess::Find(&cache, picture->uniqueID(), SkMatrix::I(), |
147 indices, 1); | 181 key, 1); |
148 REPORTER_ASSERT(reporter, layer); | 182 REPORTER_ASSERT(reporter, layer); |
149 cache.removeUse(layer); | 183 cache.removeUse(layer); |
150 } | 184 } |
151 | 185 |
| 186 #if GR_CACHE_STATS |
| 187 resourceCache->getStats(&stats); |
| 188 REPORTER_ASSERT(reporter, 2 == stats.fTotal); |
 | 189 // The floating layer is purgeable; the atlas is not |
| 190 REPORTER_ASSERT(reporter, 1 == stats.fNumPurgeable); |
| 191 REPORTER_ASSERT(reporter, 1 == stats.fNumNonPurgeable); |
| 192 #endif |
| 193 |
152 for (int i = 0; i < kInitialNumLayers; ++i) { | 194 for (int i = 0; i < kInitialNumLayers; ++i) { |
153 int indices[1] = { i+1 }; | 195 int key[1] = { i+1 }; |
154 | 196 |
155 GrCachedLayer* layer = TestingAccess::Find(&cache, picture->uniqueID(), SkMatrix::I(), | 197 GrCachedLayer* layer = TestingAccess::Find(&cache, picture->uniqueID(), SkMatrix::I(), |
156 indices, 1); | 198 key, 1); |
157 REPORTER_ASSERT(reporter, layer); | 199 REPORTER_ASSERT(reporter, layer); |
158 | 200 |
159 // All the layers should be unlocked | 201 // All the layers should be unlocked |
160 REPORTER_ASSERT(reporter, !layer->locked()); | 202 REPORTER_ASSERT(reporter, !layer->locked()); |
161 | 203 |
162 // When hoisted layers aren't cached they are aggressively removed | 204 // When hoisted layers aren't cached they are aggressively removed |
163 // from the atlas | 205 // from the atlas |
164 #if GR_CACHE_HOISTED_LAYERS | 206 #if GR_CACHE_HOISTED_LAYERS |
165 // The first 4 layers should still be in the atlas. | 207 // The first 4 layers should still be in the atlas. |
166 if (i < 4) { | 208 if (i < 4) { |
167 REPORTER_ASSERT(reporter, layer->texture()); | 209 REPORTER_ASSERT(reporter, layer->texture()); |
168 REPORTER_ASSERT(reporter, layer->isAtlased()); | 210 REPORTER_ASSERT(reporter, layer->isAtlased()); |
169 } else { | 211 } else { |
170 #endif | 212 #endif |
171 // The final layer should not be atlased. | 213 // The final layer should not be atlased. |
172 REPORTER_ASSERT(reporter, nullptr == layer->texture()); | 214 REPORTER_ASSERT(reporter, nullptr == layer->texture()); |
173 REPORTER_ASSERT(reporter, !layer->isAtlased()); | 215 REPORTER_ASSERT(reporter, !layer->isAtlased()); |
174 #if GR_CACHE_HOISTED_LAYERS | 216 #if GR_CACHE_HOISTED_LAYERS |
175 } | 217 } |
176 #endif | 218 #endif |
177 } | 219 } |
178 | 220 |
| 221 // Let go of the backing texture |
| 222 cache.end(); |
 | 223 REPORTER_ASSERT(reporter, nullptr == TestingAccess::GetBackingTexture(&cache)); |
| 224 |
| 225 #if GR_CACHE_STATS |
| 226 resourceCache->getStats(&stats); |
| 227 REPORTER_ASSERT(reporter, 2 == stats.fTotal); |
| 228 // Now both the floater and the atlas are purgeable |
| 229 REPORTER_ASSERT(reporter, 2 == stats.fNumPurgeable); |
| 230 #endif |
| 231 |
| 232 // re-attach to the backing texture |
| 233 cache.begin(); |
| 234 REPORTER_ASSERT(reporter, TestingAccess::GetBackingTexture(&cache)); |
| 235 |
| 236 #if GR_CACHE_STATS |
| 237 resourceCache->getStats(&stats); |
| 238 REPORTER_ASSERT(reporter, 2 == stats.fTotal); |
| 239 // The atlas is restored to being non-purgeable |
| 240 REPORTER_ASSERT(reporter, 1 == stats.fNumPurgeable); |
| 241 REPORTER_ASSERT(reporter, 1 == stats.fNumNonPurgeable); |
| 242 #endif |
| 243 |
179 { | 244 { |
180 int indices[1] = { kInitialNumLayers+1 }; | 245 int key[1] = { kInitialNumLayers+1 }; |
181 | 246 |
182 // Add an additional layer. Since all the layers are unlocked this | 247 // Add an additional layer. Since all the layers are unlocked this |
183 // will force out the first atlased layer | 248 // will force out the first atlased layer |
184 create_layers(reporter, &cache, *picture, 1, kInitialNumLayers); | 249 create_layers(reporter, &cache, *picture, 1, kInitialNumLayers); |
185 GrCachedLayer* layer = TestingAccess::Find(&cache, picture->uniqueID(), SkMatrix::I(), | 250 GrCachedLayer* layer = TestingAccess::Find(&cache, picture->uniqueID(), SkMatrix::I(), |
186 indices, 1); | 251 key, 1); |
187 REPORTER_ASSERT(reporter, layer); | 252 REPORTER_ASSERT(reporter, layer); |
188 | 253 |
189 lock_layer(reporter, &cache, layer); | 254 lock_layer(reporter, &cache, layer); |
190 cache.removeUse(layer); | 255 cache.removeUse(layer); |
191 } | 256 } |
192 | 257 |
193 for (int i = 0; i < kInitialNumLayers+1; ++i) { | 258 for (int i = 0; i < kInitialNumLayers+1; ++i) { |
194 int indices[1] = { i+1 }; | 259 int key[1] = { i+1 }; |
195 | 260 |
196 GrCachedLayer* layer = TestingAccess::Find(&cache, picture->uniqueID(), SkMatrix::I(), | 261 GrCachedLayer* layer = TestingAccess::Find(&cache, picture->uniqueID(), SkMatrix::I(), |
197 indices, 1); | 262 key, 1); |
198 #if GR_CACHE_HOISTED_LAYERS | 263 #if GR_CACHE_HOISTED_LAYERS |
199 // 3 old layers plus the new one should be in the atlas. | 264 // 3 old layers plus the new one should be in the atlas. |
200 if (1 == i || 2 == i || 3 == i || 5 == i) { | 265 if (1 == i || 2 == i || 3 == i || 5 == i) { |
201 REPORTER_ASSERT(reporter, layer); | 266 REPORTER_ASSERT(reporter, layer); |
202 REPORTER_ASSERT(reporter, !layer->locked()); | 267 REPORTER_ASSERT(reporter, !layer->locked()); |
203 REPORTER_ASSERT(reporter, layer->texture()); | 268 REPORTER_ASSERT(reporter, layer->texture()); |
204 REPORTER_ASSERT(reporter, layer->isAtlased()); | 269 REPORTER_ASSERT(reporter, layer->isAtlased()); |
205 } else if (4 == i) { | 270 } else if (4 == i) { |
206 #endif | 271 #endif |
207 // The one that was never atlased should still be around | 272 // The one that was never atlased should still be around |
208 REPORTER_ASSERT(reporter, layer); | 273 REPORTER_ASSERT(reporter, layer); |
209 | 274 |
210 REPORTER_ASSERT(reporter, nullptr == layer->texture()); | 275 REPORTER_ASSERT(reporter, nullptr == layer->texture()); |
211 REPORTER_ASSERT(reporter, !layer->isAtlased()); | 276 REPORTER_ASSERT(reporter, !layer->isAtlased()); |
212 #if GR_CACHE_HOISTED_LAYERS | 277 #if GR_CACHE_HOISTED_LAYERS |
213 } else { | 278 } else { |
214 // The one bumped out of the atlas (i.e., 0) should be gone | 279 // The one bumped out of the atlas (i.e., 0) should be gone |
215 REPORTER_ASSERT(reporter, nullptr == layer); | 280 REPORTER_ASSERT(reporter, nullptr == layer); |
216 } | 281 } |
217 #endif | 282 #endif |
218 } | 283 } |
219 | 284 |
220 //-------------------------------------------------------------------- | 285 //-------------------------------------------------------------------- |
221 // Free them all SkGpuDevice-style. This will not free up the | 286 // Free them all SkGpuDevice-style. This will not free up the |
222 // atlas' texture but will eliminate all the layers. | 287 // atlas' texture but will eliminate all the layers. |
223 TestingAccess::Purge(&cache, picture->uniqueID()); | 288 TestingAccess::Purge(&cache, picture->uniqueID()); |
224 | 289 |
225 REPORTER_ASSERT(reporter, TestingAccess::NumLayers(&cache) == 0); | 290 REPORTER_ASSERT(reporter, TestingAccess::NumLayers(&cache) == 0); |
226 // TODO: add VRAM/resource cache check here | 291 |
| 292 #if GR_CACHE_STATS |
| 293 resourceCache->getStats(&stats); |
| 294 REPORTER_ASSERT(reporter, 2 == stats.fTotal); |
| 295 // Atlas isn't purgeable |
| 296 REPORTER_ASSERT(reporter, 1 == stats.fNumPurgeable); |
| 297 REPORTER_ASSERT(reporter, 1 == stats.fNumNonPurgeable); |
| 298 #endif |
227 | 299 |
228 //-------------------------------------------------------------------- | 300 //-------------------------------------------------------------------- |
229 // Test out the GrContext-style purge. This should remove all the layers | 301 // Test out the GrContext-style purge. This should remove all the layers |
230 // and the atlas. | 302 // and the atlas. |
231 // Re-create the layers | 303 // Re-create the layers |
232 create_layers(reporter, &cache, *picture, kInitialNumLayers, 0); | 304 create_layers(reporter, &cache, *picture, kInitialNumLayers, 0); |
233 | 305 |
234 // Free them again GrContext-style. This should free up everything. | 306 // Free them again GrContext-style. This should free up everything. |
235 cache.freeAll(); | 307 cache.freeAll(); |
236 | 308 |
237 REPORTER_ASSERT(reporter, TestingAccess::NumLayers(&cache) == 0); | 309 REPORTER_ASSERT(reporter, TestingAccess::NumLayers(&cache) == 0); |
238 // TODO: add VRAM/resource cache check here | 310 |
 | 311 REPORTER_ASSERT(reporter, nullptr == TestingAccess::GetBackingTexture(&cache)); |
| 312 |
| 313 #if GR_CACHE_STATS |
| 314 resourceCache->getStats(&stats); |
| 315 REPORTER_ASSERT(reporter, 2 == stats.fTotal); |
| 316 REPORTER_ASSERT(reporter, 2 == stats.fNumPurgeable); |
| 317 #endif |
| 318 |
| 319 // Purge the resource cache ... |
| 320 resourceCache->purgeAllUnlocked(); |
| 321 |
| 322 #if GR_CACHE_STATS |
| 323 resourceCache->getStats(&stats); |
| 324 REPORTER_ASSERT(reporter, 0 == stats.fTotal); |
| 325 #endif |
| 326 |
| 327 // and try to re-attach to the backing texture. This should fail |
| 328 cache.begin(); |
| 329 REPORTER_ASSERT(reporter, nullptr == TestingAccess::GetBackingTexture(&c
ache)); |
239 | 330 |
240 //-------------------------------------------------------------------- | 331 //-------------------------------------------------------------------- |
241 // Test out the MessageBus-style purge. This will not free the atlas | 332 // Test out the MessageBus-style purge. This will not free the atlas |
242 // but should eliminate the free-floating layers. | 333 // but should eliminate the free-floating layers. |
243 create_layers(reporter, &cache, *picture, kInitialNumLayers, 0); | 334 create_layers(reporter, &cache, *picture, kInitialNumLayers, 0); |
244 | 335 |
| 336 // Allocate/use the layers |
| 337 for (int i = 0; i < kInitialNumLayers; ++i) { |
| 338 int key[1] = { i + 1 }; |
 | 339 GrCachedLayer* layer = TestingAccess::Find(&cache, picture->uniqueID(), SkMatrix::I(), |
| 340 key, 1); |
| 341 REPORTER_ASSERT(reporter, layer); |
| 342 |
| 343 lock_layer(reporter, &cache, layer); |
| 344 } |
| 345 |
| 346 #if GR_CACHE_STATS |
| 347 resourceCache->getStats(&stats); |
| 348 REPORTER_ASSERT(reporter, 2 == stats.fTotal); |
| 349 REPORTER_ASSERT(reporter, 2 == stats.fNumNonPurgeable); |
| 350 #endif |
| 351 |
| 352 // Unlock the textures |
| 353 for (int i = 0; i < kInitialNumLayers; ++i) { |
| 354 int key[1] = { i+1 }; |
| 355 |
 | 356 GrCachedLayer* layer = TestingAccess::Find(&cache, picture->uniqueID(), SkMatrix::I(), |
| 357 key, 1); |
| 358 REPORTER_ASSERT(reporter, layer); |
| 359 cache.removeUse(layer); |
| 360 } |
| 361 |
245 picture.reset(nullptr); | 362 picture.reset(nullptr); |
246 cache.processDeletedPictures(); | 363 cache.processDeletedPictures(); |
247 | 364 |
248 REPORTER_ASSERT(reporter, TestingAccess::NumLayers(&cache) == 0); | 365 REPORTER_ASSERT(reporter, TestingAccess::NumLayers(&cache) == 0); |
249 // TODO: add VRAM/resource cache check here | 366 |
| 367 #if GR_CACHE_STATS |
| 368 resourceCache->getStats(&stats); |
| 369 REPORTER_ASSERT(reporter, 2 == stats.fTotal); |
| 370 REPORTER_ASSERT(reporter, 1 == stats.fNumPurgeable); |
| 371 REPORTER_ASSERT(reporter, 1 == stats.fNumNonPurgeable); |
| 372 #endif |
| 373 |
| 374 cache.end(); |
| 375 |
| 376 #if GR_CACHE_STATS |
| 377 resourceCache->getStats(&stats); |
| 378 REPORTER_ASSERT(reporter, 2 == stats.fTotal); |
| 379 REPORTER_ASSERT(reporter, 2 == stats.fNumPurgeable); |
| 380 #endif |
250 } | 381 } |
251 } | 382 } |
252 | 383 |
253 #endif | 384 #endif |