OLD | NEW |
1 // Copyright 2015 The Chromium Authors. All rights reserved. | 1 // Copyright 2015 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "cc/tiles/image_decode_controller.h" | 5 #include "cc/tiles/image_decode_controller.h" |
6 | 6 |
| 7 #include "base/memory/discardable_memory.h" |
7 #include "cc/debug/devtools_instrumentation.h" | 8 #include "cc/debug/devtools_instrumentation.h" |
| 9 #include "third_party/skia/include/core/SkCanvas.h" |
8 | 10 |
9 namespace cc { | 11 namespace cc { |
10 namespace { | 12 namespace { |
11 | 13 |
| 14 // The amount of memory we can lock ahead of time (100MB). This limit is only |
| 15 // used to inform the caller of the amount of space available in the cache. The |
| 16 // caller can still request tasks which can cause this limit to be breached. |
| 17 const size_t kLockedMemoryLimitBytes = 100 * 1024 * 1024; |
| 18 |
| 19 // The number of entries to keep around in the cache. This limit can be breached |
| 20 // if more items are locked. That is, locked items ignore this limit. |
| 21 const size_t kMaxItemsInCache = 100; |
| 22 |
12 class ImageDecodeTaskImpl : public ImageDecodeTask { | 23 class ImageDecodeTaskImpl : public ImageDecodeTask { |
13 public: | 24 public: |
14 ImageDecodeTaskImpl(ImageDecodeController* controller, | 25 ImageDecodeTaskImpl(ImageDecodeController* controller, |
15 const SkImage* image, | 26 const ImageDecodeController::ImageKey& image_key, |
16 int layer_id, | 27 const DrawImage& image, |
17 uint64_t source_prepare_tiles_id) | 28 uint64_t source_prepare_tiles_id) |
18 : controller_(controller), | 29 : controller_(controller), |
19 image_(skia::SharePtr(image)), | 30 image_key_(image_key), |
20 layer_id_(layer_id), | 31 image_(image), |
| 32 image_ref_(skia::SharePtr(image.image())), |
21 source_prepare_tiles_id_(source_prepare_tiles_id) {} | 33 source_prepare_tiles_id_(source_prepare_tiles_id) {} |
22 | 34 |
23 // Overridden from Task: | 35 // Overridden from Task: |
24 void RunOnWorkerThread() override { | 36 void RunOnWorkerThread() override { |
25 TRACE_EVENT1("cc", "ImageDecodeTaskImpl::RunOnWorkerThread", | 37 TRACE_EVENT1("cc", "ImageDecodeTaskImpl::RunOnWorkerThread", |
26 "source_prepare_tiles_id", source_prepare_tiles_id_); | 38 "source_prepare_tiles_id", source_prepare_tiles_id_); |
27 devtools_instrumentation::ScopedImageDecodeTask image_decode_task( | 39 devtools_instrumentation::ScopedImageDecodeTask image_decode_task( |
28 image_.get()); | 40 image_ref_.get()); |
29 controller_->DecodeImage(image_.get()); | 41 controller_->DecodeImage(image_key_, image_); |
30 | |
31 // Release the reference after decoding image to ensure that it is not kept | |
32 // alive unless needed. | |
33 image_.clear(); | |
34 } | 42 } |
35 | 43 |
36 // Overridden from TileTask: | 44 // Overridden from TileTask: |
37 void ScheduleOnOriginThread(TileTaskClient* client) override {} | 45 void ScheduleOnOriginThread(TileTaskClient* client) override {} |
38 void CompleteOnOriginThread(TileTaskClient* client) override { | 46 void CompleteOnOriginThread(TileTaskClient* client) override { |
39 controller_->OnImageDecodeTaskCompleted(layer_id_, image_.get(), | 47 controller_->OnImageDecodeTaskCompleted(image_key_, !HasFinishedRunning()); |
40 !HasFinishedRunning()); | |
41 } | 48 } |
42 | 49 |
43 protected: | 50 protected: |
44 ~ImageDecodeTaskImpl() override {} | 51 ~ImageDecodeTaskImpl() override {} |
45 | 52 |
46 private: | 53 private: |
47 ImageDecodeController* controller_; | 54 ImageDecodeController* controller_; |
48 skia::RefPtr<const SkImage> image_; | 55 ImageDecodeController::ImageKey image_key_; |
49 int layer_id_; | 56 DrawImage image_; |
| 57 skia::RefPtr<const SkImage> image_ref_; |
50 uint64_t source_prepare_tiles_id_; | 58 uint64_t source_prepare_tiles_id_; |
51 | 59 |
52 DISALLOW_COPY_AND_ASSIGN(ImageDecodeTaskImpl); | 60 DISALLOW_COPY_AND_ASSIGN(ImageDecodeTaskImpl); |
53 }; | 61 }; |
54 | 62 |
55 } // namespace | 63 } // namespace |
56 | 64 |
57 ImageDecodeController::ImageDecodeController() {} | 65 ImageDecodeController::ImageDecodeController() |
| 66 : locked_images_budget_(kLockedMemoryLimitBytes) {} |
58 | 67 |
59 ImageDecodeController::~ImageDecodeController() {} | 68 ImageDecodeController::~ImageDecodeController() {} |
60 | 69 |
61 scoped_refptr<ImageDecodeTask> ImageDecodeController::GetTaskForImage( | 70 scoped_refptr<ImageDecodeTask> ImageDecodeController::GetTaskForImageAndRef( |
62 const DrawImage& image, | 71 const DrawImage& image, |
63 int layer_id, | |
64 uint64_t prepare_tiles_id) { | 72 uint64_t prepare_tiles_id) { |
65 uint32_t generation_id = image.image()->uniqueID(); | 73 // If the image already exists or if we're going to create a task for it, then |
66 scoped_refptr<ImageDecodeTask>& decode_task = | 74 // we need to ref this image. That means the image is or will be in the cache. |
67 image_decode_tasks_[layer_id][generation_id]; | 75 // When the ref goes to 0, it will be unpinned but will remain in the cache. |
68 if (!decode_task) | 76 // If the image does not fit into the budget, then we don't ref this image, |
69 decode_task = CreateTaskForImage(image.image(), layer_id, prepare_tiles_id); | 77 // since it will be decoded at raster time which is when it will be |
70 return decode_task; | 78 // temporarily put in the cache. |
| 79 ImageKey key = ImageKey::FromDrawImage(image); |
| 80 |
| 81 // Image already exists. |
| 82 if (LockDecodedImageIfPossibleAndRef(key)) |
| 83 return nullptr; |
| 84 |
| 85 base::AutoLock lock(lock_); |
| 86 |
| 87 scoped_refptr<ImageDecodeTask>& task = pending_image_tasks_[key]; |
| 88 if (task) { |
| 89 ++decoded_images_ref_counts_[key]; |
| 90 return task; |
| 91 } |
| 92 |
| 93 // The image won't fit into the cache; we'll scale it at raster time. |
| 94 if (locked_images_budget_.AvailableMemoryBytes() < key.target_bytes()) |
| 95 return nullptr; |
| 96 |
| 97 // We have to account for memory usage when we create the task, not when the |
| 98 // task puts an image in the cache, because we need to ensure that we don't |
| 99 // create more tasks than we have memory for. That is, since tasks are created |
| 100 // up front, it will be too late to count memory when they run. |
| 101 locked_images_budget_.AddUsage(key.target_bytes()); |
| 102 |
| 103 task = CreateTaskForImage(key, image, prepare_tiles_id); |
| 104 |
| 105 ++decoded_images_ref_counts_[key]; |
| 106 return task; |
| 107 } |
| 108 |
| 109 void ImageDecodeController::UnrefImage(const DrawImage& image) { |
| 110 // When we unref the image, there are several situations we need to consider: |
| 111 // 1. This image was not even scheduled to be in the locked cache, which means |
| 112 // that the ref count doesn't even exist. |
| 113 // 2. The ref did not reach 0, which means we have to keep the image locked. |
| 114 // 3. The ref reached 0, we should unlock it. |
| 115 // 3a. The image isn't in the locked cache because we didn't get to decode |
| 116 // it yet (this is different from (1.) since it was actually scheduled |
| 117 // to be decoded). |
| 118 // 3b. Unlock the image but keep it in the list. |
| 119 const ImageKey& key = ImageKey::FromDrawImage(image); |
| 120 |
| 121 base::AutoLock lock(lock_); |
| 122 auto ref_count_it = decoded_images_ref_counts_.find(key); |
| 123 if (ref_count_it == decoded_images_ref_counts_.end()) |
| 124 return; |
| 125 |
| 126 --ref_count_it->second; |
| 127 if (ref_count_it->second == 0) { |
| 128 decoded_images_ref_counts_.erase(key); |
| 129 locked_images_budget_.SubtractUsage(key.target_bytes()); |
| 130 |
| 131 auto decoded_image_it = |
| 132 std::find_if(decoded_images_.begin(), decoded_images_.end(), |
| 133 [key](const AnnotatedDecodedImage& decoded_image) { |
| 134 return key == decoded_image.first; |
| 135 }); |
| 136 // If we've never decoded the image before the ref reached 0, then we wouldn't |
| 137 // have it in our cache. This would happen if we canceled tasks. |
| 138 if (decoded_image_it == decoded_images_.end()) |
| 139 return; |
| 140 DCHECK(decoded_image_it->second->is_locked()); |
| 141 decoded_image_it->second->Unlock(); |
| 142 } |
| 143 } |
| 144 |
| 145 bool ImageDecodeController::LockDecodedImageIfPossibleAndRef( |
| 146 const ImageKey& key) { |
| 147 base::AutoLock lock(lock_); |
| 148 auto decoded_images_iterator = |
| 149 std::find_if(decoded_images_.begin(), decoded_images_.end(), |
| 150 [key](const AnnotatedDecodedImage& annotated_image) { |
| 151 return key == annotated_image.first; |
| 152 }); |
| 153 if (decoded_images_iterator == decoded_images_.end()) |
| 154 return false; |
| 155 |
| 156 AnnotatedDecodedImage decoded_image = *decoded_images_iterator; |
| 157 decoded_images_.erase(decoded_images_iterator); |
| 158 |
| 159 // Figure out if the image is locked or try to lock it. |
| 160 bool locked = decoded_image.second->is_locked(); |
| 161 if (!locked) { |
| 162 locked = decoded_image.second->Lock(); |
| 163 if (!locked) |
| 164 return false; |
| 165 |
| 166 locked_images_budget_.AddUsage(key.target_bytes()); |
| 167 } |
| 168 |
| 169 // If the image is locked or we locked it, then it has to be in the cache. |
| 170 decoded_images_.push_back(decoded_image); |
| 171 ++decoded_images_ref_counts_[key]; |
| 172 return true; |
71 } | 173 } |
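The ref counting contract implemented by GetTaskForImageAndRef() and UnrefImage() above is easiest to see from the caller's side. Below is a minimal caller-side sketch, not part of this change; only the two controller methods come from this file, while the helper names and the task vector are illustrative:

#include <vector>

#include "cc/tiles/image_decode_controller.h"

namespace cc {

// Hypothetical prepare-tiles helper (illustrative only). A null return from
// GetTaskForImageAndRef() means there is nothing to schedule: either the image
// is already decoded and locked (a ref was taken), or it did not fit into the
// locked-memory budget (no ref was taken; it will be decoded at raster time).
void CollectDecodeTask(ImageDecodeController* controller,
                       const DrawImage& draw_image,
                       uint64_t prepare_tiles_id,
                       std::vector<scoped_refptr<ImageDecodeTask>>* tasks) {
  scoped_refptr<ImageDecodeTask> task =
      controller->GetTaskForImageAndRef(draw_image, prepare_tiles_id);
  if (task)
    tasks->push_back(task);
}

// Hypothetical release helper (illustrative only): balances a ref taken above.
// The last unref unlocks the discardable backing but keeps the entry in the
// cache (situation 3b in the comment above).
void ReleaseDecodeRef(ImageDecodeController* controller,
                      const DrawImage& draw_image) {
  controller->UnrefImage(draw_image);
}

}  // namespace cc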
72 | 174 |
73 scoped_refptr<ImageDecodeTask> ImageDecodeController::CreateTaskForImage( | 175 scoped_refptr<ImageDecodeTask> ImageDecodeController::CreateTaskForImage( |
74 const SkImage* image, | 176 const ImageKey& key, |
75 int layer_id, | 177 const DrawImage& image, |
76 uint64_t prepare_tiles_id) { | 178 uint64_t prepare_tiles_id) { |
77 return make_scoped_refptr( | 179 return make_scoped_refptr( |
78 new ImageDecodeTaskImpl(this, image, layer_id, prepare_tiles_id)); | 180 new ImageDecodeTaskImpl(this, key, image, prepare_tiles_id)); |
79 } | 181 } |
80 | 182 |
81 void ImageDecodeController::DecodeImage(const SkImage* image) { | 183 void ImageDecodeController::DecodeImage(const ImageKey& key, |
82 image->preroll(); | 184 const DrawImage& image) { |
83 } | 185 if (!CanHandleFilterQuality(image.filter_quality())) |
84 | |
85 void ImageDecodeController::AddLayerUsedCount(int layer_id) { | |
86 ++used_layer_counts_[layer_id]; | |
87 } | |
88 | |
89 void ImageDecodeController::SubtractLayerUsedCount(int layer_id) { | |
90 if (--used_layer_counts_[layer_id]) | |
91 return; | 186 return; |
92 | 187 |
93 // Clean up decode tasks once a layer is no longer used. | 188 scoped_refptr<DecodedImage> decoded_image = |
94 used_layer_counts_.erase(layer_id); | 189 DecodeImageInternal(key, image.image()); |
95 image_decode_tasks_.erase(layer_id); | 190 |
96 } | 191 // Add the image to the cache. Don't add the budget usage, since it was |
97 | 192 // already handled by the code that created the task for this decode. |
98 void ImageDecodeController::OnImageDecodeTaskCompleted(int layer_id, | 193 base::AutoLock lock(lock_); |
99 const SkImage* image, | 194 decoded_images_.push_back(AnnotatedDecodedImage(key, decoded_image)); |
| 195 } |
| 196 |
| 197 scoped_refptr<ImageDecodeController::DecodedImage> |
| 198 ImageDecodeController::DecodeImageInternal(const ImageKey& key, |
| 199 const SkImage* image) { |
| 200 // TODO(vmpstr, reed): Scale the image here without caching it in skia. |
| 201 SkImageInfo info = SkImageInfo::MakeN32Premul(key.target_size().width(), |
| 202 key.target_size().height()); |
| 203 scoped_ptr<base::DiscardableMemory> locked_memory = |
| 204 base::DiscardableMemoryAllocator::GetInstance() |
| 205 ->AllocateLockedDiscardableMemory(info.minRowBytes() * info.height()); |
| 206 |
| 207 skia::RefPtr<SkCanvas> canvas = skia::AdoptRef(SkCanvas::NewRasterDirect( |
| 208 info, locked_memory->data(), info.minRowBytes())); |
| 209 canvas->setMatrix(SkMatrix::MakeRectToRect( |
| 210 SkRect::MakeWH(image->width(), image->height()), |
| 211 SkRect::MakeWH(key.target_size().width(), key.target_size().height()), |
| 212 SkMatrix::kFill_ScaleToFit)); |
| 213 canvas->clear(SK_ColorTRANSPARENT); |
| 214 SkPaint paint; |
| 215 paint.setFilterQuality(kHigh_SkFilterQuality); |
| 216 canvas->drawImage(image, 0, 0, &paint); |
| 217 canvas->flush(); |
| 218 |
| 219 return make_scoped_refptr(new DecodedImage(info, locked_memory.Pass())); |
| 220 } |
| 221 |
| 222 void ImageDecodeController::OnImageDecodeTaskCompleted(const ImageKey& key, |
100 bool was_canceled) { | 223 bool was_canceled) { |
101 // If the task has successfully finished, then keep the task until the layer | 224 pending_image_tasks_.erase(key); |
102 // is no longer in use. This ensures that we only decode a image once. | 225 } |
103 // TODO(vmpstr): Remove this when decode lifetime is controlled by cc. | 226 |
104 if (!was_canceled) | 227 DecodedDrawImage ImageDecodeController::GetDecodedImage( |
| 228 const DrawImage& image) { |
| 229 if (!CanHandleFilterQuality(image.filter_quality())) { |
| 230 return DecodedDrawImage(image.image(), SkSize::Make(1.f, 1.f), |
| 231 image.filter_quality()); |
| 232 } |
| 233 |
| 234 ImageKey key = ImageKey::FromDrawImage(image); |
| 235 |
| 236 base::AutoLock lock(lock_); |
| 237 auto decoded_images_iterator = |
| 238 std::find_if(decoded_images_.begin(), decoded_images_.end(), |
| 239 [key](const AnnotatedDecodedImage& annotated_image) { |
| 240 return key == annotated_image.first; |
| 241 }); |
| 242 scoped_refptr<DecodedImage> decoded_image; |
| 243 if (decoded_images_iterator != decoded_images_.end()) { |
| 244 decoded_image = decoded_images_iterator->second; |
| 245 if (!decoded_image->is_locked()) { |
| 246 if (decoded_image->Lock()) { |
| 247 locked_images_budget_.AddUsage(key.target_bytes()); |
| 248 } else { |
| 249 decoded_images_.erase(decoded_images_iterator); |
| 250 decoded_image = nullptr; |
| 251 } |
| 252 } |
| 253 } |
| 254 |
| 255 if (!decoded_image) { |
| 256 // This means that we didn't have an image task to decode this (otherwise it |
| 257 // would have run and locked the image already). So, we need to decode/scale |
| 258 // in place. |
| 259 decoded_image = DecodeImageInternal(key, image.image()); |
| 260 decoded_images_.push_back(AnnotatedDecodedImage(key, decoded_image)); |
| 261 locked_images_budget_.AddUsage(key.target_bytes()); |
| 262 } |
| 263 |
| 264 DCHECK(decoded_image->is_locked()) << key.ToString(); |
| 265 float x_scale = |
| 266 key.target_size().width() / static_cast<float>(image.image()->width()); |
| 267 float y_scale = |
| 268 key.target_size().height() / static_cast<float>(image.image()->height()); |
| 269 |
| 270 ++decoded_images_ref_counts_[key]; |
| 271 return DecodedDrawImage(decoded_image->image(), |
| 272 SkSize::Make(x_scale, y_scale), kLow_SkFilterQuality); |
| 273 } |
| 274 |
| 275 void ImageDecodeController::DrawWithImageFinished(const DrawImage& image) { |
| 276 if (!CanHandleFilterQuality(image.filter_quality())) |
105 return; | 277 return; |
106 | 278 UnrefImage(image); |
107 // Otherwise, we have to clean up the task so that a new one can be created if | 279 } |
108 // we need to decode the image again. | 280 |
109 LayerImageTaskMap::iterator layer_it = image_decode_tasks_.find(layer_id); | 281 bool ImageDecodeController::CanHandleFilterQuality( |
110 if (layer_it == image_decode_tasks_.end()) | 282 SkFilterQuality filter_quality) { |
111 return; | 283 DCHECK(filter_quality != kNone_SkFilterQuality); |
112 | 284 // We don't need to handle low quality filters. |
113 ImageTaskMap& image_tasks = layer_it->second; | 285 if (filter_quality == kLow_SkFilterQuality) |
114 ImageTaskMap::iterator task_it = image_tasks.find(image->uniqueID()); | 286 return false; |
115 if (task_it == image_tasks.end()) | 287 // TODO(vmpstr): We need to start caching mipmaps for medium quality and |
116 return; | 288 // caching the interpolated values from those. For now, we don't have this. |
117 image_tasks.erase(task_it); | 289 if (filter_quality == kMedium_SkFilterQuality) |
| 290 return false; |
| 291 DCHECK(filter_quality == kHigh_SkFilterQuality); |
| 292 return true; |
| 293 } |
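GetDecodedImage() and DrawWithImageFinished() are intended to bracket a single image draw at raster time. A minimal sketch of that usage follows, not part of this change; the DecodedDrawImage accessors (image(), scale_adjustment(), filter_quality()) are assumed from the constructor calls above rather than quoted from its header:

#include "cc/tiles/image_decode_controller.h"
#include "third_party/skia/include/core/SkCanvas.h"
#include "third_party/skia/include/core/SkPaint.h"

namespace cc {

// Illustrative raster-time helper (not part of this change): fetch the cached
// decode, draw with it, then release the ref that GetDecodedImage() took.
void RasterOneImage(ImageDecodeController* controller,
                    SkCanvas* canvas,
                    const DrawImage& draw_image,
                    const SkPaint& recorded_paint) {
  DecodedDrawImage decoded = controller->GetDecodedImage(draw_image);

  // The cache hands back pre-scaled pixels with kLow_SkFilterQuality; the
  // recorded quality only survives when the controller could not handle it.
  SkPaint paint = recorded_paint;
  paint.setFilterQuality(decoded.filter_quality());

  // Counteract the pre-scale so the smaller decoded image still covers the
  // same destination area as the original image would have.
  canvas->save();
  canvas->scale(1.f / decoded.scale_adjustment().width(),
                1.f / decoded.scale_adjustment().height());
  canvas->drawImage(decoded.image(), 0, 0, &paint);
  canvas->restore();

  controller->DrawWithImageFinished(draw_image);
}

}  // namespace cc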
| 294 |
| 295 void ImageDecodeController::ReduceCacheUsage() { |
| 296 base::AutoLock lock(lock_); |
| 297 size_t num_to_remove = (decoded_images_.size() > kMaxItemsInCache) |
| 298 ? (decoded_images_.size() - kMaxItemsInCache) |
| 299 : 0; |
| 300 for (auto it = decoded_images_.begin(); |
| 301 num_to_remove != 0 && it != decoded_images_.end();) { |
| 302 if (it->second->is_locked()) { |
| 303 ++it; |
| 304 continue; |
| 305 } |
| 306 |
| 307 it = decoded_images_.erase(it); |
| 308 --num_to_remove; |
| 309 } |
| 310 } |
| 311 |
| 312 // ImageDecodeControllerKey |
| 313 ImageDecodeControllerKey ImageDecodeControllerKey::FromDrawImage( |
| 314 const DrawImage& image) { |
| 315 gfx::Size target_size(std::abs(SkScalarRoundToInt(image.image()->width() * |
| 316 image.scale().width())), |
| 317 std::abs(SkScalarRoundToInt(image.image()->height() * |
| 318 image.scale().height()))); |
| 319 return ImageDecodeControllerKey(image.image()->uniqueID(), target_size, |
| 320 image.filter_quality()); |
| 321 } |
| 322 |
| 323 ImageDecodeControllerKey::ImageDecodeControllerKey( |
| 324 uint32_t image_id, |
| 325 const gfx::Size& size, |
| 326 SkFilterQuality filter_quality) |
| 327 : image_id_(image_id), size_(size), filter_quality_(filter_quality) {} |
| 328 |
| 329 std::string ImageDecodeControllerKey::ToString() const { |
| 330 std::ostringstream str; |
| 331 str << "id[" << image_id_ << "] size[" << size_.width() << "x" |
| 332 << size_.height() << "] filter_quality[" << filter_quality_ << "]"; |
| 333 return str.str(); |
| 334 } |
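A concrete illustration of the key mapping above, not part of this change: two draws of the same SkImage at the same effective scale and filter quality produce equal keys and therefore share a single decode and a single budget charge. The key's equality operator is assumed to be declared in the header (it is what the find_if lambdas above rely on):

#include "cc/tiles/image_decode_controller.h"

namespace cc {

// Illustrative only: a 640x480 image recorded with a 0.5x0.5 scale maps to a
// 320x240 cache entry, and ToString() would read something like
// "id[<uniqueID>] size[320x240] filter_quality[3]" for kHigh_SkFilterQuality.
bool ShareOneDecode(const DrawImage& a, const DrawImage& b) {
  return ImageDecodeControllerKey::FromDrawImage(a) ==
         ImageDecodeControllerKey::FromDrawImage(b);
}

}  // namespace cc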
| 335 |
| 336 // TODO(vmpstr): Not sure if this needs to do something. It could have some |
| 337 // extra state to DCHECK if we pass it the DecodedImage object as the context. |
| 338 static void noop(const void* pixels, void* context) {} |
| 339 |
| 340 // DecodedImage |
| 341 ImageDecodeController::DecodedImage::DecodedImage( |
| 342 const SkImageInfo& info, |
| 343 scoped_ptr<base::DiscardableMemory> memory) |
| 344 : locked_(true), image_info_(info), memory_(memory.Pass()) { |
| 345 CreateImageFromLockedMemory(); |
| 346 } |
| 347 |
| 348 ImageDecodeController::DecodedImage::~DecodedImage() {} |
| 349 |
| 350 bool ImageDecodeController::DecodedImage::Lock() { |
| 351 DCHECK(!locked_); |
| 352 bool success = memory_->Lock(); |
| 353 if (!success) |
| 354 return false; |
| 355 locked_ = true; |
| 356 CreateImageFromLockedMemory(); |
| 357 return true; |
| 358 } |
| 359 |
| 360 void ImageDecodeController::DecodedImage::Unlock() { |
| 361 DCHECK(locked_); |
| 362 image_.clear(); |
| 363 memory_->Unlock(); |
| 364 locked_ = false; |
| 365 } |
| 366 |
| 367 void ImageDecodeController::DecodedImage::CreateImageFromLockedMemory() { |
| 368 image_ = skia::AdoptRef(SkImage::NewFromRaster( |
| 369 image_info_, memory_->data(), image_info_.minRowBytes(), &noop, nullptr)); |
| 370 } |
| 371 |
| 372 // MemoryBudget |
| 373 ImageDecodeController::MemoryBudget::MemoryBudget(size_t limit_bytes) |
| 374 : limit_bytes_(limit_bytes), current_usage_bytes_(0u) {} |
| 375 |
| 376 size_t ImageDecodeController::MemoryBudget::AvailableMemoryBytes() const { |
| 377 size_t usage = GetCurrentUsageSafe(); |
| 378 return usage >= limit_bytes_ ? 0u : (limit_bytes_ - usage); |
| 379 } |
| 380 |
| 381 void ImageDecodeController::MemoryBudget::AddUsage(size_t usage) { |
| 382 current_usage_bytes_ += usage; |
| 383 } |
| 384 |
| 385 void ImageDecodeController::MemoryBudget::SubtractUsage(size_t usage) { |
| 386 DCHECK_GE(current_usage_bytes_.ValueOrDefault(0u), usage); |
| 387 current_usage_bytes_ -= usage; |
| 388 } |
| 389 |
| 390 void ImageDecodeController::MemoryBudget::ResetUsage() { |
| 391 current_usage_bytes_ = 0; |
| 392 } |
| 393 |
| 394 bool ImageDecodeController::MemoryBudget::NeedsReset() const { |
| 395 return !current_usage_bytes_.IsValid(); |
| 396 } |
| 397 |
| 398 size_t ImageDecodeController::MemoryBudget::GetCurrentUsageSafe() const { |
| 399 return current_usage_bytes_.ValueOrDefault( |
| 400 std::numeric_limits<size_t>::max()); |
118 } | 401 } |
119 | 402 |
120 } // namespace cc | 403 } // namespace cc |
OLD | NEW |