Index: cc/tiles/image_decode_controller.cc |
diff --git a/cc/tiles/image_decode_controller.cc b/cc/tiles/image_decode_controller.cc |
index 70b6dc789257332e8a097b0c8d7158bc060c666e..9c44217c3e223d3dad1ff62b36c10f1313d015e9 100644 |
--- a/cc/tiles/image_decode_controller.cc |
+++ b/cc/tiles/image_decode_controller.cc |
@@ -4,20 +4,32 @@ |
#include "cc/tiles/image_decode_controller.h" |
+#include "base/memory/discardable_memory.h" |
#include "cc/debug/devtools_instrumentation.h" |
+#include "third_party/skia/include/core/SkCanvas.h" |
namespace cc { |
namespace { |
+// The amount of memory we can lock ahead of time (100MB). This limit is only |
+// used to inform the caller of the amount of space available in the cache. The |
+// caller can still request tasks which can cause this limit to be breached. |
+const size_t kLockedMemoryLimitBytes = 100 * 1024 * 1024; |
+ |
+// The number of entries to keep around in the cache. This limit can be breached |
+// if more items are locked. That is, locked items ignore this limit. |
+const size_t kMaxItemsInCache = 100; |
+ |
class ImageDecodeTaskImpl : public ImageDecodeTask { |
public: |
ImageDecodeTaskImpl(ImageDecodeController* controller, |
- const SkImage* image, |
- int layer_id, |
+ const ImageDecodeController::ImageKey& image_key, |
+ const DrawImage& image, |
uint64_t source_prepare_tiles_id) |
: controller_(controller), |
- image_(skia::SharePtr(image)), |
- layer_id_(layer_id), |
+ image_key_(image_key), |
+ image_(image), |
+ image_ref_(skia::SharePtr(image.image())), |
source_prepare_tiles_id_(source_prepare_tiles_id) {} |
// Overridden from Task: |
@@ -25,19 +37,14 @@ class ImageDecodeTaskImpl : public ImageDecodeTask { |
TRACE_EVENT1("cc", "ImageDecodeTaskImpl::RunOnWorkerThread", |
"source_prepare_tiles_id", source_prepare_tiles_id_); |
devtools_instrumentation::ScopedImageDecodeTask image_decode_task( |
- image_.get()); |
- controller_->DecodeImage(image_.get()); |
- |
- // Release the reference after decoding image to ensure that it is not kept |
- // alive unless needed. |
- image_.clear(); |
+ image_ref_.get()); |
+ controller_->DecodeImage(image_key_, image_); |
} |
// Overridden from TileTask: |
void ScheduleOnOriginThread(TileTaskClient* client) override {} |
void CompleteOnOriginThread(TileTaskClient* client) override { |
- controller_->OnImageDecodeTaskCompleted(layer_id_, image_.get(), |
- !HasFinishedRunning()); |
+ controller_->OnImageDecodeTaskCompleted(image_key_, !HasFinishedRunning()); |
} |
protected: |
@@ -45,8 +52,9 @@ class ImageDecodeTaskImpl : public ImageDecodeTask { |
private: |
ImageDecodeController* controller_; |
- skia::RefPtr<const SkImage> image_; |
- int layer_id_; |
+ ImageDecodeController::ImageKey image_key_; |
+ DrawImage image_; |
+ skia::RefPtr<const SkImage> image_ref_; |
uint64_t source_prepare_tiles_id_; |
DISALLOW_COPY_AND_ASSIGN(ImageDecodeTaskImpl); |
@@ -54,67 +62,407 @@ class ImageDecodeTaskImpl : public ImageDecodeTask { |
} // namespace |
-ImageDecodeController::ImageDecodeController() {} |
+ImageDecodeController::ImageDecodeController() |
+ : locked_images_budget_(kLockedMemoryLimitBytes) {} |
ImageDecodeController::~ImageDecodeController() {} |
-scoped_refptr<ImageDecodeTask> ImageDecodeController::GetTaskForImage( |
+scoped_refptr<ImageDecodeTask> ImageDecodeController::GetTaskForImageAndRef( |
const DrawImage& image, |
- int layer_id, |
uint64_t prepare_tiles_id) { |
- uint32_t generation_id = image.image()->uniqueID(); |
- scoped_refptr<ImageDecodeTask>& decode_task = |
- image_decode_tasks_[layer_id][generation_id]; |
- if (!decode_task) |
- decode_task = CreateTaskForImage(image.image(), layer_id, prepare_tiles_id); |
- return decode_task; |
+  // If the image already exists or if we're going to create a task for it,
+  // then we need to ref this image. That means the image is or will be in the
+  // cache. When the ref goes to 0, it will be unpinned but will remain in the
+  // cache. If the image does not fit into the budget, then we don't ref this
+  // image, since it will be decoded at raster time, which is when it will be
+  // temporarily put in the cache.
+ ImageKey key = ImageKey::FromDrawImage(image); |
+ |
+ // Image already exists. |
+ if (LockDecodedImageIfPossibleAndRef(key)) { |
+ SanityCheckState(__LINE__, false); |
+ return nullptr; |
+ } |
+ |
+ base::AutoLock lock(lock_); |
+ |
+ scoped_refptr<ImageDecodeTask>& task = pending_image_tasks_[key]; |
+ if (task) { |
+ ++decoded_images_ref_counts_[key]; |
+ SanityCheckState(__LINE__, true); |
+ return task; |
+ } |
+ |
+  // The image won't fit into the cache, so we'll scale it at raster time.
+ if (locked_images_budget_.AvailableMemoryBytes() < key.target_bytes()) { |
+ SanityCheckState(__LINE__, true); |
+ return nullptr; |
+ } |
+ |
+  // We have to account for memory usage when we create the task, not when the
+  // task puts an image in the cache, because we need to ensure that we don't
+  // create more tasks than we have memory for. That is, since tasks are
+  // created up front, it would be too late to count memory when they run.
+ locked_images_budget_.AddUsage(key.target_bytes()); |
+ |
+ task = CreateTaskForImage(key, image, prepare_tiles_id); |
+ |
+ ++decoded_images_ref_counts_[key]; |
+ SanityCheckState(__LINE__, true); |
+ return task; |
+} |
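
The branch structure above decides, per key, whether to hand back an existing task, create a new one, or fall back to a raster-time decode. A minimal standalone sketch of that decision using plain std::map and a placeholder Task type (Key, Task, Controller, and GetTaskForImageAndRef here are illustrative stand-ins, not the cc:: classes):

// Standalone sketch of the budget-gated "ref and maybe create a task" decision.
#include <cstddef>
#include <map>
#include <memory>

struct Key {
  unsigned id;
  size_t bytes;
  bool operator<(const Key& o) const { return id < o.id; }
};
struct Task {};  // placeholder for ImageDecodeTask

struct Controller {
  size_t available_bytes = 100 * 1024 * 1024;
  std::map<Key, std::shared_ptr<Task>> pending_tasks;
  std::map<Key, int> ref_counts;

  // Returns the task the caller should schedule, or nullptr if the image is
  // already decoded and locked, or does not fit the budget.
  std::shared_ptr<Task> GetTaskForImageAndRef(const Key& key, bool already_locked) {
    if (already_locked) {  // image is in the cache and locked: just ref it
      ++ref_counts[key];
      return nullptr;
    }
    auto it = pending_tasks.find(key);
    if (it != pending_tasks.end()) {  // someone already requested a decode
      ++ref_counts[key];
      return it->second;
    }
    if (available_bytes < key.bytes)  // over budget: decode at raster time instead
      return nullptr;
    available_bytes -= key.bytes;     // account for memory now, not when the task runs
    auto task = std::make_shared<Task>();
    pending_tasks[key] = task;
    ++ref_counts[key];
    return task;
  }
};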
+ |
+void ImageDecodeController::UnrefImage(const DrawImage& image) { |
+  // When we unref the image, there are several situations we need to consider:
+  // 1. This image was not even scheduled to be in the locked cache, which means
+  //    that the ref count doesn't even exist.
+  // 2. The ref did not reach 0, which means we have to keep the image locked.
+  // 3. The ref reached 0, so we should unlock it.
+  //    3a. The image isn't in the locked cache because we didn't get to decode
+  //        it yet (this is different from (1) since it was actually scheduled
+  //        to be decoded).
+  //    3b. Unlock the image but keep it in the list.
+ const ImageKey& key = ImageKey::FromDrawImage(image); |
+ |
+ base::AutoLock lock(lock_); |
+ auto ref_count_it = decoded_images_ref_counts_.find(key); |
+ if (ref_count_it == decoded_images_ref_counts_.end()) { |
+ SanityCheckState(__LINE__, true); |
+ return; |
+ } |
+ |
+ --ref_count_it->second; |
+ if (ref_count_it->second == 0) { |
+ decoded_images_ref_counts_.erase(key); |
+ locked_images_budget_.SubtractUsage(key.target_bytes()); |
+ |
+ auto decoded_image_it = |
+ std::find_if(decoded_images_.begin(), decoded_images_.end(), |
+ [key](const AnnotatedDecodedImage& decoded_image) { |
+ return key == decoded_image.first; |
+ }); |
+    // If we never decoded the image before the ref reached 0, then we wouldn't
+    // have it in our cache. This would happen if we canceled tasks.
+ if (decoded_image_it == decoded_images_.end()) { |
+ SanityCheckState(__LINE__, true); |
+ return; |
+ } |
+ DCHECK(decoded_image_it->second->is_locked()); |
+ decoded_image_it->second->Unlock(); |
+ } |
+ SanityCheckState(__LINE__, true); |
+} |
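
The four outcomes listed in the comment reduce to the sketch below; Cache, Entry, and the containers are simplified stand-ins for the controller's members, assuming budget usage was charged when the task was created:

// Standalone sketch of the unref paths (names are illustrative, not the cc:: types).
#include <cstddef>
#include <list>
#include <map>

struct Key {
  unsigned id;
  size_t bytes;
  bool operator<(const Key& o) const { return id < o.id; }
};
struct Entry {
  Key key;
  bool locked = true;
};

struct Cache {
  size_t available_bytes = 100 * 1024 * 1024;
  std::map<Key, int> ref_counts;
  std::list<Entry> decoded;  // cache entries, locked or not

  void UnrefImage(const Key& key) {
    auto it = ref_counts.find(key);
    if (it == ref_counts.end())
      return;                      // case 1: never scheduled, nothing to do
    if (--it->second > 0)
      return;                      // case 2: still referenced, stays locked
    ref_counts.erase(it);
    available_bytes += key.bytes;  // give the budget back
    for (Entry& e : decoded) {
      if (e.key.id == key.id) {
        e.locked = false;          // case 3b: unlock but keep the entry cached
        return;
      }
    }
    // case 3a: the decode task was canceled before it ran, so there is no entry.
  }
};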
+ |
+bool ImageDecodeController::LockDecodedImageIfPossibleAndRef( |
+ const ImageKey& key) { |
+ base::AutoLock lock(lock_); |
+ auto decoded_images_iterator = |
+ std::find_if(decoded_images_.begin(), decoded_images_.end(), |
+ [key](const AnnotatedDecodedImage& annotated_image) { |
+ return key == annotated_image.first; |
+ }); |
+ if (decoded_images_iterator == decoded_images_.end()) { |
+ SanityCheckState(__LINE__, true); |
+ return false; |
+ } |
+ |
+ AnnotatedDecodedImage decoded_image = *decoded_images_iterator; |
+ decoded_images_.erase(decoded_images_iterator); |
+ |
+ // Figure out if the image is locked or try to lock it. |
+ bool locked = decoded_image.second->is_locked(); |
+ if (!locked) { |
+ locked = decoded_image.second->Lock(); |
+ if (!locked) { |
+ SanityCheckState(__LINE__, true); |
+ return false; |
+ } |
+ |
+ locked_images_budget_.AddUsage(key.target_bytes()); |
+ } |
+ |
+  // The image is now locked (either it already was, or we just locked it). Move
+  // it to the back of the list to mark it as most recently used; the erase
+  // above guarantees that it no longer appears elsewhere in the list.
+ DCHECK(std::find_if(decoded_images_.begin(), decoded_images_.end(), |
+ [key](const AnnotatedDecodedImage& image) { |
+ return image.first == key; |
+ }) == decoded_images_.end()); |
+ decoded_images_.push_back(decoded_image); |
+ ++decoded_images_ref_counts_[key]; |
+ SanityCheckState(__LINE__, true); |
+ return true; |
} |
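
Erasing the entry and pushing it to the back is what keeps decoded_images_ in roughly least-recently-used order for ReduceCacheUsage(). A reduced standalone version of the find / lock-if-needed / move-to-back step (types are illustrative; a real discardable allocation can fail to relock):

// Standalone sketch: lock an entry if possible and move it to the MRU end.
#include <list>

struct Entry {
  unsigned key;
  bool locked;
  bool Lock() { locked = true; return true; }  // real discardable memory can fail here
};

bool LockAndTouch(std::list<Entry>& cache, unsigned key) {
  for (auto it = cache.begin(); it != cache.end(); ++it) {
    if (it->key != key)
      continue;
    Entry entry = *it;
    cache.erase(it);
    if (!entry.locked && !entry.Lock())
      return false;          // purged by the OS; caller falls back to a raster-time decode
    cache.push_back(entry);  // most recently used entries live at the back
    return true;
  }
  return false;
}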
scoped_refptr<ImageDecodeTask> ImageDecodeController::CreateTaskForImage( |
- const SkImage* image, |
- int layer_id, |
+ const ImageKey& key, |
+ const DrawImage& image, |
uint64_t prepare_tiles_id) { |
return make_scoped_refptr( |
- new ImageDecodeTaskImpl(this, image, layer_id, prepare_tiles_id)); |
+ new ImageDecodeTaskImpl(this, key, image, prepare_tiles_id)); |
} |
-void ImageDecodeController::DecodeImage(const SkImage* image) { |
- image->preroll(); |
-} |
+void ImageDecodeController::DecodeImage(const ImageKey& key, |
+ const DrawImage& image) { |
+ if (!CanHandleFilterQuality(image.filter_quality())) |
+ return; |
+ |
+ scoped_refptr<DecodedImage> decoded_image = |
+ DecodeImageInternal(key, image.image()); |
-void ImageDecodeController::AddLayerUsedCount(int layer_id) { |
- ++used_layer_counts_[layer_id]; |
+ // Add the image to the cache. Don't add the budget usage, since it was |
+ // already handled by the code that created the task for this decode. |
+ base::AutoLock lock(lock_); |
+ |
+  // All of the raster tasks could have finished (or been canceled) while this
+  // image decode task was running, which means that we now have a locked image
+  // but no ref counts. Unlock it immediately in this case.
+ if (decoded_images_ref_counts_.find(key) == decoded_images_ref_counts_.end()) |
+ decoded_image->Unlock(); |
+ |
+ DCHECK(std::find_if(decoded_images_.begin(), decoded_images_.end(), |
+ [key](const AnnotatedDecodedImage& image) { |
+ return image.first == key; |
+ }) == decoded_images_.end()); |
+ decoded_images_.push_back(AnnotatedDecodedImage(key, decoded_image)); |
} |
-void ImageDecodeController::SubtractLayerUsedCount(int layer_id) { |
- if (--used_layer_counts_[layer_id]) |
- return; |
+scoped_refptr<ImageDecodeController::DecodedImage> |
+ImageDecodeController::DecodeImageInternal(const ImageKey& key, |
+ const SkImage* image) { |
+ // TODO(vmpstr, reed): Scale the image here without caching it in skia. |
+ SkImageInfo info = SkImageInfo::MakeN32Premul(key.target_size().width(), |
+ key.target_size().height()); |
+ scoped_ptr<base::DiscardableMemory> locked_memory = |
+ base::DiscardableMemoryAllocator::GetInstance() |
+ ->AllocateLockedDiscardableMemory(info.minRowBytes() * info.height()); |
- // Clean up decode tasks once a layer is no longer used. |
- used_layer_counts_.erase(layer_id); |
- image_decode_tasks_.erase(layer_id); |
+ skia::RefPtr<SkCanvas> canvas = skia::AdoptRef(SkCanvas::NewRasterDirect( |
+ info, locked_memory->data(), info.minRowBytes())); |
+ canvas->setMatrix(SkMatrix::MakeRectToRect( |
+ SkRect::MakeWH(image->width(), image->height()), |
+ SkRect::MakeWH(key.target_size().width(), key.target_size().height()), |
+ SkMatrix::kFill_ScaleToFit)); |
+ canvas->clear(SK_ColorTRANSPARENT); |
+ SkPaint paint; |
+ paint.setFilterQuality(kHigh_SkFilterQuality); |
+ canvas->drawImage(image, 0, 0, &paint); |
+ canvas->flush(); |
+ |
+ return make_scoped_refptr(new DecodedImage(info, locked_memory.Pass())); |
} |
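
DecodeImageInternal() draws the source image, scaled, into discardable memory the controller owns rather than letting Skia cache the result. The same idea without the Skia dependency, using nearest-neighbour resampling into a plain buffer (a simplification of the high-quality filtering the real code requests):

// Standalone sketch: scale an RGBA source into a destination buffer we allocate,
// analogous to drawing into memory returned by a discardable-memory allocator.
#include <cstddef>
#include <cstdint>
#include <vector>

std::vector<uint32_t> ScaleRGBA(const std::vector<uint32_t>& src,
                                int src_w, int src_h,
                                int dst_w, int dst_h) {
  std::vector<uint32_t> dst(static_cast<size_t>(dst_w) * dst_h);
  for (int y = 0; y < dst_h; ++y) {
    int sy = y * src_h / dst_h;    // nearest-neighbour source row
    for (int x = 0; x < dst_w; ++x) {
      int sx = x * src_w / dst_w;  // nearest-neighbour source column
      dst[static_cast<size_t>(y) * dst_w + x] =
          src[static_cast<size_t>(sy) * src_w + sx];
    }
  }
  return dst;  // the real code wraps the locked buffer in an SkImage instead
}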
-void ImageDecodeController::OnImageDecodeTaskCompleted(int layer_id, |
- const SkImage* image, |
+void ImageDecodeController::OnImageDecodeTaskCompleted(const ImageKey& key, |
bool was_canceled) { |
- // If the task has successfully finished, then keep the task until the layer |
- // is no longer in use. This ensures that we only decode a image once. |
- // TODO(vmpstr): Remove this when decode lifetime is controlled by cc. |
- if (!was_canceled) |
- return; |
+ pending_image_tasks_.erase(key); |
+ SanityCheckState(__LINE__, false); |
+} |
+ |
+DecodedDrawImage ImageDecodeController::GetDecodedImageAndRef( |
+ const DrawImage& image) { |
+ if (!CanHandleFilterQuality(image.filter_quality())) { |
+ return DecodedDrawImage(image.image(), SkSize::Make(1.f, 1.f), |
+ image.filter_quality()); |
+ } |
+ |
+ ImageKey key = ImageKey::FromDrawImage(image); |
+ |
+ base::AutoLock lock(lock_); |
+ auto decoded_images_iterator = |
+ std::find_if(decoded_images_.begin(), decoded_images_.end(), |
+ [key](const AnnotatedDecodedImage& annotated_image) { |
+ return key == annotated_image.first; |
+ }); |
+ scoped_refptr<DecodedImage> decoded_image; |
+ if (decoded_images_iterator != decoded_images_.end()) { |
+ decoded_image = decoded_images_iterator->second; |
+ if (!decoded_image->is_locked()) { |
+ if (decoded_image->Lock()) { |
+ locked_images_budget_.AddUsage(key.target_bytes()); |
+ } else { |
+ decoded_images_.erase(decoded_images_iterator); |
+ decoded_image = nullptr; |
+ } |
+ } |
+ } |
- // Otherwise, we have to clean up the task so that a new one can be created if |
- // we need to decode the image again. |
- LayerImageTaskMap::iterator layer_it = image_decode_tasks_.find(layer_id); |
- if (layer_it == image_decode_tasks_.end()) |
+ if (!decoded_image) { |
+ // This means that we didn't have an image task to decode this (otherwise it |
+ // would have run and locked the image already). So, we need to decode/scale |
+ // in place. |
+ decoded_image = DecodeImageInternal(key, image.image()); |
+ DCHECK(std::find_if(decoded_images_.begin(), decoded_images_.end(), |
+ [key](const AnnotatedDecodedImage& image) { |
+ return image.first == key; |
+ }) == decoded_images_.end()); |
+ decoded_images_.push_back(AnnotatedDecodedImage(key, decoded_image)); |
+ locked_images_budget_.AddUsage(key.target_bytes()); |
+ } |
+ |
+ DCHECK(decoded_image->is_locked()) << key.ToString(); |
+ float x_scale = |
+ key.target_size().width() / static_cast<float>(image.image()->width()); |
+ float y_scale = |
+ key.target_size().height() / static_cast<float>(image.image()->height()); |
+ |
+ ++decoded_images_ref_counts_[key]; |
+ SanityCheckState(__LINE__, true); |
+ return DecodedDrawImage(decoded_image->image(), |
+ SkSize::Make(x_scale, y_scale), kLow_SkFilterQuality); |
+} |
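
Because the cached bitmap is already at the key's target size, the returned DecodedDrawImage reports the baked-in scale so the rasterizer can compensate; once that cancels out, kLow_SkFilterQuality is sufficient. A small standalone check of that arithmetic (all numbers hypothetical):

// Standalone sketch of the scale bookkeeping returned with a pre-scaled image.
#include <cassert>

int main() {
  const float src_w = 1000.f, src_h = 500.f;       // original image dimensions
  const float target_w = 250.f, target_h = 125.f;  // size baked into the cache key

  const float x_scale = target_w / src_w;  // 0.25f, reported in DecodedDrawImage
  const float y_scale = target_h / src_h;  // 0.25f

  // The scale the rasterizer still has to apply to the pre-scaled bitmap is the
  // requested scale divided by the baked-in scale; when the key matched the
  // request exactly this is 1.0, so low filter quality is enough.
  const float requested_scale_x = 0.25f, requested_scale_y = 0.25f;
  assert(requested_scale_x / x_scale == 1.f);
  assert(requested_scale_y / y_scale == 1.f);
  return 0;
}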
+ |
+void ImageDecodeController::DrawWithImageFinished(const DrawImage& image) { |
+ if (!CanHandleFilterQuality(image.filter_quality())) |
return; |
+ UnrefImage(image); |
+ SanityCheckState(__LINE__, false); |
+} |
+ |
+bool ImageDecodeController::CanHandleFilterQuality( |
+ SkFilterQuality filter_quality) { |
+ DCHECK(filter_quality != kNone_SkFilterQuality); |
+ // We don't need to handle low quality filters. |
+ if (filter_quality == kLow_SkFilterQuality) |
+ return false; |
+ // TODO(vmpstr): We need to start caching mipmaps for medium quality and |
+ // caching the interpolated values from those. For now, we don't have this. |
+ if (filter_quality == kMedium_SkFilterQuality) |
+ return false; |
+ DCHECK(filter_quality == kHigh_SkFilterQuality); |
+ return true; |
+} |
+ |
+void ImageDecodeController::ReduceCacheUsage() { |
+ base::AutoLock lock(lock_); |
+ size_t num_to_remove = (decoded_images_.size() > kMaxItemsInCache) |
+ ? (decoded_images_.size() - kMaxItemsInCache) |
+ : 0; |
+ for (auto it = decoded_images_.begin(); |
+ num_to_remove != 0 && it != decoded_images_.end();) { |
+ if (it->second->is_locked()) { |
+ ++it; |
+ continue; |
+ } |
+ |
+ it = decoded_images_.erase(it); |
+ --num_to_remove; |
+ } |
+} |
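
Eviction walks from the least-recently-used end and skips locked entries, which is why the cache can legitimately hold more than kMaxItemsInCache items. A reduced standalone version:

// Standalone sketch of size-capped eviction that never evicts locked entries.
#include <cstddef>
#include <list>

struct Entry { bool locked; };

void ReduceCacheUsage(std::list<Entry>& cache, size_t max_items) {
  size_t num_to_remove = cache.size() > max_items ? cache.size() - max_items : 0;
  for (auto it = cache.begin(); num_to_remove != 0 && it != cache.end();) {
    if (it->locked) {
      ++it;  // locked entries are exempt from the item limit
      continue;
    }
    it = cache.erase(it);
    --num_to_remove;
  }
}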
- ImageTaskMap& image_tasks = layer_it->second; |
- ImageTaskMap::iterator task_it = image_tasks.find(image->uniqueID()); |
- if (task_it == image_tasks.end()) |
+void ImageDecodeController::SanityCheckState(int line, bool lock_acquired) { |
+#if DCHECK_IS_ON() |
+ if (!lock_acquired) { |
+ base::AutoLock lock(lock_); |
+ SanityCheckState(line, true); |
return; |
- image_tasks.erase(task_it); |
+ } |
+ |
+ MemoryBudget budget(kLockedMemoryLimitBytes); |
+ for (const auto& annotated_image : decoded_images_) { |
+ auto ref_it = decoded_images_ref_counts_.find(annotated_image.first); |
+ if (annotated_image.second->is_locked()) { |
+ budget.AddUsage(annotated_image.first.target_bytes()); |
+ DCHECK(ref_it != decoded_images_ref_counts_.end()) << line; |
+ } else { |
+ DCHECK(ref_it == decoded_images_ref_counts_.end() || |
+ pending_image_tasks_.find(annotated_image.first) != |
+ pending_image_tasks_.end()) |
+ << line; |
+ } |
+ } |
+ DCHECK_GE(budget.AvailableMemoryBytes(), |
+ locked_images_budget_.AvailableMemoryBytes()) |
+ << line; |
+#endif // DCHECK_IS_ON() |
+} |
+ |
+// ImageDecodeControllerKey |
+ImageDecodeControllerKey ImageDecodeControllerKey::FromDrawImage( |
+ const DrawImage& image) { |
+ gfx::Size target_size(std::abs(SkScalarRoundToInt(image.image()->width() * |
+ image.scale().width())), |
+ std::abs(SkScalarRoundToInt(image.image()->height() * |
+ image.scale().height()))); |
+ return ImageDecodeControllerKey(image.image()->uniqueID(), target_size, |
+ image.filter_quality()); |
+} |
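
The key quantizes the requested scale into integer pixel dimensions, so two DrawImages that scale the same image to the same size share one cache entry. A standalone sketch of that rounding (ComputeTargetSize and TargetSize are illustrative names):

// Standalone sketch of the target-size computation used by the cache key.
#include <cmath>
#include <cstdlib>

struct TargetSize { int width; int height; };

TargetSize ComputeTargetSize(int image_w, int image_h, float scale_x, float scale_y) {
  // Round to the nearest pixel and take the absolute value so mirrored
  // (negative) scales map to the same decoded bitmap.
  return TargetSize{std::abs(static_cast<int>(std::lround(image_w * scale_x))),
                    std::abs(static_cast<int>(std::lround(image_h * scale_y)))};
}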
+ |
+ImageDecodeControllerKey::ImageDecodeControllerKey( |
+ uint32_t image_id, |
+ const gfx::Size& size, |
+ SkFilterQuality filter_quality) |
+ : image_id_(image_id), size_(size), filter_quality_(filter_quality) {} |
+ |
+std::string ImageDecodeControllerKey::ToString() const { |
+ std::ostringstream str; |
+ str << "id[" << image_id_ << "] size[" << size_.width() << "x" |
+ << size_.height() << "] filter_quality[" << filter_quality_ << "]"; |
+ return str.str(); |
+} |
+ |
+// TODO(vmpstr): Not sure if this needs to do something. It could have some
+// extra state to DCHECK if we pass it a DecodedImage object as the context.
+static void noop(const void* pixels, void* context) {} |
+ |
+// DecodedImage |
+ImageDecodeController::DecodedImage::DecodedImage( |
+ const SkImageInfo& info, |
+ scoped_ptr<base::DiscardableMemory> memory) |
+ : locked_(true), image_info_(info), memory_(memory.Pass()) { |
+ CreateImageFromLockedMemory(); |
+} |
+ |
+ImageDecodeController::DecodedImage::~DecodedImage() {} |
+ |
+bool ImageDecodeController::DecodedImage::Lock() { |
+ DCHECK(!locked_); |
+ bool success = memory_->Lock(); |
+ if (!success) |
+ return false; |
+ locked_ = true; |
+ CreateImageFromLockedMemory(); |
+ return true; |
+} |
+ |
+void ImageDecodeController::DecodedImage::Unlock() { |
+ DCHECK(locked_); |
+ image_.clear(); |
+ memory_->Unlock(); |
+ locked_ = false; |
+} |
+ |
+void ImageDecodeController::DecodedImage::CreateImageFromLockedMemory() { |
+ image_ = skia::AdoptRef(SkImage::NewFromRaster( |
+ image_info_, memory_->data(), image_info_.minRowBytes(), &noop, nullptr)); |
+} |
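
DecodedImage only exposes a valid SkImage while its backing is locked: Unlock() drops the image before unlocking, and Lock() can fail if the system purged the pages. A standalone analogue with a purgeable buffer standing in for base::DiscardableMemory (all names illustrative):

// Standalone sketch of the lock/unlock lifecycle around purgeable pixel memory.
#include <cstdint>
#include <vector>

class PurgeableBuffer {
 public:
  explicit PurgeableBuffer(size_t size) : data_(size), purged_(false) {}
  bool Lock() { return !purged_; }  // fails if the system reclaimed the pages
  void Unlock() { /* pages may be purged any time after this */ }
  void SimulatePurge() { purged_ = true; }
  uint8_t* data() { return data_.data(); }

 private:
  std::vector<uint8_t> data_;
  bool purged_;
};

class DecodedPixels {
 public:
  explicit DecodedPixels(size_t bytes) : memory_(bytes), locked_(true) {
    pixels_ = memory_.data();  // analogous to CreateImageFromLockedMemory()
  }
  bool Lock() {
    if (!memory_.Lock())
      return false;
    locked_ = true;
    pixels_ = memory_.data();
    return true;
  }
  void Unlock() {
    pixels_ = nullptr;  // never hand out pointers into unlocked memory
    memory_.Unlock();
    locked_ = false;
  }
  bool is_locked() const { return locked_; }

 private:
  PurgeableBuffer memory_;
  bool locked_;
  uint8_t* pixels_;
};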
+ |
+// MemoryBudget |
+ImageDecodeController::MemoryBudget::MemoryBudget(size_t limit_bytes) |
+ : limit_bytes_(limit_bytes), current_usage_bytes_(0u) {} |
+ |
+size_t ImageDecodeController::MemoryBudget::AvailableMemoryBytes() const { |
+ size_t usage = GetCurrentUsageSafe(); |
+ return usage >= limit_bytes_ ? 0u : (limit_bytes_ - usage); |
+} |
+ |
+void ImageDecodeController::MemoryBudget::AddUsage(size_t usage) { |
+ current_usage_bytes_ += usage; |
+} |
+ |
+void ImageDecodeController::MemoryBudget::SubtractUsage(size_t usage) { |
+ DCHECK_GE(current_usage_bytes_.ValueOrDefault(0u), usage); |
+ current_usage_bytes_ -= usage; |
+} |
+ |
+void ImageDecodeController::MemoryBudget::ResetUsage() { |
+ current_usage_bytes_ = 0; |
+} |
+ |
+bool ImageDecodeController::MemoryBudget::NeedsReset() const { |
+ return !current_usage_bytes_.IsValid(); |
+} |
+ |
+size_t ImageDecodeController::MemoryBudget::GetCurrentUsageSafe() const { |
+ return current_usage_bytes_.ValueOrDefault( |
+ std::numeric_limits<size_t>::max()); |
} |
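
Because current_usage_bytes_ is a checked integer, an accounting bug that overflows it is detected via NeedsReset() and reads as zero available memory instead of a bogus value. A reduced standalone version with an explicit validity flag (not the base::CheckedNumeric API):

// Standalone sketch of an overflow-checked memory budget.
#include <cstddef>
#include <limits>

class MemoryBudget {
 public:
  explicit MemoryBudget(size_t limit) : limit_(limit) {}

  void AddUsage(size_t bytes) {
    if (usage_ > std::numeric_limits<size_t>::max() - bytes)
      valid_ = false;  // overflow: the budget is now poisoned
    else
      usage_ += bytes;
  }
  void SubtractUsage(size_t bytes) {
    if (bytes > usage_)
      valid_ = false;  // underflow counts as invalid too
    else
      usage_ -= bytes;
  }
  size_t AvailableMemoryBytes() const {
    size_t usage = valid_ ? usage_ : std::numeric_limits<size_t>::max();
    return usage >= limit_ ? 0u : limit_ - usage;
  }
  bool NeedsReset() const { return !valid_; }
  void ResetUsage() { usage_ = 0; valid_ = true; }

 private:
  size_t limit_;
  size_t usage_ = 0;
  bool valid_ = true;
};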
} // namespace cc |