Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(2127)

Unified Diff: android_webview/browser/global_tile_manager.cc

Issue 226363004: Global GPU memory manager for android webview (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: address Bo's comments Created 6 years, 8 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View side-by-side diff with in-line comments
Download patch
Index: android_webview/browser/global_tile_manager.cc
diff --git a/android_webview/browser/global_tile_manager.cc b/android_webview/browser/global_tile_manager.cc
new file mode 100644
index 0000000000000000000000000000000000000000..2eb41eb4b6f9f8cba149e32a9586b9d459e6b1a5
--- /dev/null
+++ b/android_webview/browser/global_tile_manager.cc
@@ -0,0 +1,126 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "android_webview/browser/global_tile_manager.h"
+#include "android_webview/browser/global_tile_manager_client.h"
+#include "base/lazy_instance.h"
+
+namespace android_webview {
+
namespace {
// Process-wide singleton storage; Leaky because the manager must outlive
// all WebView clients and is never torn down explicitly.
base::LazyInstance<GlobalTileManager>::Leaky g_tile_manager =
    LAZY_INSTANCE_INITIALIZER;

// The soft limit of the number of file descriptors per process is 1024 on
// Android and gralloc buffers may not be the only thing that uses file
// descriptors. For each tile, there is a gralloc buffer backing it, which
// uses 2 FDs. So 450 tiles consume up to 900 FDs, leaving headroom under
// the 1024 soft limit for everything else in the process.
const size_t kNumTilesLimit = 450;
}  // namespace
+
+// static
+GlobalTileManager* GlobalTileManager::GetInstance() {
+ return g_tile_manager.Pointer();
+}
+
// Unregisters a client from the MRU list. |key| must be a valid iterator
// obtained from PushBack().
void GlobalTileManager::Remove(Key key) {
  DCHECK(mru_list_.end() != key);
  // If the erased element is the inactive-views boundary, advance the
  // boundary first so it keeps referring to a live element; list::erase
  // invalidates only iterators to the erased element.
  if (key == inactive_views_)
    inactive_views_++;
  mru_list_.erase(key);
}
+
+size_t GlobalTileManager::Evict(size_t desired_num_tiles, Key key) {
+ size_t total_evicted_tiles = 0;
+
+ // Evicts from the least recent drawn view, until the disired number of tiles
+ // can be reclaimed, or until we've evicted all inactive views.
+ for (Key it = inactive_views_; it != mru_list_.end(); it++) {
boliu 2014/05/01 00:37:40 No...I was wrong. You should use rbegin, but *sto
hush (inactive) 2014/05/01 19:03:04 I will backwards traverse from the end of the list
+ // key represents the view that requested the eviction, so we don't need to
+ // evict the requester itself.
+ if (*it == *key)
+ continue;
+
+ size_t evicted_tiles = (*it)->ForceDropTiles();
+ total_evicted_tiles += evicted_tiles;
+ if (total_evicted_tiles >= desired_num_tiles)
+ break;
+ }
+
+ return total_evicted_tiles;
+}
+
+size_t GlobalTileManager::RequestTiles(size_t old_num_of_tiles,
+ size_t new_num_of_tiles,
+ bool is_starved,
boliu 2014/05/01 00:37:40 var name mismatch with header, so are you keeping
hush (inactive) 2014/05/01 19:03:04 I will change the bool name to needs_eviction On 2
+ Key key) {
+ DCHECK(CheckConsistency());
+ size_t num_of_active_views =
+ std::distance(mru_list_.begin(), inactive_views_);
+ size_t tiles_per_view_limit;
+ if (num_of_active_views == 0)
+ tiles_per_view_limit = kNumTilesLimit;
+ else
+ tiles_per_view_limit = kNumTilesLimit / num_of_active_views;
+ new_num_of_tiles = std::min(new_num_of_tiles, tiles_per_view_limit);
+ size_t new_total_allocated_tiles =
+ total_allocated_tiles_ - old_num_of_tiles + new_num_of_tiles;
+ // Has enough tiles to satisfy the request.
+ if (new_total_allocated_tiles <= kNumTilesLimit) {
+ total_allocated_tiles_ = new_total_allocated_tiles;
+ return new_num_of_tiles;
+ }
+
+ // Does not have enough tiles, but the request is not starved. Gives what's
+ // left to the requester.
+ if (!is_starved) {
+ size_t tiles_left = kNumTilesLimit - total_allocated_tiles_;
+ total_allocated_tiles_ = kNumTilesLimit;
+ return tiles_left + old_num_of_tiles;
+ }
+
+ // Does not have enough tiles and the requester is starved.
+ size_t evicted_tiles = Evict(new_total_allocated_tiles - kNumTilesLimit, key);
+ new_total_allocated_tiles -= evicted_tiles;
+ total_allocated_tiles_ = new_total_allocated_tiles;
+ return new_num_of_tiles;
+}
+
+GlobalTileManager::Key GlobalTileManager::PushBack(
+ GlobalTileManagerClient* client) {
+ DCHECK(mru_list_.end() ==
+ std::find(mru_list_.begin(), mru_list_.end(), client));
+ mru_list_.push_back(client);
+ Key back = mru_list_.end();
+ back--;
+ return back;
+}
+
// Marks the view at |key| as just drawn, promoting it to the front of the
// MRU list and refreshing the active/inactive boundary.
void GlobalTileManager::DidUse(Key key) {
  DCHECK(mru_list_.end() != key);

  // Everything after |key| in the current MRU order becomes inactive.
  // NOTE(review): this also demotes views that were previously between
  // |key| and the old boundary — confirm that is the intended policy.
  inactive_views_ = key;
  inactive_views_++;
  // Move |key| to the front (most recently used). list::splice does not
  // invalidate iterators, so both |key| and inactive_views_ stay valid.
  mru_list_.splice(mru_list_.begin(), mru_list_, key);
}
+
+GlobalTileManager::GlobalTileManager() {
+ total_allocated_tiles_ = 0;
+ inactive_views_ = mru_list_.end();
+}
+
// Never run in practice: the singleton is LazyInstance<>::Leaky.
GlobalTileManager::~GlobalTileManager() {
}
+
+bool GlobalTileManager::CheckConsistency() const {
+ size_t total_tiles = 0;
+ ListType::const_iterator it;
+ for (it = mru_list_.begin(); it != mru_list_.end(); it++) {
+ total_tiles += (*it)->GetTilesNum();
+ }
+
+ return total_tiles == total_allocated_tiles_;
+}
+
}  // namespace android_webview

Powered by Google App Engine
This is Rietveld 408576698