Chromium Code Reviews
| Index: base/memory/discardable_memory_android.cc |
| diff --git a/base/memory/discardable_memory_android.cc b/base/memory/discardable_memory_android.cc |
| index 7e84967055f1ba9d35f8c6efaeb84b12833586ad..e127047685daf79f8ac9d5fd337e0a0f538fa7f1 100644 |
| --- a/base/memory/discardable_memory_android.cc |
| +++ b/base/memory/discardable_memory_android.cc |
| @@ -18,6 +18,7 @@ |
| #include "base/logging.h" |
| #include "base/memory/discardable_memory.h" |
| #include "base/memory/discardable_memory_allocator_android.h" |
| +#include "base/memory/discardable_memory_emulated.h" |
| #include "base/synchronization/lock.h" |
| #include "third_party/ashmem/ashmem.h" |
| @@ -118,6 +119,16 @@ bool CheckSizeCanBeAlignedToNextPage(size_t size) { |
| return size <= std::numeric_limits<size_t>::max() - kPageSize + 1; |
| } |
| +struct SupportedTypeVector { |
|
Philippe
2013/12/17 14:28:21
This is just a suggestion but I see that we have n
reveman
2013/12/18 08:12:38
Please have a look at the latest code. I simply ma
Philippe
2013/12/18 09:07:47
Yeah, even better!
|
| + SupportedTypeVector() { |
| + v.push_back(DISCARDABLE_MEMORY_ANDROID); |
| + v.push_back(DISCARDABLE_MEMORY_EMULATED); |
| + } |
| + std::vector<DiscardableMemoryType> v; |
| +}; |
| +LazyInstance<SupportedTypeVector>::Leaky g_supported_types = |
| + LAZY_INSTANCE_INITIALIZER; |
| + |
| } // namespace |
| namespace internal { |
| @@ -199,8 +210,9 @@ bool UnlockAshmemRegion(int fd, size_t off, size_t size, const void* address) { |
| } // namespace internal |
| // static |
| -bool DiscardableMemory::SupportedNatively() { |
| - return true; |
| +const std::vector<DiscardableMemoryType>& |
| + DiscardableMemory::GetSupportedTypes() { |
| + return g_supported_types.Get().v; |
| } |
| // Allocation can happen in two ways: |
| @@ -221,29 +233,48 @@ bool DiscardableMemory::SupportedNatively() { |
| // static |
| scoped_ptr<DiscardableMemory> DiscardableMemory::CreateLockedMemory( |
| size_t size) { |
| - if (!CheckSizeCanBeAlignedToNextPage(size)) |
| - return scoped_ptr<DiscardableMemory>(); |
| - // Pinning & unpinning works with page granularity therefore align the size |
| - // upfront. |
| - const size_t aligned_size = internal::AlignToNextPage(size); |
| - // Note that the following code is slightly racy. The worst that can happen in |
| - // practice though is taking the wrong decision (e.g. using the allocator |
| - // rather than DiscardableMemoryAndroidSimple). Moreover keeping the lock |
| - // acquired for the whole allocation would cause a deadlock when the allocator |
| - // tries to create an ashmem region. |
| - const size_t kAllocatorRegionSize = |
| - internal::DiscardableMemoryAllocator::kMinAshmemRegionSize; |
| - GlobalContext* const global_context = g_context.Pointer(); |
| - if (aligned_size >= kAllocatorRegionSize || |
| - GetCurrentNumberOfAshmemFDs() < 0.9 * global_context->ashmem_fd_limit) { |
| - int fd; |
| - void* address; |
| - if (internal::CreateAshmemRegion("", aligned_size, &fd, &address)) { |
| - return scoped_ptr<DiscardableMemory>( |
| - new DiscardableMemoryAndroidSimple(fd, address, aligned_size)); |
| + switch (GetType()) { |
| + case DISCARDABLE_MEMORY_NONE: |
| + case DISCARDABLE_MEMORY_MAC: |
| + return scoped_ptr<DiscardableMemory>(); |
| + case DISCARDABLE_MEMORY_ANDROID: { |
| + if (!CheckSizeCanBeAlignedToNextPage(size)) |
| + return scoped_ptr<DiscardableMemory>(); |
| + // Pinning & unpinning works with page granularity therefore align the |
| + // size upfront. |
| + const size_t aligned_size = internal::AlignToNextPage(size); |
| + // Note that the following code is slightly racy. The worst that can |
| + // happen in practice though is taking the wrong decision (e.g. using |
| + // the allocator rather than DiscardableMemoryAndroidSimple). Moreover |
| + // keeping the lock acquired for the whole allocation would cause a |
| + // deadlock when the allocator tries to create an ashmem region. |
| + const size_t kAllocatorRegionSize = |
| + internal::DiscardableMemoryAllocator::kMinAshmemRegionSize; |
| + GlobalContext* const global_context = g_context.Pointer(); |
| + if (aligned_size >= kAllocatorRegionSize || |
| + GetCurrentNumberOfAshmemFDs() < |
| + 0.9 * global_context->ashmem_fd_limit) { |
| + int fd; |
| + void* address; |
| + if (internal::CreateAshmemRegion("", aligned_size, &fd, &address)) { |
| + return scoped_ptr<DiscardableMemory>( |
| + new DiscardableMemoryAndroidSimple(fd, address, aligned_size)); |
| + } |
| + } |
| + return global_context->allocator.Allocate(size); |
| + } |
| + case DISCARDABLE_MEMORY_EMULATED: { |
| + scoped_ptr<internal::DiscardableMemoryEmulated> memory( |
| + new internal::DiscardableMemoryEmulated(size)); |
| + if (!memory->Initialize()) |
|
Philippe
2013/12/17 14:28:21
I have to say that I look forward to seeing Discar
Philippe
2013/12/17 15:26:22
Just realized that this may sound harsh. Sorry if
reveman
2013/12/18 08:12:38
DiscardableMemoryEmulated is likely not going to b
Philippe
2013/12/18 09:07:47
Those things are personal preferences obviously an
|
| + return scoped_ptr<DiscardableMemory>(); |
| + |
| + return memory.PassAs<DiscardableMemory>(); |
| } |
| } |
| - return global_context->allocator.Allocate(size); |
| + |
| + NOTREACHED(); |
| + return scoped_ptr<DiscardableMemory>(); |
| } |
| // static |