Index: base/allocator/malloc_zone_functions_mac.cc
diff --git a/base/allocator/malloc_zone_functions_mac.cc b/base/allocator/malloc_zone_functions_mac.cc
index e64719da9e63fa485e2a3a6aca2aa7644e458fba..6a7a4f75d5f6754e6fd793fbb75fe5e5be980a03 100644
--- a/base/allocator/malloc_zone_functions_mac.cc
+++ b/base/allocator/malloc_zone_functions_mac.cc
@@ -10,11 +10,11 @@
 namespace base {
 namespace allocator {
 
-MallocZoneFunctions* g_malloc_zones = nullptr;
-MallocZoneFunctions::MallocZoneFunctions() {}
+MallocZoneFunctions g_malloc_zones[kMaxZoneCount];
 
 void StoreZoneFunctions(const ChromeMallocZone* zone,
                         MallocZoneFunctions* functions) {
+  memset(functions, 0, sizeof(MallocZoneFunctions));
   functions->malloc = zone->malloc;
   functions->calloc = zone->calloc;
   functions->valloc = zone->valloc;
@@ -51,10 +51,6 @@ base::Lock& GetLock() {
 
 void EnsureMallocZonesInitializedLocked() {
   GetLock().AssertAcquired();
-  if (!g_malloc_zones) {
-    g_malloc_zones = reinterpret_cast<base::allocator::MallocZoneFunctions*>(
-        calloc(kMaxZoneCount, sizeof(MallocZoneFunctions)));
-  }
 }
 
 int g_zone_count = 0;
@@ -71,14 +67,14 @@ bool IsMallocZoneAlreadyStoredLocked(ChromeMallocZone* zone) {
 
 }  // namespace
 
-void StoreMallocZone(ChromeMallocZone* zone) {
+bool StoreMallocZone(ChromeMallocZone* zone) {
   base::AutoLock l(GetLock());
   EnsureMallocZonesInitializedLocked();
   if (IsMallocZoneAlreadyStoredLocked(zone))
-    return;
+    return false;
 
   if (g_zone_count == kMaxZoneCount)
-    return;
+    return false;
 
   StoreZoneFunctions(zone, &g_malloc_zones[g_zone_count]);
   ++g_zone_count;
@@ -87,6 +83,7 @@ void StoreMallocZone(ChromeMallocZone* zone) {
   // reads these values is triggered after this function returns. so we want to
   // guarantee that they are committed at this stage"
   base::subtle::MemoryBarrier();
+  return true;
 }
 
 bool IsMallocZoneAlreadyStored(ChromeMallocZone* zone) {