OLD | NEW |
1 // Copyright 2016 The Chromium Authors. All rights reserved. | 1 // Copyright 2016 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "base/allocator/allocator_shim.h" | 5 #include "base/allocator/allocator_shim.h" |
6 | 6 |
7 #include <malloc.h> | |
8 #include <stdlib.h> | 7 #include <stdlib.h> |
9 #include <string.h> | 8 #include <string.h> |
10 | 9 |
11 #include <memory> | 10 #include <memory> |
12 #include <new> | 11 #include <new> |
13 #include <vector> | 12 #include <vector> |
14 | 13 |
15 #include "base/allocator/features.h" | 14 #include "base/allocator/features.h" |
16 #include "base/atomicops.h" | 15 #include "base/atomicops.h" |
17 #include "base/process/process_metrics.h" | 16 #include "base/process/process_metrics.h" |
18 #include "base/synchronization/waitable_event.h" | 17 #include "base/synchronization/waitable_event.h" |
19 #include "base/threading/platform_thread.h" | 18 #include "base/threading/platform_thread.h" |
20 #include "base/threading/thread_local.h" | 19 #include "base/threading/thread_local.h" |
| 20 #include "build/build_config.h" |
21 #include "testing/gmock/include/gmock/gmock.h" | 21 #include "testing/gmock/include/gmock/gmock.h" |
22 #include "testing/gtest/include/gtest/gtest.h" | 22 #include "testing/gtest/include/gtest/gtest.h" |
23 | 23 |
24 #if defined(OS_WIN) | 24 #if defined(OS_WIN) |
25 #include <windows.h> | 25 #include <windows.h> |
| 26 #elif defined(OS_MACOSX) |
| 27 #include <malloc/malloc.h> |
| 28 #include "third_party/apple_apsl/malloc.h" |
26 #else | 29 #else |
| 30 #include <malloc.h> |
| 31 #endif |
| 32 |
| 33 #if !defined(OS_WIN) |
27 #include <unistd.h> | 34 #include <unistd.h> |
28 #endif | 35 #endif |
29 | 36 |
30 // Some new Android NDKs (64 bit) do not expose (p)valloc anymore. These | 37 // Some new Android NDKs (64 bit) do not expose (p)valloc anymore. These |
31 // functions are implemented at the shim-layer level. | 38 // functions are implemented at the shim-layer level. |
32 #if defined(OS_ANDROID) | 39 #if defined(OS_ANDROID) |
33 extern "C" { | 40 extern "C" { |
34 void* valloc(size_t size); | 41 void* valloc(size_t size); |
35 void* pvalloc(size_t size); | 42 void* pvalloc(size_t size); |
36 } | 43 } |
(...skipping 39 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
76 if (alignment < kMaxSizeTracked) | 83 if (alignment < kMaxSizeTracked) |
77 ++(instance_->aligned_allocs_intercepted_by_alignment[alignment]); | 84 ++(instance_->aligned_allocs_intercepted_by_alignment[alignment]); |
78 } | 85 } |
79 return self->next->alloc_aligned_function(self->next, alignment, size); | 86 return self->next->alloc_aligned_function(self->next, alignment, size); |
80 } | 87 } |
81 | 88 |
82 static void* MockRealloc(const AllocatorDispatch* self, | 89 static void* MockRealloc(const AllocatorDispatch* self, |
83 void* address, | 90 void* address, |
84 size_t size) { | 91 size_t size) { |
85 if (instance_) { | 92 if (instance_) { |
86 // Address 0x420 is a special sentinel for the NewHandlerConcurrency test. | 93 // Size 0xFEED is a special sentinel for the NewHandlerConcurrency test. |
87 // The first time (but only the first one) it is hit it fails, causing the | 94 // Hitting it for the first time will cause a failure, causing the |
88 // invocation of the std::new_handler. | 95 // invocation of the std::new_handler. |
89 if (address == reinterpret_cast<void*>(0x420)) { | 96 if (size == 0xFEED) { |
90 if (!instance_->did_fail_realloc_0x420_once->Get()) { | 97 if (!instance_->did_fail_realloc_0xfeed_once->Get()) { |
91 instance_->did_fail_realloc_0x420_once->Set(true); | 98 instance_->did_fail_realloc_0xfeed_once->Set(true); |
92 return nullptr; | 99 return nullptr; |
93 } else { | 100 } else { |
94 return reinterpret_cast<void*>(0x420ul); | 101 return address; |
95 } | 102 } |
96 } | 103 } |
97 | 104 |
98 if (size < kMaxSizeTracked) | 105 if (size < kMaxSizeTracked) |
99 ++(instance_->reallocs_intercepted_by_size[size]); | 106 ++(instance_->reallocs_intercepted_by_size[size]); |
100 ++instance_->reallocs_intercepted_by_addr[Hash(address)]; | 107 ++instance_->reallocs_intercepted_by_addr[Hash(address)]; |
101 } | 108 } |
102 return self->next->realloc_function(self->next, address, size); | 109 return self->next->realloc_function(self->next, address, size); |
103 } | 110 } |
104 | 111 |
105 static void MockFree(const AllocatorDispatch* self, void* address) { | 112 static void MockFree(const AllocatorDispatch* self, void* address) { |
106 if (instance_) { | 113 if (instance_) { |
107 ++instance_->frees_intercepted_by_addr[Hash(address)]; | 114 ++instance_->frees_intercepted_by_addr[Hash(address)]; |
108 } | 115 } |
109 self->next->free_function(self->next, address); | 116 self->next->free_function(self->next, address); |
110 } | 117 } |
111 | 118 |
| 119 static size_t MockGetSizeEstimate(const AllocatorDispatch* self, |
| 120 void* address) { |
| 121 return self->next->get_size_estimate_function(self->next, address); |
| 122 } |
| 123 |
| 124 static unsigned MockBatchMalloc(const AllocatorDispatch* self, |
| 125 size_t size, |
| 126 void** results, |
| 127 unsigned num_requested) { |
| 128 if (instance_) { |
| 129 instance_->batch_mallocs_intercepted_by_size[size] = |
| 130 instance_->batch_mallocs_intercepted_by_size[size] + num_requested; |
| 131 } |
| 132 return self->next->batch_malloc_function(self->next, size, results, |
| 133 num_requested); |
| 134 } |
| 135 |
| 136 static void MockBatchFree(const AllocatorDispatch* self, |
| 137 void** to_be_freed, |
| 138 unsigned num_to_be_freed) { |
| 139 if (instance_) { |
| 140 for (unsigned i = 0; i < num_to_be_freed; ++i) { |
| 141 ++instance_->batch_frees_intercepted_by_addr[Hash(to_be_freed[i])]; |
| 142 } |
| 143 } |
| 144 self->next->batch_free_function(self->next, to_be_freed, num_to_be_freed); |
| 145 } |
| 146 |
| 147 static void MockFreeDefiniteSize(const AllocatorDispatch* self, |
| 148 void* ptr, |
| 149 size_t size) { |
| 150 if (instance_) { |
| 151 ++instance_->frees_intercepted_by_addr[Hash(ptr)]; |
| 152 ++instance_->free_definite_sizes_intercepted_by_size[size]; |
| 153 } |
| 154 self->next->free_definite_size_function(self->next, ptr, size); |
| 155 } |
| 156 |
112 static void NewHandler() { | 157 static void NewHandler() { |
113 if (!instance_) | 158 if (!instance_) |
114 return; | 159 return; |
115 subtle::Barrier_AtomicIncrement(&instance_->num_new_handler_calls, 1); | 160 subtle::Barrier_AtomicIncrement(&instance_->num_new_handler_calls, 1); |
116 } | 161 } |
117 | 162 |
118 int32_t GetNumberOfNewHandlerCalls() { | 163 int32_t GetNumberOfNewHandlerCalls() { |
119 return subtle::Acquire_Load(&instance_->num_new_handler_calls); | 164 return subtle::Acquire_Load(&instance_->num_new_handler_calls); |
120 } | 165 } |
121 | 166 |
122 void SetUp() override { | 167 void SetUp() override { |
123 const size_t array_size = kMaxSizeTracked * sizeof(size_t); | 168 const size_t array_size = kMaxSizeTracked * sizeof(size_t); |
124 memset(&allocs_intercepted_by_size, 0, array_size); | 169 memset(&allocs_intercepted_by_size, 0, array_size); |
125 memset(&zero_allocs_intercepted_by_size, 0, array_size); | 170 memset(&zero_allocs_intercepted_by_size, 0, array_size); |
126 memset(&aligned_allocs_intercepted_by_size, 0, array_size); | 171 memset(&aligned_allocs_intercepted_by_size, 0, array_size); |
127 memset(&aligned_allocs_intercepted_by_alignment, 0, array_size); | 172 memset(&aligned_allocs_intercepted_by_alignment, 0, array_size); |
128 memset(&reallocs_intercepted_by_size, 0, array_size); | 173 memset(&reallocs_intercepted_by_size, 0, array_size); |
129 memset(&frees_intercepted_by_addr, 0, array_size); | 174 memset(&frees_intercepted_by_addr, 0, array_size); |
130 did_fail_realloc_0x420_once.reset(new ThreadLocalBoolean()); | 175 memset(&batch_mallocs_intercepted_by_size, 0, array_size); |
| 176 memset(&batch_frees_intercepted_by_addr, 0, array_size); |
| 177 memset(&free_definite_sizes_intercepted_by_size, 0, array_size); |
| 178 did_fail_realloc_0xfeed_once.reset(new ThreadLocalBoolean()); |
131 subtle::Release_Store(&num_new_handler_calls, 0); | 179 subtle::Release_Store(&num_new_handler_calls, 0); |
132 instance_ = this; | 180 instance_ = this; |
| 181 |
133 } | 182 } |
134 | 183 |
| 184 #if defined(OS_MACOSX) |
| 185 static void SetUpTestCase() { |
| 186 InitializeAllocatorShim(); |
| 187 } |
| 188 #endif |
| 189 |
135 void TearDown() override { instance_ = nullptr; } | 190 void TearDown() override { instance_ = nullptr; } |
136 | 191 |
137 protected: | 192 protected: |
138 size_t allocs_intercepted_by_size[kMaxSizeTracked]; | 193 size_t allocs_intercepted_by_size[kMaxSizeTracked]; |
139 size_t zero_allocs_intercepted_by_size[kMaxSizeTracked]; | 194 size_t zero_allocs_intercepted_by_size[kMaxSizeTracked]; |
140 size_t aligned_allocs_intercepted_by_size[kMaxSizeTracked]; | 195 size_t aligned_allocs_intercepted_by_size[kMaxSizeTracked]; |
141 size_t aligned_allocs_intercepted_by_alignment[kMaxSizeTracked]; | 196 size_t aligned_allocs_intercepted_by_alignment[kMaxSizeTracked]; |
142 size_t reallocs_intercepted_by_size[kMaxSizeTracked]; | 197 size_t reallocs_intercepted_by_size[kMaxSizeTracked]; |
143 size_t reallocs_intercepted_by_addr[kMaxSizeTracked]; | 198 size_t reallocs_intercepted_by_addr[kMaxSizeTracked]; |
144 size_t frees_intercepted_by_addr[kMaxSizeTracked]; | 199 size_t frees_intercepted_by_addr[kMaxSizeTracked]; |
145 std::unique_ptr<ThreadLocalBoolean> did_fail_realloc_0x420_once; | 200 size_t batch_mallocs_intercepted_by_size[kMaxSizeTracked]; |
| 201 size_t batch_frees_intercepted_by_addr[kMaxSizeTracked]; |
| 202 size_t free_definite_sizes_intercepted_by_size[kMaxSizeTracked]; |
| 203 std::unique_ptr<ThreadLocalBoolean> did_fail_realloc_0xfeed_once; |
146 subtle::Atomic32 num_new_handler_calls; | 204 subtle::Atomic32 num_new_handler_calls; |
147 | 205 |
148 private: | 206 private: |
149 static AllocatorShimTest* instance_; | 207 static AllocatorShimTest* instance_; |
150 }; | 208 }; |
151 | 209 |
152 struct TestStruct1 { | 210 struct TestStruct1 { |
153 uint32_t ignored; | 211 uint32_t ignored; |
154 uint8_t ignored_2; | 212 uint8_t ignored_2; |
155 }; | 213 }; |
156 | 214 |
157 struct TestStruct2 { | 215 struct TestStruct2 { |
158 uint64_t ignored; | 216 uint64_t ignored; |
159 uint8_t ignored_3; | 217 uint8_t ignored_3; |
160 }; | 218 }; |
161 | 219 |
162 class ThreadDelegateForNewHandlerTest : public PlatformThread::Delegate { | 220 class ThreadDelegateForNewHandlerTest : public PlatformThread::Delegate { |
163 public: | 221 public: |
164 ThreadDelegateForNewHandlerTest(WaitableEvent* event) : event_(event) {} | 222 ThreadDelegateForNewHandlerTest(WaitableEvent* event) : event_(event) {} |
165 | 223 |
166 void ThreadMain() override { | 224 void ThreadMain() override { |
167 event_->Wait(); | 225 event_->Wait(); |
168 void* res = realloc(reinterpret_cast<void*>(0x420ul), 1); | 226 void* temp = malloc(1); |
169 EXPECT_EQ(reinterpret_cast<void*>(0x420ul), res); | 227 void* res = realloc(temp, 0xFEED); |
| 228 EXPECT_EQ(temp, res); |
170 } | 229 } |
171 | 230 |
172 private: | 231 private: |
173 WaitableEvent* event_; | 232 WaitableEvent* event_; |
174 }; | 233 }; |
175 | 234 |
176 AllocatorShimTest* AllocatorShimTest::instance_ = nullptr; | 235 AllocatorShimTest* AllocatorShimTest::instance_ = nullptr; |
177 | 236 |
178 AllocatorDispatch g_mock_dispatch = { | 237 AllocatorDispatch g_mock_dispatch = { |
179 &AllocatorShimTest::MockAlloc, /* alloc_function */ | 238 &AllocatorShimTest::MockAlloc, /* alloc_function */ |
180 &AllocatorShimTest::MockAllocZeroInit, /* alloc_zero_initialized_function */ | 239 &AllocatorShimTest::MockAllocZeroInit, /* alloc_zero_initialized_function */ |
181 &AllocatorShimTest::MockAllocAligned, /* alloc_aligned_function */ | 240 &AllocatorShimTest::MockAllocAligned, /* alloc_aligned_function */ |
182 &AllocatorShimTest::MockRealloc, /* realloc_function */ | 241 &AllocatorShimTest::MockRealloc, /* realloc_function */ |
183 &AllocatorShimTest::MockFree, /* free_function */ | 242 &AllocatorShimTest::MockFree, /* free_function */ |
184 nullptr, /* next */ | 243 &AllocatorShimTest::MockGetSizeEstimate, /* get_size_estimate_function */ |
| 244 &AllocatorShimTest::MockBatchMalloc, /* batch_malloc_function */ |
| 245 &AllocatorShimTest::MockBatchFree, /* batch_free_function */ |
| 246 &AllocatorShimTest::MockFreeDefiniteSize, /* free_definite_size_function */ |
| 247 nullptr, /* next */ |
185 }; | 248 }; |
186 | 249 |
187 TEST_F(AllocatorShimTest, InterceptLibcSymbols) { | 250 TEST_F(AllocatorShimTest, InterceptLibcSymbols) { |
188 InsertAllocatorDispatch(&g_mock_dispatch); | 251 InsertAllocatorDispatch(&g_mock_dispatch); |
189 | 252 |
190 void* alloc_ptr = malloc(19); | 253 void* alloc_ptr = malloc(19); |
191 ASSERT_NE(nullptr, alloc_ptr); | 254 ASSERT_NE(nullptr, alloc_ptr); |
192 ASSERT_GE(allocs_intercepted_by_size[19], 1u); | 255 ASSERT_GE(allocs_intercepted_by_size[19], 1u); |
193 | 256 |
194 void* zero_alloc_ptr = calloc(2, 23); | 257 void* zero_alloc_ptr = calloc(2, 23); |
195 ASSERT_NE(nullptr, zero_alloc_ptr); | 258 ASSERT_NE(nullptr, zero_alloc_ptr); |
196 ASSERT_GE(zero_allocs_intercepted_by_size[2 * 23], 1u); | 259 ASSERT_GE(zero_allocs_intercepted_by_size[2 * 23], 1u); |
197 | 260 |
198 #if !defined(OS_WIN) | 261 #if !defined(OS_WIN) |
199 void* memalign_ptr = memalign(128, 53); | 262 const size_t kPageSize = base::GetPageSize(); |
200 ASSERT_NE(nullptr, memalign_ptr); | |
201 ASSERT_EQ(0u, reinterpret_cast<uintptr_t>(memalign_ptr) % 128); | |
202 ASSERT_GE(aligned_allocs_intercepted_by_alignment[128], 1u); | |
203 ASSERT_GE(aligned_allocs_intercepted_by_size[53], 1u); | |
204 | |
205 void* posix_memalign_ptr = nullptr; | 263 void* posix_memalign_ptr = nullptr; |
206 int res = posix_memalign(&posix_memalign_ptr, 256, 59); | 264 int res = posix_memalign(&posix_memalign_ptr, 256, 59); |
207 ASSERT_EQ(0, res); | 265 ASSERT_EQ(0, res); |
208 ASSERT_NE(nullptr, posix_memalign_ptr); | 266 ASSERT_NE(nullptr, posix_memalign_ptr); |
209 ASSERT_EQ(0u, reinterpret_cast<uintptr_t>(posix_memalign_ptr) % 256); | 267 ASSERT_EQ(0u, reinterpret_cast<uintptr_t>(posix_memalign_ptr) % 256); |
210 ASSERT_GE(aligned_allocs_intercepted_by_alignment[256], 1u); | 268 ASSERT_GE(aligned_allocs_intercepted_by_alignment[256], 1u); |
211 ASSERT_GE(aligned_allocs_intercepted_by_size[59], 1u); | 269 ASSERT_GE(aligned_allocs_intercepted_by_size[59], 1u); |
212 | 270 |
213 void* valloc_ptr = valloc(61); | 271 void* valloc_ptr = valloc(61); |
214 ASSERT_NE(nullptr, valloc_ptr); | 272 ASSERT_NE(nullptr, valloc_ptr); |
215 const size_t kPageSize = base::GetPageSize(); | |
216 ASSERT_EQ(0u, reinterpret_cast<uintptr_t>(valloc_ptr) % kPageSize); | 273 ASSERT_EQ(0u, reinterpret_cast<uintptr_t>(valloc_ptr) % kPageSize); |
217 ASSERT_GE(aligned_allocs_intercepted_by_alignment[kPageSize], 1u); | 274 ASSERT_GE(aligned_allocs_intercepted_by_alignment[kPageSize], 1u); |
218 ASSERT_GE(aligned_allocs_intercepted_by_size[61], 1u); | 275 ASSERT_GE(aligned_allocs_intercepted_by_size[61], 1u); |
| 276 #endif // !OS_WIN |
| 277 |
| 278 #if !defined(OS_WIN) && !defined(OS_MACOSX) |
| 279 void* memalign_ptr = memalign(128, 53); |
| 280 ASSERT_NE(nullptr, memalign_ptr); |
| 281 ASSERT_EQ(0u, reinterpret_cast<uintptr_t>(memalign_ptr) % 128); |
| 282 ASSERT_GE(aligned_allocs_intercepted_by_alignment[128], 1u); |
| 283 ASSERT_GE(aligned_allocs_intercepted_by_size[53], 1u); |
219 | 284 |
220 void* pvalloc_ptr = pvalloc(67); | 285 void* pvalloc_ptr = pvalloc(67); |
221 ASSERT_NE(nullptr, pvalloc_ptr); | 286 ASSERT_NE(nullptr, pvalloc_ptr); |
222 ASSERT_EQ(0u, reinterpret_cast<uintptr_t>(pvalloc_ptr) % kPageSize); | 287 ASSERT_EQ(0u, reinterpret_cast<uintptr_t>(pvalloc_ptr) % kPageSize); |
223 ASSERT_GE(aligned_allocs_intercepted_by_alignment[kPageSize], 1u); | 288 ASSERT_GE(aligned_allocs_intercepted_by_alignment[kPageSize], 1u); |
224 // pvalloc rounds the size up to the next page. | 289 // pvalloc rounds the size up to the next page. |
225 ASSERT_GE(aligned_allocs_intercepted_by_size[kPageSize], 1u); | 290 ASSERT_GE(aligned_allocs_intercepted_by_size[kPageSize], 1u); |
226 #endif // OS_WIN | 291 #endif // !OS_WIN && !OS_MACOSX |
227 | 292 |
228 char* realloc_ptr = static_cast<char*>(realloc(nullptr, 71)); | 293 char* realloc_ptr = static_cast<char*>(malloc(10)); |
229 ASSERT_NE(nullptr, realloc_ptr); | |
230 ASSERT_GE(reallocs_intercepted_by_size[71], 1u); | |
231 ASSERT_GE(reallocs_intercepted_by_addr[Hash(nullptr)], 1u); | |
232 strcpy(realloc_ptr, "foobar"); | 294 strcpy(realloc_ptr, "foobar"); |
233 void* old_realloc_ptr = realloc_ptr; | 295 void* old_realloc_ptr = realloc_ptr; |
234 realloc_ptr = static_cast<char*>(realloc(realloc_ptr, 73)); | 296 realloc_ptr = static_cast<char*>(realloc(realloc_ptr, 73)); |
235 ASSERT_GE(reallocs_intercepted_by_size[73], 1u); | 297 ASSERT_GE(reallocs_intercepted_by_size[73], 1u); |
236 ASSERT_GE(reallocs_intercepted_by_addr[Hash(old_realloc_ptr)], 1u); | 298 ASSERT_GE(reallocs_intercepted_by_addr[Hash(old_realloc_ptr)], 1u); |
237 ASSERT_EQ(0, strcmp(realloc_ptr, "foobar")); | 299 ASSERT_EQ(0, strcmp(realloc_ptr, "foobar")); |
238 | 300 |
239 free(alloc_ptr); | 301 free(alloc_ptr); |
240 ASSERT_GE(frees_intercepted_by_addr[Hash(alloc_ptr)], 1u); | 302 ASSERT_GE(frees_intercepted_by_addr[Hash(alloc_ptr)], 1u); |
241 | 303 |
242 free(zero_alloc_ptr); | 304 free(zero_alloc_ptr); |
243 ASSERT_GE(frees_intercepted_by_addr[Hash(zero_alloc_ptr)], 1u); | 305 ASSERT_GE(frees_intercepted_by_addr[Hash(zero_alloc_ptr)], 1u); |
244 | 306 |
245 #if !defined(OS_WIN) | 307 #if !defined(OS_WIN) && !defined(OS_MACOSX) |
246 free(memalign_ptr); | 308 free(memalign_ptr); |
247 ASSERT_GE(frees_intercepted_by_addr[Hash(memalign_ptr)], 1u); | 309 ASSERT_GE(frees_intercepted_by_addr[Hash(memalign_ptr)], 1u); |
248 | 310 |
| 311 free(pvalloc_ptr); |
| 312 ASSERT_GE(frees_intercepted_by_addr[Hash(pvalloc_ptr)], 1u); |
| 313 #endif // !OS_WIN && !OS_MACOSX |
| 314 |
| 315 #if !defined(OS_WIN) |
249 free(posix_memalign_ptr); | 316 free(posix_memalign_ptr); |
250 ASSERT_GE(frees_intercepted_by_addr[Hash(posix_memalign_ptr)], 1u); | 317 ASSERT_GE(frees_intercepted_by_addr[Hash(posix_memalign_ptr)], 1u); |
251 | 318 |
252 free(valloc_ptr); | 319 free(valloc_ptr); |
253 ASSERT_GE(frees_intercepted_by_addr[Hash(valloc_ptr)], 1u); | 320 ASSERT_GE(frees_intercepted_by_addr[Hash(valloc_ptr)], 1u); |
254 | 321 #endif // !OS_WIN |
255 free(pvalloc_ptr); | |
256 ASSERT_GE(frees_intercepted_by_addr[Hash(pvalloc_ptr)], 1u); | |
257 #endif // OS_WIN | |
258 | 322 |
259 free(realloc_ptr); | 323 free(realloc_ptr); |
260 ASSERT_GE(frees_intercepted_by_addr[Hash(realloc_ptr)], 1u); | 324 ASSERT_GE(frees_intercepted_by_addr[Hash(realloc_ptr)], 1u); |
261 | 325 |
262 RemoveAllocatorDispatchForTesting(&g_mock_dispatch); | 326 RemoveAllocatorDispatchForTesting(&g_mock_dispatch); |
263 | 327 |
264 void* non_hooked_ptr = malloc(4095); | 328 void* non_hooked_ptr = malloc(4095); |
265 ASSERT_NE(nullptr, non_hooked_ptr); | 329 ASSERT_NE(nullptr, non_hooked_ptr); |
266 ASSERT_EQ(0u, allocs_intercepted_by_size[4095]); | 330 ASSERT_EQ(0u, allocs_intercepted_by_size[4095]); |
267 free(non_hooked_ptr); | 331 free(non_hooked_ptr); |
268 } | 332 } |
269 | 333 |
| 334 #if defined(OS_MACOSX) |
| 335 TEST_F(AllocatorShimTest, InterceptLibcSymbolsBatchMallocFree) { |
| 336 InsertAllocatorDispatch(&g_mock_dispatch); |
| 337 |
| 338 unsigned count = 13; |
| 339 std::vector<void*> results; |
| 340 results.resize(count); |
| 341 unsigned result_count = malloc_zone_batch_malloc(malloc_default_zone(), 99, |
| 342 results.data(), count); |
| 343 ASSERT_EQ(count, result_count); |
| 344 ASSERT_EQ(count, batch_mallocs_intercepted_by_size[99]); |
| 345 |
| 346 std::vector<void*> results_copy(results); |
| 347 malloc_zone_batch_free(malloc_default_zone(), results.data(), count); |
| 348 for (void* result : results_copy) { |
| 349 ASSERT_GE(batch_frees_intercepted_by_addr[Hash(result)], 1u); |
| 350 } |
| 351 RemoveAllocatorDispatchForTesting(&g_mock_dispatch); |
| 352 } |
| 353 |
| 354 TEST_F(AllocatorShimTest, InterceptLibcSymbolsFreeDefiniteSize) { |
| 355 InsertAllocatorDispatch(&g_mock_dispatch); |
| 356 |
| 357 void* alloc_ptr = malloc(19); |
| 358 ASSERT_NE(nullptr, alloc_ptr); |
| 359 ASSERT_GE(allocs_intercepted_by_size[19], 1u); |
| 360 |
| 361 ChromeMallocZone* default_zone = |
| 362 reinterpret_cast<ChromeMallocZone*>(malloc_default_zone()); |
| 363 default_zone->free_definite_size(malloc_default_zone(), alloc_ptr, 19); |
| 364 ASSERT_GE(free_definite_sizes_intercepted_by_size[19], 1u); |
| 365 RemoveAllocatorDispatchForTesting(&g_mock_dispatch); |
| 366 } |
| 367 #endif // defined(OS_MACOSX) |
| 368 |
270 TEST_F(AllocatorShimTest, InterceptCppSymbols) { | 369 TEST_F(AllocatorShimTest, InterceptCppSymbols) { |
271 InsertAllocatorDispatch(&g_mock_dispatch); | 370 InsertAllocatorDispatch(&g_mock_dispatch); |
272 | 371 |
273 TestStruct1* new_ptr = new TestStruct1; | 372 TestStruct1* new_ptr = new TestStruct1; |
274 ASSERT_NE(nullptr, new_ptr); | 373 ASSERT_NE(nullptr, new_ptr); |
275 ASSERT_GE(allocs_intercepted_by_size[sizeof(TestStruct1)], 1u); | 374 ASSERT_GE(allocs_intercepted_by_size[sizeof(TestStruct1)], 1u); |
276 | 375 |
277 TestStruct1* new_array_ptr = new TestStruct1[3]; | 376 TestStruct1* new_array_ptr = new TestStruct1[3]; |
278 ASSERT_NE(nullptr, new_array_ptr); | 377 ASSERT_NE(nullptr, new_array_ptr); |
279 ASSERT_GE(allocs_intercepted_by_size[sizeof(TestStruct1) * 3], 1u); | 378 ASSERT_GE(allocs_intercepted_by_size[sizeof(TestStruct1) * 3], 1u); |
(...skipping 17 matching lines...) Expand all Loading... |
297 | 396 |
298 delete[] new_array_nt_ptr; | 397 delete[] new_array_nt_ptr; |
299 ASSERT_GE(frees_intercepted_by_addr[Hash(new_array_nt_ptr)], 1u); | 398 ASSERT_GE(frees_intercepted_by_addr[Hash(new_array_nt_ptr)], 1u); |
300 | 399 |
301 RemoveAllocatorDispatchForTesting(&g_mock_dispatch); | 400 RemoveAllocatorDispatchForTesting(&g_mock_dispatch); |
302 } | 401 } |
303 | 402 |
304 // This test exercises the case of concurrent OOM failure, which would end up | 403 // This test exercises the case of concurrent OOM failure, which would end up |
305 // invoking std::new_handler concurrently. This is to cover the CallNewHandler() | 404 // invoking std::new_handler concurrently. This is to cover the CallNewHandler() |
306 // paths of allocator_shim.cc and smoke-test its thread safety. | 405 // paths of allocator_shim.cc and smoke-test its thread safety. |
307 // The test creates kNumThreads threads. Each of them does just a | 406 // The test creates kNumThreads threads. Each of them mallocs some memory, and |
308 // realloc(0x420). | 407 // then does a realloc(<new memory>, 0xFEED). |
309 // The shim intercepts such realloc and makes it fail only once on each thread. | 408 // The shim intercepts such realloc and makes it fail only once on each thread. |
310 // We expect to see exactly kNumThreads invocations of the new_handler. | 409 // We expect to see exactly kNumThreads invocations of the new_handler. |
311 TEST_F(AllocatorShimTest, NewHandlerConcurrency) { | 410 TEST_F(AllocatorShimTest, NewHandlerConcurrency) { |
312 const int kNumThreads = 32; | 411 const int kNumThreads = 32; |
313 PlatformThreadHandle threads[kNumThreads]; | 412 PlatformThreadHandle threads[kNumThreads]; |
314 | 413 |
315 // The WaitableEvent here is used to attempt to trigger all the threads at | 414 // The WaitableEvent here is used to attempt to trigger all the threads at |
316 // the same time, after they have been initialized. | 415 // the same time, after they have been initialized. |
317 WaitableEvent event(WaitableEvent::ResetPolicy::MANUAL, | 416 WaitableEvent event(WaitableEvent::ResetPolicy::MANUAL, |
318 WaitableEvent::InitialState::NOT_SIGNALED); | 417 WaitableEvent::InitialState::NOT_SIGNALED); |
(...skipping 15 matching lines...) Expand all Loading... |
334 | 433 |
335 #if defined(OS_WIN) && BUILDFLAG(USE_EXPERIMENTAL_ALLOCATOR_SHIM) | 434 #if defined(OS_WIN) && BUILDFLAG(USE_EXPERIMENTAL_ALLOCATOR_SHIM) |
336 TEST_F(AllocatorShimTest, ShimReplacesCRTHeapWhenEnabled) { | 435 TEST_F(AllocatorShimTest, ShimReplacesCRTHeapWhenEnabled) { |
337 ASSERT_NE(::GetProcessHeap(), reinterpret_cast<HANDLE>(_get_heap_handle())); | 436 ASSERT_NE(::GetProcessHeap(), reinterpret_cast<HANDLE>(_get_heap_handle())); |
338 } | 437 } |
339 #endif // defined(OS_WIN) && BUILDFLAG(USE_EXPERIMENTAL_ALLOCATOR_SHIM) | 438 #endif // defined(OS_WIN) && BUILDFLAG(USE_EXPERIMENTAL_ALLOCATOR_SHIM) |
340 | 439 |
341 } // namespace | 440 } // namespace |
342 } // namespace allocator | 441 } // namespace allocator |
343 } // namespace base | 442 } // namespace base |
OLD | NEW |