OLD | NEW |
---|---|
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #ifndef V8_HEAP_HEAP_INL_H_ | 5 #ifndef V8_HEAP_HEAP_INL_H_ |
6 #define V8_HEAP_HEAP_INL_H_ | 6 #define V8_HEAP_HEAP_INL_H_ |
7 | 7 |
8 #include <cmath> | 8 #include <cmath> |
9 | 9 |
10 #include "src/base/platform/platform.h" | 10 #include "src/base/platform/platform.h" |
(...skipping 239 matching lines...) | |
250 return allocation; | 250 return allocation; |
251 } | 251 } |
252 | 252 |
253 | 253 |
254 void Heap::OnAllocationEvent(HeapObject* object, int size_in_bytes) { | 254 void Heap::OnAllocationEvent(HeapObject* object, int size_in_bytes) { |
255 HeapProfiler* profiler = isolate_->heap_profiler(); | 255 HeapProfiler* profiler = isolate_->heap_profiler(); |
256 if (profiler->is_tracking_allocations()) { | 256 if (profiler->is_tracking_allocations()) { |
257 profiler->AllocationEvent(object->address(), size_in_bytes); | 257 profiler->AllocationEvent(object->address(), size_in_bytes); |
258 } | 258 } |
259 | 259 |
260 ++allocations_count_; | 260 if (FLAG_verify_predictable) { |
| 261 ++allocations_count_; |
ulan
2015/11/12 11:38:15
Why is the increment conditional now?
Igor Sheludko
2015/11/12 11:53:30
We use allocations_count only for --verify-predict
| |
| 262 // Advance synthetic time by making a time request. |
| 263 MonotonicallyIncreasingTimeInMs(); |
261 | 264 |
262 if (FLAG_verify_predictable) { | |
263 UpdateAllocationsHash(object); | 265 UpdateAllocationsHash(object); |
264 UpdateAllocationsHash(size_in_bytes); | 266 UpdateAllocationsHash(size_in_bytes); |
265 | 267 |
266 if ((FLAG_dump_allocations_digest_at_alloc > 0) && | 268 if (allocations_count_ % FLAG_dump_allocations_digest_at_alloc == 0) { |
267 (--dump_allocations_hash_countdown_ == 0)) { | |
268 dump_allocations_hash_countdown_ = FLAG_dump_allocations_digest_at_alloc; | |
269 PrintAlloctionsHash(); | 269 PrintAlloctionsHash(); |
270 } | 270 } |
271 } | 271 } |
272 | 272 |
273 if (FLAG_trace_allocation_stack_interval > 0) { | 273 if (FLAG_trace_allocation_stack_interval > 0) { |
| 274 if (!FLAG_verify_predictable) ++allocations_count_; |
274 if (allocations_count_ % FLAG_trace_allocation_stack_interval == 0) { | 275 if (allocations_count_ % FLAG_trace_allocation_stack_interval == 0) { |
275 isolate()->PrintStack(stdout, Isolate::kPrintStackConcise); | 276 isolate()->PrintStack(stdout, Isolate::kPrintStackConcise); |
276 } | 277 } |
277 } | 278 } |
278 } | 279 } |
279 | 280 |
280 | 281 |
281 void Heap::OnMoveEvent(HeapObject* target, HeapObject* source, | 282 void Heap::OnMoveEvent(HeapObject* target, HeapObject* source, |
282 int size_in_bytes) { | 283 int size_in_bytes) { |
283 HeapProfiler* heap_profiler = isolate_->heap_profiler(); | 284 HeapProfiler* heap_profiler = isolate_->heap_profiler(); |
284 if (heap_profiler->is_tracking_object_moves()) { | 285 if (heap_profiler->is_tracking_object_moves()) { |
285 heap_profiler->ObjectMoveEvent(source->address(), target->address(), | 286 heap_profiler->ObjectMoveEvent(source->address(), target->address(), |
286 size_in_bytes); | 287 size_in_bytes); |
287 } | 288 } |
288 if (target->IsSharedFunctionInfo()) { | 289 if (target->IsSharedFunctionInfo()) { |
289 LOG_CODE_EVENT(isolate_, SharedFunctionInfoMoveEvent(source->address(), | 290 LOG_CODE_EVENT(isolate_, SharedFunctionInfoMoveEvent(source->address(), |
290 target->address())); | 291 target->address())); |
291 } | 292 } |
292 | 293 |
293 if (FLAG_verify_predictable) { | 294 if (FLAG_verify_predictable) { |
294 ++allocations_count_; | 295 ++allocations_count_; |
| 296 // Advance synthetic time by making a time request. |
| 297 MonotonicallyIncreasingTimeInMs(); |
295 | 298 |
296 UpdateAllocationsHash(source); | 299 UpdateAllocationsHash(source); |
297 UpdateAllocationsHash(target); | 300 UpdateAllocationsHash(target); |
298 UpdateAllocationsHash(size_in_bytes); | 301 UpdateAllocationsHash(size_in_bytes); |
299 | 302 |
300 if ((FLAG_dump_allocations_digest_at_alloc > 0) && | 303 if (allocations_count_ % FLAG_dump_allocations_digest_at_alloc == 0) { |
301 (--dump_allocations_hash_countdown_ == 0)) { | |
302 dump_allocations_hash_countdown_ = FLAG_dump_allocations_digest_at_alloc; | |
303 PrintAlloctionsHash(); | 304 PrintAlloctionsHash(); |
304 } | 305 } |
305 } | 306 } |
| 307 } |
| 308 |
| 309 |
| 310 void Heap::OnFreeEvent(Address start, int size_in_bytes) { |
| 311 if (FLAG_verify_predictable) { |
| 312 HeapObject* object = HeapObject::FromAddress(start); |
| 313 ++allocations_count_; |
| 314 // Advance synthetic time by making a time request. |
| 315 MonotonicallyIncreasingTimeInMs(); |
| 316 |
| 317 UpdateAllocationsHash(object); |
| 318 UpdateAllocationsHash(size_in_bytes); |
| 319 |
| 320 if (allocations_count_ % FLAG_dump_allocations_digest_at_alloc == 0) { |
| 321 PrintAlloctionsHash(); |
| 322 } |
| 323 } |
306 } | 324 } |
307 | 325 |
308 | 326 |
309 void Heap::UpdateAllocationsHash(HeapObject* object) { | 327 void Heap::UpdateAllocationsHash(HeapObject* object) { |
310 Address object_address = object->address(); | 328 Address object_address = object->address(); |
311 MemoryChunk* memory_chunk = MemoryChunk::FromAddress(object_address); | 329 MemoryChunk* memory_chunk = MemoryChunk::FromAddress(object_address); |
312 AllocationSpace allocation_space = memory_chunk->owner()->identity(); | 330 AllocationSpace allocation_space = memory_chunk->owner()->identity(); |
313 | 331 |
314 STATIC_ASSERT(kSpaceTagSize + kPageSizeBits <= 32); | 332 STATIC_ASSERT(kSpaceTagSize + kPageSizeBits <= 32); |
315 uint32_t value = | 333 uint32_t value = |
(...skipping 384 matching lines...) | |
700 | 718 |
701 void VerifySmisVisitor::VisitPointers(Object** start, Object** end) { | 719 void VerifySmisVisitor::VisitPointers(Object** start, Object** end) { |
702 for (Object** current = start; current < end; current++) { | 720 for (Object** current = start; current < end; current++) { |
703 CHECK((*current)->IsSmi()); | 721 CHECK((*current)->IsSmi()); |
704 } | 722 } |
705 } | 723 } |
706 } // namespace internal | 724 } // namespace internal |
707 } // namespace v8 | 725 } // namespace v8 |
708 | 726 |
709 #endif // V8_HEAP_HEAP_INL_H_ | 727 #endif // V8_HEAP_HEAP_INL_H_ |
OLD | NEW |
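
For readers skimming the diff, the behavioral change discussed in the comment thread above boils down to: allocations_count_ is now incremented (and synthetic time advanced via MonotonicallyIncreasingTimeInMs()) only under --verify-predictable, the digest dump is triggered by a modulo check on the counter instead of a separate countdown, and the stack-trace-interval path does its own increment when that flag is off. The sketch below is a simplified, standalone model of that counting logic, not V8 code: the FLAG_* variables, AdvanceSyntheticTime, PrintAllocationsHash, PrintStackConcise, and the main() driver are hypothetical stand-ins for the real Heap members and flag machinery.

```cpp
// Standalone sketch of the new counting logic; names mirror the diff but the
// types, flags, and helpers are simplified stand-ins, not real V8 declarations.
#include <cstdint>
#include <cstdio>

namespace {

// Stand-ins for the command-line flags referenced in the diff.
bool FLAG_verify_predictable = true;
int FLAG_dump_allocations_digest_at_alloc = 4;   // nonzero to avoid modulo by zero
int FLAG_trace_allocation_stack_interval = 8;

uint32_t allocations_count = 0;

void AdvanceSyntheticTime() { /* models MonotonicallyIncreasingTimeInMs() */ }
void UpdateAllocationsHash(int value) { /* models the running digest */ (void)value; }
void PrintAllocationsHash() {
  std::printf("digest after %u events\n", static_cast<unsigned>(allocations_count));
}
void PrintStackConcise() {
  std::printf("stack trace at %u events\n", static_cast<unsigned>(allocations_count));
}

// Mirrors the NEW version of Heap::OnAllocationEvent: the counter, synthetic
// time, and digest are maintained only under --verify-predictable; the stack
// tracing path keeps its own increment when that flag is off.
void OnAllocationEvent(int size_in_bytes) {
  if (FLAG_verify_predictable) {
    ++allocations_count;
    // Advance synthetic time by making a time request.
    AdvanceSyntheticTime();

    UpdateAllocationsHash(size_in_bytes);

    if (allocations_count % FLAG_dump_allocations_digest_at_alloc == 0) {
      PrintAllocationsHash();
    }
  }

  if (FLAG_trace_allocation_stack_interval > 0) {
    if (!FLAG_verify_predictable) ++allocations_count;
    if (allocations_count % FLAG_trace_allocation_stack_interval == 0) {
      PrintStackConcise();
    }
  }
}

}  // namespace

int main() {
  // Drive a few events to show the digest and stack-trace cadence.
  for (int i = 0; i < 16; ++i) OnAllocationEvent(32);
  return 0;
}
```

The same pattern is applied in OnMoveEvent and the newly added OnFreeEvent in the diff: each event bumps the counter, advances synthetic time, feeds the digest, and dumps it every FLAG_dump_allocations_digest_at_alloc events, which is what keeps the --verify-predictable digest deterministic across runs.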