Chromium Code Reviews | Index: src/heap.cc |
| diff --git a/src/heap.cc b/src/heap.cc |
| index b5fe18454e31005d64bb7e611a25dd60449eea67..4eaed4119fc8bbb2b6ec1311c8cc5cee52aa8ed5 100644 |
| --- a/src/heap.cc |
| +++ b/src/heap.cc |
| @@ -87,7 +87,6 @@ Heap::Heap() |
| contexts_disposed_(0), |
| global_ic_age_(0), |
| flush_monomorphic_ics_(false), |
| - allocation_mementos_found_(0), |
| scan_on_scavenge_pages_(0), |
| new_space_(this), |
| old_pointer_space_(NULL), |
| @@ -506,6 +505,42 @@ void Heap::RepairFreeListsAfterBoot() { |
| void Heap::GarbageCollectionEpilogue() { |
| + if (FLAG_allocation_site_pretenuring) { |
| + int tenure_decisions = 0; |
| + int dont_tenure_decisions = 0; |
| + int allocation_mementos_found = 0; |
| + |
| + Object* cur = allocation_sites_list(); |
| + if (cur->IsAllocationSite()) { |
|
[Inline review comment]
Hannes Payer (out of office) — 2013/11/25 11:45:52:
    I guess the `if` is not needed.
mvstanton — 2013/11/25 13:49:18:
    Done.
|
| + while (cur->IsAllocationSite()) { |
| + AllocationSite* casted = AllocationSite::cast(cur); |
| + allocation_mementos_found += casted->memento_found_count()->value(); |
| + if (casted->DigestPretenuringFeedback()) { |
| + if (casted->GetPretenureMode() == TENURED) { |
| + tenure_decisions++; |
| + } else { |
| + dont_tenure_decisions++; |
| + } |
| + } |
| + cur = casted->weak_next(); |
| + } |
| + } |
| + |
| + // TODO(mvstanton): Pretenure decisions are only made once for an allocation |
| + // site. Find a sane way to decide about revisiting the decision later. |
| + |
| + if (FLAG_trace_track_allocation_sites && |
| + (allocation_mementos_found > 0 || |
| + tenure_decisions > 0 || |
| + dont_tenure_decisions > 0)) { |
| + PrintF("GC: (#mementos, #tenure decisions, #donttenure decisions) " |
| + "(%d, %d, %d)\n", |
| + allocation_mementos_found, |
| + tenure_decisions, |
| + dont_tenure_decisions); |
| + } |
| + } |
| + |
| store_buffer()->GCEpilogue(); |
| // In release mode, we only zap the from space under heap verification. |
| @@ -1393,8 +1428,6 @@ class ScavengeWeakObjectRetainer : public WeakObjectRetainer { |
| void Heap::Scavenge() { |
| RelocationLock relocation_lock(this); |
| - allocation_mementos_found_ = 0; |
| - |
| #ifdef VERIFY_HEAP |
| if (FLAG_verify_heap) VerifyNonPointerSpacePointers(this); |
| #endif |
| @@ -1542,11 +1575,6 @@ void Heap::Scavenge() { |
| gc_state_ = NOT_IN_GC; |
| scavenges_since_last_idle_round_++; |
| - |
| - if (FLAG_trace_track_allocation_sites && allocation_mementos_found_ > 0) { |
| - PrintF("AllocationMementos found during scavenge = %d\n", |
| - allocation_mementos_found_); |
| - } |
| } |
| @@ -4376,6 +4404,9 @@ MaybeObject* Heap::AllocateWithAllocationSite(Map* map, AllocationSpace space, |
| alloc_memento->set_map_no_write_barrier(allocation_memento_map()); |
| ASSERT(allocation_site->map() == allocation_site_map()); |
| alloc_memento->set_allocation_site(*allocation_site, SKIP_WRITE_BARRIER); |
| + if (FLAG_allocation_site_pretenuring) { |
| + allocation_site->IncrementMementoCreateCount(); |
| + } |
| return result; |
| } |
| @@ -4808,8 +4839,7 @@ MaybeObject* Heap::CopyJSObject(JSObject* source, AllocationSite* site) { |
| int object_size = map->instance_size(); |
| Object* clone; |
| - ASSERT(site == NULL || (AllocationSite::CanTrack(map->instance_type()) && |
| - map->instance_type() == JS_ARRAY_TYPE)); |
| + ASSERT(site == NULL || AllocationSite::CanTrack(map->instance_type())); |
| WriteBarrierMode wb_mode = UPDATE_WRITE_BARRIER; |
| @@ -4851,6 +4881,9 @@ MaybeObject* Heap::CopyJSObject(JSObject* source, AllocationSite* site) { |
| alloc_memento->set_map_no_write_barrier(allocation_memento_map()); |
| ASSERT(site->map() == allocation_site_map()); |
| alloc_memento->set_allocation_site(site, SKIP_WRITE_BARRIER); |
| + if (FLAG_allocation_site_pretenuring) { |
| + site->IncrementMementoCreateCount(); |
| + } |
| HeapProfiler* profiler = isolate()->heap_profiler(); |
| if (profiler->is_tracking_allocations()) { |
| profiler->UpdateObjectSizeEvent(HeapObject::cast(clone)->address(), |