Chromium Code Reviews

Side by Side Diff: src/heap/mark-compact.cc

Issue 1343043002: [heap] Move slots buffer into a separate file. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 5 years, 3 months ago
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/heap/mark-compact.h" 5 #include "src/heap/mark-compact.h"
6 6
7 #include "src/base/atomicops.h" 7 #include "src/base/atomicops.h"
8 #include "src/base/bits.h" 8 #include "src/base/bits.h"
9 #include "src/code-stubs.h" 9 #include "src/code-stubs.h"
10 #include "src/compilation-cache.h" 10 #include "src/compilation-cache.h"
11 #include "src/cpu-profiler.h" 11 #include "src/cpu-profiler.h"
12 #include "src/deoptimizer.h" 12 #include "src/deoptimizer.h"
13 #include "src/execution.h" 13 #include "src/execution.h"
14 #include "src/frames-inl.h" 14 #include "src/frames-inl.h"
15 #include "src/gdb-jit.h" 15 #include "src/gdb-jit.h"
16 #include "src/global-handles.h" 16 #include "src/global-handles.h"
17 #include "src/heap/array-buffer-tracker.h" 17 #include "src/heap/array-buffer-tracker.h"
18 #include "src/heap/gc-tracer.h" 18 #include "src/heap/gc-tracer.h"
19 #include "src/heap/incremental-marking.h" 19 #include "src/heap/incremental-marking.h"
20 #include "src/heap/mark-compact-inl.h" 20 #include "src/heap/mark-compact-inl.h"
21 #include "src/heap/object-stats.h" 21 #include "src/heap/object-stats.h"
22 #include "src/heap/objects-visiting.h" 22 #include "src/heap/objects-visiting.h"
23 #include "src/heap/objects-visiting-inl.h" 23 #include "src/heap/objects-visiting-inl.h"
24 #include "src/heap/slots-buffer.h"
24 #include "src/heap/spaces-inl.h" 25 #include "src/heap/spaces-inl.h"
25 #include "src/heap-profiler.h" 26 #include "src/heap-profiler.h"
26 #include "src/ic/ic.h" 27 #include "src/ic/ic.h"
27 #include "src/ic/stub-cache.h" 28 #include "src/ic/stub-cache.h"
28 #include "src/v8.h" 29 #include "src/v8.h"
29 30
30 namespace v8 { 31 namespace v8 {
31 namespace internal { 32 namespace internal {
32 33
33 34
(...skipping 17 matching lines...)
51 state_(IDLE), 52 state_(IDLE),
52 #endif 53 #endif
53 marking_parity_(ODD_MARKING_PARITY), 54 marking_parity_(ODD_MARKING_PARITY),
54 compacting_(false), 55 compacting_(false),
55 was_marked_incrementally_(false), 56 was_marked_incrementally_(false),
56 sweeping_in_progress_(false), 57 sweeping_in_progress_(false),
57 parallel_compaction_in_progress_(false), 58 parallel_compaction_in_progress_(false),
58 pending_sweeper_jobs_semaphore_(0), 59 pending_sweeper_jobs_semaphore_(0),
59 pending_compaction_jobs_semaphore_(0), 60 pending_compaction_jobs_semaphore_(0),
60 evacuation_(false), 61 evacuation_(false),
61 migration_slots_buffer_(NULL), 62 slots_buffer_allocator_(nullptr),
63 migration_slots_buffer_(nullptr),
62 heap_(heap), 64 heap_(heap),
63 marking_deque_memory_(NULL), 65 marking_deque_memory_(NULL),
64 marking_deque_memory_committed_(0), 66 marking_deque_memory_committed_(0),
65 code_flusher_(NULL), 67 code_flusher_(NULL),
66 have_code_to_deoptimize_(false) { 68 have_code_to_deoptimize_(false) {
67 } 69 }
68 70
69 #ifdef VERIFY_HEAP 71 #ifdef VERIFY_HEAP
70 class VerifyMarkingVisitor : public ObjectVisitor { 72 class VerifyMarkingVisitor : public ObjectVisitor {
71 public: 73 public:
(...skipping 160 matching lines...)
232 } 234 }
233 #endif // VERIFY_HEAP 235 #endif // VERIFY_HEAP
234 236
235 237
236 void MarkCompactCollector::SetUp() { 238 void MarkCompactCollector::SetUp() {
237 free_list_old_space_.Reset(new FreeList(heap_->old_space())); 239 free_list_old_space_.Reset(new FreeList(heap_->old_space()));
238 free_list_code_space_.Reset(new FreeList(heap_->code_space())); 240 free_list_code_space_.Reset(new FreeList(heap_->code_space()));
239 free_list_map_space_.Reset(new FreeList(heap_->map_space())); 241 free_list_map_space_.Reset(new FreeList(heap_->map_space()));
240 EnsureMarkingDequeIsReserved(); 242 EnsureMarkingDequeIsReserved();
241 EnsureMarkingDequeIsCommitted(kMinMarkingDequeSize); 243 EnsureMarkingDequeIsCommitted(kMinMarkingDequeSize);
244 slots_buffer_allocator_ = new SlotsBufferAllocator();
242 } 245 }
243 246
244 247
245 void MarkCompactCollector::TearDown() { 248 void MarkCompactCollector::TearDown() {
246 AbortCompaction(); 249 AbortCompaction();
247 delete marking_deque_memory_; 250 delete marking_deque_memory_;
251 delete slots_buffer_allocator_;
248 } 252 }
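
Note: with this change the collector no longer embeds the allocator as a value member. The constructor leaves the pointer null, SetUp() allocates it, and TearDown() deletes it, matching the marking deque's lifecycle. A minimal sketch of that ownership pattern, with abbreviated names that are not the actual V8 classes:

// Sketch: a pointer-held collaborator with explicit SetUp/TearDown.
class Allocator {};

class Collector {
 public:
  Collector() : allocator_(nullptr) {}   // constructed empty
  void SetUp() { allocator_ = new Allocator(); }
  void TearDown() {
    delete allocator_;                   // safe even if SetUp never ran
    allocator_ = nullptr;
  }

 private:
  Allocator* allocator_;  // owned; valid only between SetUp and TearDown
};
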
249 253
250 254
251 void MarkCompactCollector::AddEvacuationCandidate(Page* p) { 255 void MarkCompactCollector::AddEvacuationCandidate(Page* p) {
252 DCHECK(!p->NeverEvacuate()); 256 DCHECK(!p->NeverEvacuate());
253 p->MarkEvacuationCandidate(); 257 p->MarkEvacuationCandidate();
254 evacuation_candidates_.Add(p); 258 evacuation_candidates_.Add(p);
255 } 259 }
256 260
257 261
(...skipping 526 matching lines...)
784 (reduce_memory ? "reduce memory footprint" : "normal")); 788 (reduce_memory ? "reduce memory footprint" : "normal"));
785 } 789 }
786 } 790 }
787 791
788 792
789 void MarkCompactCollector::AbortCompaction() { 793 void MarkCompactCollector::AbortCompaction() {
790 if (compacting_) { 794 if (compacting_) {
791 int npages = evacuation_candidates_.length(); 795 int npages = evacuation_candidates_.length();
792 for (int i = 0; i < npages; i++) { 796 for (int i = 0; i < npages; i++) {
793 Page* p = evacuation_candidates_[i]; 797 Page* p = evacuation_candidates_[i];
794 slots_buffer_allocator_.DeallocateChain(p->slots_buffer_address()); 798 slots_buffer_allocator_->DeallocateChain(p->slots_buffer_address());
795 p->ClearEvacuationCandidate(); 799 p->ClearEvacuationCandidate();
796 p->ClearFlag(MemoryChunk::RESCAN_ON_EVACUATION); 800 p->ClearFlag(MemoryChunk::RESCAN_ON_EVACUATION);
797 } 801 }
798 compacting_ = false; 802 compacting_ = false;
799 evacuation_candidates_.Rewind(0); 803 evacuation_candidates_.Rewind(0);
800 } 804 }
801 DCHECK_EQ(0, evacuation_candidates_.length()); 805 DCHECK_EQ(0, evacuation_candidates_.length());
802 } 806 }
803 807
804 808
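
Note: AbortCompaction hands each candidate's slots chain back to the allocator, now through the pointer. A self-contained sketch of what DeallocateChain does under the hood (Node is an illustrative stand-in for SlotsBuffer):

// Sketch: tear down a singly linked buffer chain and null the owner's
// head pointer.
struct Node {
  Node* next;
};

void DeallocateChainSketch(Node** head) {
  Node* current = *head;
  while (current != nullptr) {
    Node* next = current->next;
    delete current;  // DeallocateBuffer in the real allocator
    current = next;
  }
  *head = nullptr;  // the page's slots buffer address now reads empty
}
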
(...skipping 1753 matching lines...)
2558 // require synchronization. 2562 // require synchronization.
2559 if (heap_->InNewSpace(value)) { 2563 if (heap_->InNewSpace(value)) {
2560 if (parallel_compaction_in_progress_) { 2564 if (parallel_compaction_in_progress_) {
2561 heap_->store_buffer()->MarkSynchronized(slot); 2565 heap_->store_buffer()->MarkSynchronized(slot);
2562 } else { 2566 } else {
2563 heap_->store_buffer()->Mark(slot); 2567 heap_->store_buffer()->Mark(slot);
2564 } 2568 }
2565 } else if (value->IsHeapObject() && IsOnEvacuationCandidate(value)) { 2569 } else if (value->IsHeapObject() && IsOnEvacuationCandidate(value)) {
2566 if (parallel_compaction_in_progress_) { 2570 if (parallel_compaction_in_progress_) {
2567 SlotsBuffer::AddToSynchronized( 2571 SlotsBuffer::AddToSynchronized(
2568 &slots_buffer_allocator_, &migration_slots_buffer_, 2572 slots_buffer_allocator_, &migration_slots_buffer_,
2569 &migration_slots_buffer_mutex_, reinterpret_cast<Object**>(slot), 2573 &migration_slots_buffer_mutex_, reinterpret_cast<Object**>(slot),
2570 SlotsBuffer::IGNORE_OVERFLOW); 2574 SlotsBuffer::IGNORE_OVERFLOW);
2571 } else { 2575 } else {
2572 SlotsBuffer::AddTo(&slots_buffer_allocator_, &migration_slots_buffer_, 2576 SlotsBuffer::AddTo(slots_buffer_allocator_, &migration_slots_buffer_,
2573 reinterpret_cast<Object**>(slot), 2577 reinterpret_cast<Object**>(slot),
2574 SlotsBuffer::IGNORE_OVERFLOW); 2578 SlotsBuffer::IGNORE_OVERFLOW);
2575 } 2579 }
2576 } 2580 }
2577 } 2581 }
2578 2582
2579 2583
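
Note: the dispatch above routes pointers to new space through the store buffer and pointers to evacuation candidates into the migration slots buffer, taking a mutex-guarded path while compaction runs in parallel. A reduced sketch of that routing, with containers standing in for both buffers (all names here are illustrative):

#include <mutex>
#include <vector>

// Illustrative stand-ins for the two destinations; the real store buffer
// and slots buffer are more elaborate.
static std::vector<void**> store_buffer;      // old-to-new pointers
static std::vector<void**> migration_slots;   // pointers into candidates
static std::mutex migration_mutex;

void RecordMigratedSlotSketch(void** slot, bool to_new_space,
                              bool to_candidate, bool parallel) {
  if (to_new_space) {
    // The real code calls Mark or MarkSynchronized on the store buffer.
    store_buffer.push_back(slot);
  } else if (to_candidate) {
    if (parallel) {
      // AddToSynchronized: serialize writers during parallel compaction.
      std::lock_guard<std::mutex> guard(migration_mutex);
      migration_slots.push_back(slot);
    } else {
      migration_slots.push_back(slot);  // plain AddTo
    }
  }
}
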
2584 void MarkCompactCollector::RecordSlot(HeapObject* object, Object** slot,
2585 Object* target) {
2586 Page* target_page = Page::FromAddress(reinterpret_cast<Address>(target));
2587 if (target_page->IsEvacuationCandidate() &&
2588 !ShouldSkipEvacuationSlotRecording(object)) {
2589 if (!SlotsBuffer::AddTo(slots_buffer_allocator_,
2590 target_page->slots_buffer_address(), slot,
2591 SlotsBuffer::FAIL_ON_OVERFLOW)) {
2592 EvictPopularEvacuationCandidate(target_page);
2593 }
2594 }
2595 }
2596
2597
2598 void MarkCompactCollector::ForceRecordSlot(HeapObject* object, Object** slot,
2599 Object* target) {
2600 Page* target_page = Page::FromAddress(reinterpret_cast<Address>(target));
2601 if (target_page->IsEvacuationCandidate() &&
2602 !ShouldSkipEvacuationSlotRecording(object)) {
2603 CHECK(SlotsBuffer::AddTo(slots_buffer_allocator_,
2604 target_page->slots_buffer_address(), slot,
2605 SlotsBuffer::IGNORE_OVERFLOW));
2606 }
2607 }
2608
2609
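
Note: the two recording flavors above differ only in overflow policy. RecordSlot passes FAIL_ON_OVERFLOW and demotes the page when the buffer chain is full, while ForceRecordSlot passes IGNORE_OVERFLOW and CHECKs that the slot was recorded. A sketch of that caller-side contract (stand-in names):

#include <cassert>

// Illustrative stand-in: recording fails only when the caller asked to
// fail on overflow and the buffer chain is already too long.
static bool TryRecord(bool fail_on_overflow, bool chain_too_long) {
  return !(fail_on_overflow && chain_too_long);
}

void RecordSlotSketch(bool chain_too_long) {
  if (!TryRecord(/*fail_on_overflow=*/true, chain_too_long)) {
    // EvictPopularEvacuationCandidate: give up compacting this page.
  }
}

void ForceRecordSlotSketch(bool chain_too_long) {
  bool ok = TryRecord(/*fail_on_overflow=*/false, chain_too_long);
  (void)ok;
  assert(ok);  // CHECK in the real code: recording must succeed
}
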
2580 void MarkCompactCollector::RecordMigratedCodeEntrySlot( 2610 void MarkCompactCollector::RecordMigratedCodeEntrySlot(
2581 Address code_entry, Address code_entry_slot) { 2611 Address code_entry, Address code_entry_slot) {
2582 if (Page::FromAddress(code_entry)->IsEvacuationCandidate()) { 2612 if (Page::FromAddress(code_entry)->IsEvacuationCandidate()) {
2583 if (parallel_compaction_in_progress_) { 2613 if (parallel_compaction_in_progress_) {
2584 SlotsBuffer::AddToSynchronized( 2614 SlotsBuffer::AddToSynchronized(
2585 &slots_buffer_allocator_, &migration_slots_buffer_, 2615 slots_buffer_allocator_, &migration_slots_buffer_,
2586 &migration_slots_buffer_mutex_, SlotsBuffer::CODE_ENTRY_SLOT, 2616 &migration_slots_buffer_mutex_, SlotsBuffer::CODE_ENTRY_SLOT,
2587 code_entry_slot, SlotsBuffer::IGNORE_OVERFLOW); 2617 code_entry_slot, SlotsBuffer::IGNORE_OVERFLOW);
2588 } else { 2618 } else {
2589 SlotsBuffer::AddTo(&slots_buffer_allocator_, &migration_slots_buffer_, 2619 SlotsBuffer::AddTo(slots_buffer_allocator_, &migration_slots_buffer_,
2590 SlotsBuffer::CODE_ENTRY_SLOT, code_entry_slot, 2620 SlotsBuffer::CODE_ENTRY_SLOT, code_entry_slot,
2591 SlotsBuffer::IGNORE_OVERFLOW); 2621 SlotsBuffer::IGNORE_OVERFLOW);
2592 } 2622 }
2593 } 2623 }
2594 } 2624 }
2595 2625
2596 2626
2597 void MarkCompactCollector::RecordMigratedCodeObjectSlot(Address code_object) { 2627 void MarkCompactCollector::RecordMigratedCodeObjectSlot(Address code_object) {
2598 if (parallel_compaction_in_progress_) { 2628 if (parallel_compaction_in_progress_) {
2599 SlotsBuffer::AddToSynchronized( 2629 SlotsBuffer::AddToSynchronized(
2600 &slots_buffer_allocator_, &migration_slots_buffer_, 2630 slots_buffer_allocator_, &migration_slots_buffer_,
2601 &migration_slots_buffer_mutex_, SlotsBuffer::RELOCATED_CODE_OBJECT, 2631 &migration_slots_buffer_mutex_, SlotsBuffer::RELOCATED_CODE_OBJECT,
2602 code_object, SlotsBuffer::IGNORE_OVERFLOW); 2632 code_object, SlotsBuffer::IGNORE_OVERFLOW);
2603 } else { 2633 } else {
2604 SlotsBuffer::AddTo(&slots_buffer_allocator_, &migration_slots_buffer_, 2634 SlotsBuffer::AddTo(slots_buffer_allocator_, &migration_slots_buffer_,
2605 SlotsBuffer::RELOCATED_CODE_OBJECT, code_object, 2635 SlotsBuffer::RELOCATED_CODE_OBJECT, code_object,
2606 SlotsBuffer::IGNORE_OVERFLOW); 2636 SlotsBuffer::IGNORE_OVERFLOW);
2607 } 2637 }
2608 } 2638 }
2609 2639
2610 2640
2641 static inline SlotsBuffer::SlotType SlotTypeForRMode(RelocInfo::Mode rmode) {
2642 if (RelocInfo::IsCodeTarget(rmode)) {
2643 return SlotsBuffer::CODE_TARGET_SLOT;
2644 } else if (RelocInfo::IsCell(rmode)) {
2645 return SlotsBuffer::CELL_TARGET_SLOT;
2646 } else if (RelocInfo::IsEmbeddedObject(rmode)) {
2647 return SlotsBuffer::EMBEDDED_OBJECT_SLOT;
2648 } else if (RelocInfo::IsDebugBreakSlot(rmode)) {
2649 return SlotsBuffer::DEBUG_TARGET_SLOT;
2650 }
2651 UNREACHABLE();
2652 return SlotsBuffer::NUMBER_OF_SLOT_TYPES;
2653 }
2654
2655
2656 static inline SlotsBuffer::SlotType DecodeSlotType(
2657 SlotsBuffer::ObjectSlot slot) {
2658 return static_cast<SlotsBuffer::SlotType>(reinterpret_cast<intptr_t>(slot));
2659 }
2660
2661
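
Note: a typed slot occupies two consecutive buffer entries — the slot type cast to a pointer-sized value, then the address. Since genuine pointers are never small integers, any entry whose value is below NUMBER_OF_SLOT_TYPES can be read as a type tag, which is what IsTypedSlot and DecodeSlotType rely on. A sketch of the encoding under that assumption (reduced type list, stand-in names):

#include <cstdint>
#include <vector>

enum SlotType : intptr_t { CODE_TARGET_SLOT, OBJECT_SLOT, NUMBER_OF_SLOT_TYPES };
using ObjectSlot = void*;  // stand-in for SlotsBuffer::ObjectSlot

static std::vector<ObjectSlot> entries;

// Encode: push the tag, then the address, as two consecutive entries.
void AddTypedSlotSketch(SlotType type, void* addr) {
  entries.push_back(reinterpret_cast<ObjectSlot>(type));
  entries.push_back(addr);
}

// Decode: a small integer value marks a type tag; the payload follows it.
bool IsTypedSlotSketch(ObjectSlot slot) {
  return reinterpret_cast<uintptr_t>(slot) < NUMBER_OF_SLOT_TYPES;
}
SlotType DecodeSlotTypeSketch(ObjectSlot slot) {
  return static_cast<SlotType>(reinterpret_cast<intptr_t>(slot));
}
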
2662 void MarkCompactCollector::RecordRelocSlot(RelocInfo* rinfo, Object* target) {
2663 Page* target_page = Page::FromAddress(reinterpret_cast<Address>(target));
2664 RelocInfo::Mode rmode = rinfo->rmode();
2665 if (target_page->IsEvacuationCandidate() &&
2666 (rinfo->host() == NULL ||
2667 !ShouldSkipEvacuationSlotRecording(rinfo->host()))) {
2668 Address addr = rinfo->pc();
2669 SlotsBuffer::SlotType slot_type = SlotTypeForRMode(rmode);
2670 if (rinfo->IsInConstantPool()) {
2671 addr = rinfo->constant_pool_entry_address();
2672 if (RelocInfo::IsCodeTarget(rmode)) {
2673 slot_type = SlotsBuffer::CODE_ENTRY_SLOT;
2674 } else {
2675 DCHECK(RelocInfo::IsEmbeddedObject(rmode));
2676 slot_type = SlotsBuffer::OBJECT_SLOT;
2677 }
2678 }
2679 bool success = SlotsBuffer::AddTo(
2680 slots_buffer_allocator_, target_page->slots_buffer_address(), slot_type,
2681 addr, SlotsBuffer::FAIL_ON_OVERFLOW);
2682 if (!success) {
2683 EvictPopularEvacuationCandidate(target_page);
2684 }
2685 }
2686 }
2687
2688
2611 // We scavenge new space simultaneously with sweeping. This is done in two 2689 // We scavenge new space simultaneously with sweeping. This is done in two
2612 // passes. 2690 // passes.
2613 // 2691 //
2614 // The first pass migrates all live objects from one semispace to another 2692 // The first pass migrates all live objects from one semispace to another
2615 // or promotes them to old space. The forwarding address is written directly 2693 // or promotes them to old space. The forwarding address is written directly
2616 // into the first word of the object without any encoding. If the object is 2694 // into the first word of the object without any encoding. If the object is
2617 // dead, we write NULL as the forwarding address. 2695 // dead, we write NULL as the forwarding address.
2618 // 2696 //
2619 // The second pass updates pointers to new space in all spaces. It is possible 2697 // The second pass updates pointers to new space in all spaces. It is possible
2620 // to encounter pointers to dead new space objects during traversal of pointers 2698 // to encounter pointers to dead new space objects during traversal of pointers
(...skipping 94 matching lines...)
2715 } 2793 }
2716 } 2794 }
2717 2795
2718 2796
2719 void MarkCompactCollector::MigrateObjectRaw(HeapObject* dst, HeapObject* src, 2797 void MarkCompactCollector::MigrateObjectRaw(HeapObject* dst, HeapObject* src,
2720 int size) { 2798 int size) {
2721 heap()->MoveBlock(dst->address(), src->address(), size); 2799 heap()->MoveBlock(dst->address(), src->address(), size);
2722 } 2800 }
2723 2801
2724 2802
2803 static inline void UpdateSlot(Isolate* isolate, ObjectVisitor* v,
2804 SlotsBuffer::SlotType slot_type, Address addr) {
2805 switch (slot_type) {
2806 case SlotsBuffer::CODE_TARGET_SLOT: {
2807 RelocInfo rinfo(addr, RelocInfo::CODE_TARGET, 0, NULL);
2808 rinfo.Visit(isolate, v);
2809 break;
2810 }
2811 case SlotsBuffer::CELL_TARGET_SLOT: {
2812 RelocInfo rinfo(addr, RelocInfo::CELL, 0, NULL);
2813 rinfo.Visit(isolate, v);
2814 break;
2815 }
2816 case SlotsBuffer::CODE_ENTRY_SLOT: {
2817 v->VisitCodeEntry(addr);
2818 break;
2819 }
2820 case SlotsBuffer::RELOCATED_CODE_OBJECT: {
2821 HeapObject* obj = HeapObject::FromAddress(addr);
2822 Code::cast(obj)->CodeIterateBody(v);
2823 break;
2824 }
2825 case SlotsBuffer::DEBUG_TARGET_SLOT: {
2826 RelocInfo rinfo(addr, RelocInfo::DEBUG_BREAK_SLOT_AT_POSITION, 0, NULL);
2827 if (rinfo.IsPatchedDebugBreakSlotSequence()) rinfo.Visit(isolate, v);
2828 break;
2829 }
2830 case SlotsBuffer::EMBEDDED_OBJECT_SLOT: {
2831 RelocInfo rinfo(addr, RelocInfo::EMBEDDED_OBJECT, 0, NULL);
2832 rinfo.Visit(isolate, v);
2833 break;
2834 }
2835 case SlotsBuffer::OBJECT_SLOT: {
2836 v->VisitPointer(reinterpret_cast<Object**>(addr));
2837 break;
2838 }
2839 default:
2840 UNREACHABLE();
2841 break;
2842 }
2843 }
2844
2845
2725 // Visitor for updating pointers from live objects in old spaces to new space. 2846 // Visitor for updating pointers from live objects in old spaces to new space.
2726 // It does not expect to encounter pointers to dead objects. 2847 // It does not expect to encounter pointers to dead objects.
2727 class PointersUpdatingVisitor : public ObjectVisitor { 2848 class PointersUpdatingVisitor : public ObjectVisitor {
2728 public: 2849 public:
2729 explicit PointersUpdatingVisitor(Heap* heap) : heap_(heap) {} 2850 explicit PointersUpdatingVisitor(Heap* heap) : heap_(heap) {}
2730 2851
2731 void VisitPointer(Object** p) { UpdatePointer(p); } 2852 void VisitPointer(Object** p) { UpdatePointer(p); }
2732 2853
2733 void VisitPointers(Object** start, Object** end) { 2854 void VisitPointers(Object** start, Object** end) {
2734 for (Object** p = start; p < end; p++) UpdatePointer(p); 2855 for (Object** p = start; p < end; p++) UpdatePointer(p);
(...skipping 172 matching lines...)
2907 } 3028 }
2908 3029
2909 data[index++] = 0xeeeeeeeeeeUL; 3030 data[index++] = 0xeeeeeeeeeeUL;
2910 DCHECK(index < kDataBufferSize); 3031 DCHECK(index < kDataBufferSize);
2911 base::OS::PrintError("Data: %p\n", static_cast<void*>(data)); 3032 base::OS::PrintError("Data: %p\n", static_cast<void*>(data));
2912 base::OS::Abort(); 3033 base::OS::Abort();
2913 } 3034 }
2914 #endif 3035 #endif
2915 3036
2916 3037
3038 void MarkCompactCollector::UpdateSlots(SlotsBuffer* buffer) {
3039 PointersUpdatingVisitor v(heap_);
3040 size_t buffer_size = buffer->Size();
3041
3042 for (size_t slot_idx = 0; slot_idx < buffer_size; ++slot_idx) {
3043 SlotsBuffer::ObjectSlot slot = buffer->Get(slot_idx);
3044 if (!SlotsBuffer::IsTypedSlot(slot)) {
3045 PointersUpdatingVisitor::UpdateSlot(heap_, slot);
3046 } else {
3047 ++slot_idx;
3048 DCHECK(slot_idx < buffer_size);
3049 UpdateSlot(heap_->isolate(), &v, DecodeSlotType(slot),
3050 reinterpret_cast<Address>(buffer->Get(slot_idx)));
3051 }
3052 }
3053 }
3054
3055
3056 void MarkCompactCollector::UpdateSlotsRecordedIn(SlotsBuffer* buffer) {
3057 while (buffer != NULL) {
3058 UpdateSlots(buffer);
3059 buffer = buffer->next();
3060 }
3061 }
3062
3063
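
Note: UpdateSlotsRecordedIn walks the linked chain of buffers, and UpdateSlots consumes one buffer at a time, stepping over an extra entry whenever it meets a type tag. A reduced sketch of the same iteration, reusing the hypothetical ObjectSlot encoding from the earlier sketch:

// Sketch: walk a chain of buffers, consuming (tag, address) pairs for
// typed slots and single entries for untyped ones. Buffer is illustrative.
struct Buffer {
  std::vector<ObjectSlot> entries;
  Buffer* next = nullptr;
};

void UpdateChainSketch(Buffer* buffer) {
  for (; buffer != nullptr; buffer = buffer->next) {       // UpdateSlotsRecordedIn
    for (size_t i = 0; i < buffer->entries.size(); ++i) {  // UpdateSlots
      ObjectSlot slot = buffer->entries[i];
      if (IsTypedSlotSketch(slot)) {
        ++i;  // the payload address occupies the next entry
        // ...dispatch on DecodeSlotTypeSketch(slot) with entries[i]...
      } else {
        // ...plain object slot: update the pointer stored at that slot...
      }
    }
  }
}
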
2917 static void UpdatePointer(HeapObject** address, HeapObject* object) { 3064 static void UpdatePointer(HeapObject** address, HeapObject* object) {
2918 MapWord map_word = object->map_word(); 3065 MapWord map_word = object->map_word();
2919 // The store buffer can still contain stale pointers in dead large objects. 3066 // The store buffer can still contain stale pointers in dead large objects.
2920 // Ignore these pointers here. 3067 // Ignore these pointers here.
2921 DCHECK(map_word.IsForwardingAddress() || 3068 DCHECK(map_word.IsForwardingAddress() ||
2922 object->GetHeap()->lo_space()->FindPage( 3069 object->GetHeap()->lo_space()->FindPage(
2923 reinterpret_cast<Address>(address)) != NULL); 3070 reinterpret_cast<Address>(address)) != NULL);
2924 if (map_word.IsForwardingAddress()) { 3071 if (map_word.IsForwardingAddress()) {
2925 // Update the corresponding slot. 3072 // Update the corresponding slot.
2926 *address = map_word.ToForwardingAddress(); 3073 *address = map_word.ToForwardingAddress();
(...skipping 333 matching lines...)
3260 // Unlink the page from the list of pages here. We must not iterate 3407 // Unlink the page from the list of pages here. We must not iterate
3261 // over that page later (e.g. when scan on scavenge pages are 3408 // over that page later (e.g. when scan on scavenge pages are
3262 // processed). The page itself will be freed later and is still 3409 // processed). The page itself will be freed later and is still
3263 // reachable from the evacuation candidates list. 3410 // reachable from the evacuation candidates list.
3264 p->Unlink(); 3411 p->Unlink();
3265 } else { 3412 } else {
3266 // Without room for expansion, evacuation is not guaranteed to succeed. 3413 // Without room for expansion, evacuation is not guaranteed to succeed.
3267 // Pessimistically abandon unevacuated pages. 3414 // Pessimistically abandon unevacuated pages.
3268 for (int j = i; j < npages; j++) { 3415 for (int j = i; j < npages; j++) {
3269 Page* page = evacuation_candidates_[j]; 3416 Page* page = evacuation_candidates_[j];
3270 slots_buffer_allocator_.DeallocateChain(page->slots_buffer_address()); 3417 slots_buffer_allocator_->DeallocateChain(
3418 page->slots_buffer_address());
3271 page->ClearEvacuationCandidate(); 3419 page->ClearEvacuationCandidate();
3272 page->SetFlag(Page::RESCAN_ON_EVACUATION); 3420 page->SetFlag(Page::RESCAN_ON_EVACUATION);
3273 } 3421 }
3274 abandoned_pages = npages - i; 3422 abandoned_pages = npages - i;
3275 break; 3423 break;
3276 } 3424 }
3277 } 3425 }
3278 } 3426 }
3279 if (npages > 0) { 3427 if (npages > 0) {
3280 // Release emergency memory. 3428 // Release emergency memory.
(...skipping 26 matching lines...)
3307 MapWord map_word = heap_object->map_word(); 3455 MapWord map_word = heap_object->map_word();
3308 if (map_word.IsForwardingAddress()) { 3456 if (map_word.IsForwardingAddress()) {
3309 return map_word.ToForwardingAddress(); 3457 return map_word.ToForwardingAddress();
3310 } 3458 }
3311 } 3459 }
3312 return object; 3460 return object;
3313 } 3461 }
3314 }; 3462 };
3315 3463
3316 3464
3317 static inline void UpdateSlot(Isolate* isolate, ObjectVisitor* v,
3318 SlotsBuffer::SlotType slot_type, Address addr) {
3319 switch (slot_type) {
3320 case SlotsBuffer::CODE_TARGET_SLOT: {
3321 RelocInfo rinfo(addr, RelocInfo::CODE_TARGET, 0, NULL);
3322 rinfo.Visit(isolate, v);
3323 break;
3324 }
3325 case SlotsBuffer::CELL_TARGET_SLOT: {
3326 RelocInfo rinfo(addr, RelocInfo::CELL, 0, NULL);
3327 rinfo.Visit(isolate, v);
3328 break;
3329 }
3330 case SlotsBuffer::CODE_ENTRY_SLOT: {
3331 v->VisitCodeEntry(addr);
3332 break;
3333 }
3334 case SlotsBuffer::RELOCATED_CODE_OBJECT: {
3335 HeapObject* obj = HeapObject::FromAddress(addr);
3336 Code::cast(obj)->CodeIterateBody(v);
3337 break;
3338 }
3339 case SlotsBuffer::DEBUG_TARGET_SLOT: {
3340 RelocInfo rinfo(addr, RelocInfo::DEBUG_BREAK_SLOT_AT_POSITION, 0, NULL);
3341 if (rinfo.IsPatchedDebugBreakSlotSequence()) rinfo.Visit(isolate, v);
3342 break;
3343 }
3344 case SlotsBuffer::EMBEDDED_OBJECT_SLOT: {
3345 RelocInfo rinfo(addr, RelocInfo::EMBEDDED_OBJECT, 0, NULL);
3346 rinfo.Visit(isolate, v);
3347 break;
3348 }
3349 case SlotsBuffer::OBJECT_SLOT: {
3350 v->VisitPointer(reinterpret_cast<Object**>(addr));
3351 break;
3352 }
3353 default:
3354 UNREACHABLE();
3355 break;
3356 }
3357 }
3358
3359
3360 enum SweepingMode { SWEEP_ONLY, SWEEP_AND_VISIT_LIVE_OBJECTS }; 3465 enum SweepingMode { SWEEP_ONLY, SWEEP_AND_VISIT_LIVE_OBJECTS };
3361 3466
3362 3467
3363 enum SkipListRebuildingMode { REBUILD_SKIP_LIST, IGNORE_SKIP_LIST }; 3468 enum SkipListRebuildingMode { REBUILD_SKIP_LIST, IGNORE_SKIP_LIST };
3364 3469
3365 3470
3366 enum FreeSpaceTreatmentMode { IGNORE_FREE_SPACE, ZAP_FREE_SPACE }; 3471 enum FreeSpaceTreatmentMode { IGNORE_FREE_SPACE, ZAP_FREE_SPACE };
3367 3472
3368 3473
3369 template <MarkCompactCollector::SweepingParallelism mode> 3474 template <MarkCompactCollector::SweepingParallelism mode>
(...skipping 92 matching lines...)
3462 // When concurrent sweeping is active, the page will be marked after 3567 // When concurrent sweeping is active, the page will be marked after
3463 // sweeping by the main thread. 3568 // sweeping by the main thread.
3464 p->set_parallel_sweeping(MemoryChunk::SWEEPING_FINALIZE); 3569 p->set_parallel_sweeping(MemoryChunk::SWEEPING_FINALIZE);
3465 } else { 3570 } else {
3466 p->SetWasSwept(); 3571 p->SetWasSwept();
3467 } 3572 }
3468 return FreeList::GuaranteedAllocatable(static_cast<int>(max_freed_bytes)); 3573 return FreeList::GuaranteedAllocatable(static_cast<int>(max_freed_bytes));
3469 } 3574 }
3470 3575
3471 3576
3472 static bool IsOnInvalidatedCodeObject(Address addr) {
3473 // We did not record any slots in large objects thus
3474 // we can safely go to the page from the slot address.
3475 Page* p = Page::FromAddress(addr);
3476
3477 // First check owner's identity because old space is swept concurrently or
3478 // lazily and might still have non-zero mark-bits on some pages.
3479 if (p->owner()->identity() != CODE_SPACE) return false;
3480
3481 // In code space only bits on evacuation candidates (but we don't record
3482 // any slots on them) and under invalidated code objects are non-zero.
3483 MarkBit mark_bit =
3484 p->markbits()->MarkBitFromIndex(Page::FastAddressToMarkbitIndex(addr));
3485
3486 return Marking::IsBlackOrGrey(mark_bit);
3487 }
3488
3489
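
Note: the lookup above maps an address to its mark bit through the page's bitmap, after the owner check rules out concurrently swept spaces. A sketch of such an address-to-mark-bit computation; the page size and bitmap layout below are assumptions, not V8's actual constants:

#include <cstdint>

// Sketch: one mark bit per word, indexed by the address's page offset.
const uintptr_t kPageSizeSketch = uintptr_t{1} << 20;  // assumed 1 MB pages
const uintptr_t kPointerSizeLog2 = 3;                  // 8-byte words

struct PageSketch {
  uint8_t markbits[kPageSizeSketch >> (kPointerSizeLog2 + 3)];

  bool MarkBitFromAddress(uintptr_t addr) const {
    uintptr_t index = (addr & (kPageSizeSketch - 1)) >> kPointerSizeLog2;
    return (markbits[index >> 3] >> (index & 7)) & 1;
  }
};
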
3490 void MarkCompactCollector::InvalidateCode(Code* code) { 3577 void MarkCompactCollector::InvalidateCode(Code* code) {
3491 if (heap_->incremental_marking()->IsCompacting() && 3578 if (heap_->incremental_marking()->IsCompacting() &&
3492 !ShouldSkipEvacuationSlotRecording(code)) { 3579 !ShouldSkipEvacuationSlotRecording(code)) {
3493 DCHECK(compacting_); 3580 DCHECK(compacting_);
3494 3581
3495 // If the object is white then no slots were recorded on it yet. 3582 // If the object is white then no slots were recorded on it yet.
3496 MarkBit mark_bit = Marking::MarkBitFrom(code); 3583 MarkBit mark_bit = Marking::MarkBitFrom(code);
3497 if (Marking::IsWhite(mark_bit)) return; 3584 if (Marking::IsWhite(mark_bit)) return;
3498 3585
3499 // Ignore all slots that might have been recorded in the body of the 3586 // Ignore all slots that might have been recorded in the body of the
(...skipping 77 matching lines...)
3577 GCTracer::Scope gc_scope(heap()->tracer(), 3664 GCTracer::Scope gc_scope(heap()->tracer(),
3578 GCTracer::Scope::MC_UPDATE_OLD_TO_NEW_POINTERS); 3665 GCTracer::Scope::MC_UPDATE_OLD_TO_NEW_POINTERS);
3579 StoreBufferRebuildScope scope(heap_, heap_->store_buffer(), 3666 StoreBufferRebuildScope scope(heap_, heap_->store_buffer(),
3580 &Heap::ScavengeStoreBufferCallback); 3667 &Heap::ScavengeStoreBufferCallback);
3581 heap_->store_buffer()->IteratePointersToNewSpace(&UpdatePointer); 3668 heap_->store_buffer()->IteratePointersToNewSpace(&UpdatePointer);
3582 } 3669 }
3583 3670
3584 { 3671 {
3585 GCTracer::Scope gc_scope(heap()->tracer(), 3672 GCTracer::Scope gc_scope(heap()->tracer(),
3586 GCTracer::Scope::MC_UPDATE_POINTERS_TO_EVACUATED); 3673 GCTracer::Scope::MC_UPDATE_POINTERS_TO_EVACUATED);
3587 SlotsBuffer::UpdateSlotsRecordedIn(heap_, migration_slots_buffer_); 3674 UpdateSlotsRecordedIn(migration_slots_buffer_);
3588 if (FLAG_trace_fragmentation_verbose) { 3675 if (FLAG_trace_fragmentation_verbose) {
3589 PrintF(" migration slots buffer: %d\n", 3676 PrintF(" migration slots buffer: %d\n",
3590 SlotsBuffer::SizeOfChain(migration_slots_buffer_)); 3677 SlotsBuffer::SizeOfChain(migration_slots_buffer_));
3591 } 3678 }
3592 } 3679 }
3593 3680
3594 int npages = evacuation_candidates_.length(); 3681 int npages = evacuation_candidates_.length();
3595 { 3682 {
3596 GCTracer::Scope gc_scope( 3683 GCTracer::Scope gc_scope(
3597 heap()->tracer(), 3684 heap()->tracer(),
3598 GCTracer::Scope::MC_UPDATE_POINTERS_BETWEEN_EVACUATED); 3685 GCTracer::Scope::MC_UPDATE_POINTERS_BETWEEN_EVACUATED);
3599 for (int i = 0; i < npages; i++) { 3686 for (int i = 0; i < npages; i++) {
3600 Page* p = evacuation_candidates_[i]; 3687 Page* p = evacuation_candidates_[i];
3601 DCHECK(p->IsEvacuationCandidate() || 3688 DCHECK(p->IsEvacuationCandidate() ||
3602 p->IsFlagSet(Page::RESCAN_ON_EVACUATION)); 3689 p->IsFlagSet(Page::RESCAN_ON_EVACUATION));
3603 3690
3604 if (p->IsEvacuationCandidate()) { 3691 if (p->IsEvacuationCandidate()) {
3605 SlotsBuffer::UpdateSlotsRecordedIn(heap_, p->slots_buffer()); 3692 UpdateSlotsRecordedIn(p->slots_buffer());
3606 if (FLAG_trace_fragmentation_verbose) { 3693 if (FLAG_trace_fragmentation_verbose) {
3607 PrintF(" page %p slots buffer: %d\n", reinterpret_cast<void*>(p), 3694 PrintF(" page %p slots buffer: %d\n", reinterpret_cast<void*>(p),
3608 SlotsBuffer::SizeOfChain(p->slots_buffer())); 3695 SlotsBuffer::SizeOfChain(p->slots_buffer()));
3609 } 3696 }
3610 3697
3611 // Important: the skip list should be cleared only after roots were updated 3698 // Important: the skip list should be cleared only after roots were updated
3612 // because root iteration traverses the stack and might have to find 3699 // because root iteration traverses the stack and might have to find
3613 // code objects from a non-updated pc pointing into an evacuation candidate. 3700 // code objects from a non-updated pc pointing into an evacuation candidate.
3614 SkipList* list = p->skip_list(); 3701 SkipList* list = p->skip_list();
3615 if (list != NULL) list->Clear(); 3702 if (list != NULL) list->Clear();
(...skipping 37 matching lines...)
3653 3740
3654 // Update pointers from external string table. 3741 // Update pointers from external string table.
3655 heap_->UpdateReferencesInExternalStringTable( 3742 heap_->UpdateReferencesInExternalStringTable(
3656 &UpdateReferenceInExternalStringTableEntry); 3743 &UpdateReferenceInExternalStringTableEntry);
3657 3744
3658 EvacuationWeakObjectRetainer evacuation_object_retainer; 3745 EvacuationWeakObjectRetainer evacuation_object_retainer;
3659 heap()->ProcessAllWeakReferences(&evacuation_object_retainer); 3746 heap()->ProcessAllWeakReferences(&evacuation_object_retainer);
3660 3747
3661 heap_->isolate()->inner_pointer_to_code_cache()->Flush(); 3748 heap_->isolate()->inner_pointer_to_code_cache()->Flush();
3662 3749
3663 slots_buffer_allocator_.DeallocateChain(&migration_slots_buffer_); 3750 slots_buffer_allocator_->DeallocateChain(&migration_slots_buffer_);
3664 DCHECK(migration_slots_buffer_ == NULL); 3751 DCHECK(migration_slots_buffer_ == NULL);
3665 3752
3666 // The hashing of weak_object_to_code_table is no longer valid. 3753 // The hashing of weak_object_to_code_table is no longer valid.
3667 heap()->weak_object_to_code_table()->Rehash( 3754 heap()->weak_object_to_code_table()->Rehash(
3668 heap()->isolate()->factory()->undefined_value()); 3755 heap()->isolate()->factory()->undefined_value());
3669 } 3756 }
3670 3757
3671 3758
3672 void MarkCompactCollector::MoveEvacuationCandidatesToEndOfPagesList() { 3759 void MarkCompactCollector::MoveEvacuationCandidatesToEndOfPagesList() {
3673 int npages = evacuation_candidates_.length(); 3760 int npages = evacuation_candidates_.length();
3674 for (int i = 0; i < npages; i++) { 3761 for (int i = 0; i < npages; i++) {
3675 Page* p = evacuation_candidates_[i]; 3762 Page* p = evacuation_candidates_[i];
3676 if (!p->IsEvacuationCandidate()) continue; 3763 if (!p->IsEvacuationCandidate()) continue;
3677 p->Unlink(); 3764 p->Unlink();
3678 PagedSpace* space = static_cast<PagedSpace*>(p->owner()); 3765 PagedSpace* space = static_cast<PagedSpace*>(p->owner());
3679 p->InsertAfter(space->LastPage()); 3766 p->InsertAfter(space->LastPage());
3680 } 3767 }
3681 } 3768 }
3682 3769
3683 3770
3684 void MarkCompactCollector::ReleaseEvacuationCandidates() { 3771 void MarkCompactCollector::ReleaseEvacuationCandidates() {
3685 int npages = evacuation_candidates_.length(); 3772 int npages = evacuation_candidates_.length();
3686 for (int i = 0; i < npages; i++) { 3773 for (int i = 0; i < npages; i++) {
3687 Page* p = evacuation_candidates_[i]; 3774 Page* p = evacuation_candidates_[i];
3688 if (!p->IsEvacuationCandidate()) continue; 3775 if (!p->IsEvacuationCandidate()) continue;
3689 PagedSpace* space = static_cast<PagedSpace*>(p->owner()); 3776 PagedSpace* space = static_cast<PagedSpace*>(p->owner());
3690 space->Free(p->area_start(), p->area_size()); 3777 space->Free(p->area_start(), p->area_size());
3691 p->set_scan_on_scavenge(false); 3778 p->set_scan_on_scavenge(false);
3692 slots_buffer_allocator_.DeallocateChain(p->slots_buffer_address()); 3779 slots_buffer_allocator_->DeallocateChain(p->slots_buffer_address());
3693 p->ResetLiveBytes(); 3780 p->ResetLiveBytes();
3694 space->ReleasePage(p); 3781 space->ReleasePage(p);
3695 } 3782 }
3696 evacuation_candidates_.Rewind(0); 3783 evacuation_candidates_.Rewind(0);
3697 compacting_ = false; 3784 compacting_ = false;
3698 heap()->FilterStoreBufferEntriesOnAboutToBeFreedPages(); 3785 heap()->FilterStoreBufferEntriesOnAboutToBeFreedPages();
3699 heap()->FreeQueuedChunks(); 3786 heap()->FreeQueuedChunks();
3700 } 3787 }
3701 3788
3702 3789
(...skipping 672 matching lines...)
4375 4462
4376 Isolate* MarkCompactCollector::isolate() const { return heap_->isolate(); } 4463 Isolate* MarkCompactCollector::isolate() const { return heap_->isolate(); }
4377 4464
4378 4465
4379 void MarkCompactCollector::Initialize() { 4466 void MarkCompactCollector::Initialize() {
4380 MarkCompactMarkingVisitor::Initialize(); 4467 MarkCompactMarkingVisitor::Initialize();
4381 IncrementalMarking::Initialize(); 4468 IncrementalMarking::Initialize();
4382 } 4469 }
4383 4470
4384 4471
4385 bool SlotsBuffer::IsTypedSlot(ObjectSlot slot) {
4386 return reinterpret_cast<uintptr_t>(slot) < NUMBER_OF_SLOT_TYPES;
4387 }
4388
4389
4390 bool SlotsBuffer::AddToSynchronized(SlotsBufferAllocator* allocator,
4391 SlotsBuffer** buffer_address,
4392 base::Mutex* buffer_mutex, SlotType type,
4393 Address addr, AdditionMode mode) {
4394 base::LockGuard<base::Mutex> lock_guard(buffer_mutex);
4395 return AddTo(allocator, buffer_address, type, addr, mode);
4396 }
4397
4398
4399 bool SlotsBuffer::AddTo(SlotsBufferAllocator* allocator,
4400 SlotsBuffer** buffer_address, SlotType type,
4401 Address addr, AdditionMode mode) {
4402 SlotsBuffer* buffer = *buffer_address;
4403 if (buffer == NULL || !buffer->HasSpaceForTypedSlot()) {
4404 if (mode == FAIL_ON_OVERFLOW && ChainLengthThresholdReached(buffer)) {
4405 allocator->DeallocateChain(buffer_address);
4406 return false;
4407 }
4408 buffer = allocator->AllocateBuffer(buffer);
4409 *buffer_address = buffer;
4410 }
4411 DCHECK(buffer->HasSpaceForTypedSlot());
4412 buffer->Add(reinterpret_cast<ObjectSlot>(type));
4413 buffer->Add(reinterpret_cast<ObjectSlot>(addr));
4414 return true;
4415 }
4416
4417
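
Note: AddTo grows the chain lazily. A fresh buffer is prepended when the head is full, unless FAIL_ON_OVERFLOW was requested and the chain has already hit its length threshold, in which case the chain is torn down and failure is reported so the caller can evict the page. A sketch of that control flow, reusing the Buffer sketch above (the constants are assumptions):

// Sketch of AddTo's growth and overflow policy.
const int kChainLengthThreshold = 6;
const size_t kBufferCapacitySketch = 1024;

bool AddToSketch(Buffer** head, int* chain_length, ObjectSlot slot,
                 bool fail_on_overflow) {
  Buffer* buffer = *head;
  if (buffer == nullptr || buffer->entries.size() >= kBufferCapacitySketch) {
    if (fail_on_overflow && *chain_length >= kChainLengthThreshold) {
      // The real code deallocates the whole chain here and reports failure;
      // the caller then evicts the too-popular evacuation candidate.
      return false;
    }
    Buffer* fresh = new Buffer();
    fresh->next = buffer;  // prepend, as AllocateBuffer(buffer) does
    *head = fresh;
    ++*chain_length;
  }
  (*head)->entries.push_back(slot);
  return true;
}
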
4418 void SlotsBuffer::RemoveInvalidSlots(Heap* heap, SlotsBuffer* buffer) {
4419 // Remove entries by replacing them with an old-space slot containing a smi
4420 // that is located in an unmovable page.
4421 const ObjectSlot kRemovedEntry = HeapObject::RawField(
4422 heap->empty_fixed_array(), FixedArrayBase::kLengthOffset);
4423 DCHECK(Page::FromAddress(reinterpret_cast<Address>(kRemovedEntry))
4424 ->NeverEvacuate());
4425
4426 while (buffer != NULL) {
4427 SlotsBuffer::ObjectSlot* slots = buffer->slots_;
4428 intptr_t slots_count = buffer->idx_;
4429
4430 for (int slot_idx = 0; slot_idx < slots_count; ++slot_idx) {
4431 ObjectSlot slot = slots[slot_idx];
4432 if (!IsTypedSlot(slot)) {
4433 Object* object = *slot;
4434 // Slots are invalid when they currently:
4435 // - do not point to a heap object (SMI)
4436 // - point to a heap object in new space
4437 // - are not within a live heap object on a valid pointer slot
4438 // - point to a heap object not on an evacuation candidate
4439 if (!object->IsHeapObject() || heap->InNewSpace(object) ||
4440 !heap->mark_compact_collector()->IsSlotInLiveObject(
4441 reinterpret_cast<Address>(slot)) ||
4442 !Page::FromAddress(reinterpret_cast<Address>(object))
4443 ->IsEvacuationCandidate()) {
4444 // TODO(hpayer): Instead of replacing slots with kRemovedEntry we
4445 // could shrink the slots buffer in-place.
4446 slots[slot_idx] = kRemovedEntry;
4447 }
4448 } else {
4449 ++slot_idx;
4450 DCHECK(slot_idx < slots_count);
4451 }
4452 }
4453 buffer = buffer->next();
4454 }
4455 }
4456
4457
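
Note: rather than compacting the buffer, invalid entries are overwritten in place with a slot that is always safe to update later — a field of the empty fixed array on a page that never moves. A sketch of that sentinel trick, reusing the types from the sketches above (safe_cell is a hypothetical stand-in for that field):

// Sketch: invalidate entries in place with an always-safe sentinel slot.
static void* safe_cell = nullptr;
static const ObjectSlot kRemovedEntrySketch = static_cast<ObjectSlot>(&safe_cell);

void RemoveMatchingSketch(Buffer* buffer, bool (*invalid)(ObjectSlot)) {
  for (; buffer != nullptr; buffer = buffer->next) {
    for (size_t i = 0; i < buffer->entries.size(); ++i) {
      ObjectSlot slot = buffer->entries[i];
      if (IsTypedSlotSketch(slot)) {
        ++i;  // skip the typed payload; typed entries are left untouched
        continue;
      }
      if (invalid(slot)) buffer->entries[i] = kRemovedEntrySketch;
    }
  }
}
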
4458 void SlotsBuffer::RemoveObjectSlots(Heap* heap, SlotsBuffer* buffer,
4459 Address start_slot, Address end_slot) {
4460 // Remove entries by replacing them with an old-space slot containing a smi
4461 // that is located in an unmovable page.
4462 const ObjectSlot kRemovedEntry = HeapObject::RawField(
4463 heap->empty_fixed_array(), FixedArrayBase::kLengthOffset);
4464 DCHECK(Page::FromAddress(reinterpret_cast<Address>(kRemovedEntry))
4465 ->NeverEvacuate());
4466
4467 while (buffer != NULL) {
4468 SlotsBuffer::ObjectSlot* slots = buffer->slots_;
4469 intptr_t slots_count = buffer->idx_;
4470 bool is_typed_slot = false;
4471
4472 for (int slot_idx = 0; slot_idx < slots_count; ++slot_idx) {
4473 ObjectSlot slot = slots[slot_idx];
4474 if (!IsTypedSlot(slot)) {
4475 Address slot_address = reinterpret_cast<Address>(slot);
4476 if (slot_address >= start_slot && slot_address < end_slot) {
4477 // TODO(hpayer): Instead of replacing slots with kRemovedEntry we
4478 // could shrink the slots buffer in-place.
4479 slots[slot_idx] = kRemovedEntry;
4480 if (is_typed_slot) {
4481 slots[slot_idx - 1] = kRemovedEntry;
4482 }
4483 }
4484 is_typed_slot = false;
4485 } else {
4486 is_typed_slot = true;
4487 DCHECK(slot_idx < slots_count);
4488 }
4489 }
4490 buffer = buffer->next();
4491 }
4492 }
4493
4494
4495 void SlotsBuffer::VerifySlots(Heap* heap, SlotsBuffer* buffer) {
4496 while (buffer != NULL) {
4497 SlotsBuffer::ObjectSlot* slots = buffer->slots_;
4498 intptr_t slots_count = buffer->idx_;
4499
4500 for (int slot_idx = 0; slot_idx < slots_count; ++slot_idx) {
4501 ObjectSlot slot = slots[slot_idx];
4502 if (!IsTypedSlot(slot)) {
4503 Object* object = *slot;
4504 if (object->IsHeapObject()) {
4505 HeapObject* heap_object = HeapObject::cast(object);
4506 CHECK(!heap->InNewSpace(object));
4507 heap->mark_compact_collector()->VerifyIsSlotInLiveObject(
4508 reinterpret_cast<Address>(slot), heap_object);
4509 }
4510 } else {
4511 ++slot_idx;
4512 DCHECK(slot_idx < slots_count);
4513 }
4514 }
4515 buffer = buffer->next();
4516 }
4517 }
4518
4519
4520 static inline SlotsBuffer::SlotType SlotTypeForRMode(RelocInfo::Mode rmode) {
4521 if (RelocInfo::IsCodeTarget(rmode)) {
4522 return SlotsBuffer::CODE_TARGET_SLOT;
4523 } else if (RelocInfo::IsCell(rmode)) {
4524 return SlotsBuffer::CELL_TARGET_SLOT;
4525 } else if (RelocInfo::IsEmbeddedObject(rmode)) {
4526 return SlotsBuffer::EMBEDDED_OBJECT_SLOT;
4527 } else if (RelocInfo::IsDebugBreakSlot(rmode)) {
4528 return SlotsBuffer::DEBUG_TARGET_SLOT;
4529 }
4530 UNREACHABLE();
4531 return SlotsBuffer::NUMBER_OF_SLOT_TYPES;
4532 }
4533
4534
4535 void MarkCompactCollector::RecordRelocSlot(RelocInfo* rinfo, Object* target) {
4536 Page* target_page = Page::FromAddress(reinterpret_cast<Address>(target));
4537 RelocInfo::Mode rmode = rinfo->rmode();
4538 if (target_page->IsEvacuationCandidate() &&
4539 (rinfo->host() == NULL ||
4540 !ShouldSkipEvacuationSlotRecording(rinfo->host()))) {
4541 Address addr = rinfo->pc();
4542 SlotsBuffer::SlotType slot_type = SlotTypeForRMode(rmode);
4543 if (rinfo->IsInConstantPool()) {
4544 addr = rinfo->constant_pool_entry_address();
4545 if (RelocInfo::IsCodeTarget(rmode)) {
4546 slot_type = SlotsBuffer::CODE_ENTRY_SLOT;
4547 } else {
4548 DCHECK(RelocInfo::IsEmbeddedObject(rmode));
4549 slot_type = SlotsBuffer::OBJECT_SLOT;
4550 }
4551 }
4552 bool success = SlotsBuffer::AddTo(
4553 &slots_buffer_allocator_, target_page->slots_buffer_address(),
4554 slot_type, addr, SlotsBuffer::FAIL_ON_OVERFLOW);
4555 if (!success) {
4556 EvictPopularEvacuationCandidate(target_page);
4557 }
4558 }
4559 }
4560
4561
4562 void MarkCompactCollector::EvictPopularEvacuationCandidate(Page* page) { 4472 void MarkCompactCollector::EvictPopularEvacuationCandidate(Page* page) {
4563 if (FLAG_trace_fragmentation) { 4473 if (FLAG_trace_fragmentation) {
4564 PrintF("Page %p is too popular. Disabling evacuation.\n", 4474 PrintF("Page %p is too popular. Disabling evacuation.\n",
4565 reinterpret_cast<void*>(page)); 4475 reinterpret_cast<void*>(page));
4566 } 4476 }
4567 4477
4568 isolate()->CountUsage(v8::Isolate::UseCounterFeature::kSlotsBufferOverflow); 4478 isolate()->CountUsage(v8::Isolate::UseCounterFeature::kSlotsBufferOverflow);
4569 4479
4570 // TODO(gc) If all evacuation candidates are too popular we 4480 // TODO(gc) If all evacuation candidates are too popular we
4571 // should stop slots recording entirely. 4481 // should stop slots recording entirely.
4572 page->ClearEvacuationCandidate(); 4482 page->ClearEvacuationCandidate();
4573 4483
4574 DCHECK(!page->IsFlagSet(Page::POPULAR_PAGE)); 4484 DCHECK(!page->IsFlagSet(Page::POPULAR_PAGE));
4575 page->SetFlag(Page::POPULAR_PAGE); 4485 page->SetFlag(Page::POPULAR_PAGE);
4576 4486
4577 // We were not collecting slots on this page that point 4487 // We were not collecting slots on this page that point
4578 // to other evacuation candidates; thus we have to 4488 // to other evacuation candidates; thus we have to
4579 // rescan the page after evacuation to discover and update all 4489 // rescan the page after evacuation to discover and update all
4580 // pointers to evacuated objects. 4490 // pointers to evacuated objects.
4581 page->SetFlag(Page::RESCAN_ON_EVACUATION); 4491 page->SetFlag(Page::RESCAN_ON_EVACUATION);
4582 } 4492 }
4583 4493
4584 4494
4585 void MarkCompactCollector::RecordCodeEntrySlot(HeapObject* object, Address slot, 4495 void MarkCompactCollector::RecordCodeEntrySlot(HeapObject* object, Address slot,
4586 Code* target) { 4496 Code* target) {
4587 Page* target_page = Page::FromAddress(reinterpret_cast<Address>(target)); 4497 Page* target_page = Page::FromAddress(reinterpret_cast<Address>(target));
4588 if (target_page->IsEvacuationCandidate() && 4498 if (target_page->IsEvacuationCandidate() &&
4589 !ShouldSkipEvacuationSlotRecording(object)) { 4499 !ShouldSkipEvacuationSlotRecording(object)) {
4590 if (!SlotsBuffer::AddTo(&slots_buffer_allocator_, 4500 if (!SlotsBuffer::AddTo(slots_buffer_allocator_,
4591 target_page->slots_buffer_address(), 4501 target_page->slots_buffer_address(),
4592 SlotsBuffer::CODE_ENTRY_SLOT, slot, 4502 SlotsBuffer::CODE_ENTRY_SLOT, slot,
4593 SlotsBuffer::FAIL_ON_OVERFLOW)) { 4503 SlotsBuffer::FAIL_ON_OVERFLOW)) {
4594 EvictPopularEvacuationCandidate(target_page); 4504 EvictPopularEvacuationCandidate(target_page);
4595 } 4505 }
4596 } 4506 }
4597 } 4507 }
4598 4508
4599 4509
4600 void MarkCompactCollector::RecordCodeTargetPatch(Address pc, Code* target) { 4510 void MarkCompactCollector::RecordCodeTargetPatch(Address pc, Code* target) {
4601 DCHECK(heap()->gc_state() == Heap::MARK_COMPACT); 4511 DCHECK(heap()->gc_state() == Heap::MARK_COMPACT);
4602 if (is_compacting()) { 4512 if (is_compacting()) {
4603 Code* host = 4513 Code* host =
4604 isolate()->inner_pointer_to_code_cache()->GcSafeFindCodeForInnerPointer( 4514 isolate()->inner_pointer_to_code_cache()->GcSafeFindCodeForInnerPointer(
4605 pc); 4515 pc);
4606 MarkBit mark_bit = Marking::MarkBitFrom(host); 4516 MarkBit mark_bit = Marking::MarkBitFrom(host);
4607 if (Marking::IsBlack(mark_bit)) { 4517 if (Marking::IsBlack(mark_bit)) {
4608 RelocInfo rinfo(pc, RelocInfo::CODE_TARGET, 0, host); 4518 RelocInfo rinfo(pc, RelocInfo::CODE_TARGET, 0, host);
4609 RecordRelocSlot(&rinfo, target); 4519 RecordRelocSlot(&rinfo, target);
4610 } 4520 }
4611 } 4521 }
4612 } 4522 }
4613 4523
4614
4615 static inline SlotsBuffer::SlotType DecodeSlotType(
4616 SlotsBuffer::ObjectSlot slot) {
4617 return static_cast<SlotsBuffer::SlotType>(reinterpret_cast<intptr_t>(slot));
4618 }
4619
4620
4621 void SlotsBuffer::UpdateSlots(Heap* heap) {
4622 PointersUpdatingVisitor v(heap);
4623
4624 for (int slot_idx = 0; slot_idx < idx_; ++slot_idx) {
4625 ObjectSlot slot = slots_[slot_idx];
4626 if (!IsTypedSlot(slot)) {
4627 PointersUpdatingVisitor::UpdateSlot(heap, slot);
4628 } else {
4629 ++slot_idx;
4630 DCHECK(slot_idx < idx_);
4631 UpdateSlot(heap->isolate(), &v, DecodeSlotType(slot),
4632 reinterpret_cast<Address>(slots_[slot_idx]));
4633 }
4634 }
4635 }
4636
4637
4638 void SlotsBuffer::UpdateSlotsWithFilter(Heap* heap) {
4639 PointersUpdatingVisitor v(heap);
4640
4641 for (int slot_idx = 0; slot_idx < idx_; ++slot_idx) {
4642 ObjectSlot slot = slots_[slot_idx];
4643 if (!IsTypedSlot(slot)) {
4644 if (!IsOnInvalidatedCodeObject(reinterpret_cast<Address>(slot))) {
4645 PointersUpdatingVisitor::UpdateSlot(heap, slot);
4646 }
4647 } else {
4648 ++slot_idx;
4649 DCHECK(slot_idx < idx_);
4650 Address pc = reinterpret_cast<Address>(slots_[slot_idx]);
4651 if (!IsOnInvalidatedCodeObject(pc)) {
4652 UpdateSlot(heap->isolate(), &v, DecodeSlotType(slot),
4653 reinterpret_cast<Address>(slots_[slot_idx]));
4654 }
4655 }
4656 }
4657 }
4658
4659
4660 SlotsBuffer* SlotsBufferAllocator::AllocateBuffer(SlotsBuffer* next_buffer) {
4661 return new SlotsBuffer(next_buffer);
4662 }
4663
4664
4665 void SlotsBufferAllocator::DeallocateBuffer(SlotsBuffer* buffer) {
4666 delete buffer;
4667 }
4668
4669
4670 void SlotsBufferAllocator::DeallocateChain(SlotsBuffer** buffer_address) {
4671 SlotsBuffer* buffer = *buffer_address;
4672 while (buffer != NULL) {
4673 SlotsBuffer* next_buffer = buffer->next();
4674 DeallocateBuffer(buffer);
4675 buffer = next_buffer;
4676 }
4677 *buffer_address = NULL;
4678 }
4679 } // namespace internal 4524 } // namespace internal
4680 } // namespace v8 4525 } // namespace v8