Chromium Code Reviews

Unified Diff: src/heap/mark-compact.cc

Issue 2176133002: Revert of [heap] Remove black pages and use black areas instead. (Closed)
Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 4 years, 5 months ago

@@ -1,10 +1,10 @@
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "src/heap/mark-compact.h"

 #include "src/base/atomicops.h"
 #include "src/base/bits.h"
 #include "src/base/sys-info.h"
 #include "src/code-stubs.h"
(...skipping 86 matching lines...)
@@ -97,63 +97,67 @@

  private:
   Heap* heap_;
 };


 static void VerifyMarking(Heap* heap, Address bottom, Address top) {
   VerifyMarkingVisitor visitor(heap);
   HeapObject* object;
   Address next_object_must_be_here_or_later = bottom;
-  for (Address current = bottom; current < top;) {
+
+  for (Address current = bottom; current < top; current += kPointerSize) {
     object = HeapObject::FromAddress(current);
     if (MarkCompactCollector::IsMarked(object)) {
       CHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(object)));
       CHECK(current >= next_object_must_be_here_or_later);
       object->Iterate(&visitor);
       next_object_must_be_here_or_later = current + object->Size();
-      // The object is either part of a black area of black allocation or a
-      // regular black object
-      Page* page = Page::FromAddress(current);
-      CHECK(
-          page->markbits()->AllBitsSetInRange(
-              page->AddressToMarkbitIndex(current),
-              page->AddressToMarkbitIndex(next_object_must_be_here_or_later)) ||
-          page->markbits()->AllBitsClearInRange(
-              page->AddressToMarkbitIndex(current + kPointerSize * 2),
-              page->AddressToMarkbitIndex(next_object_must_be_here_or_later)));
-      current = next_object_must_be_here_or_later;
-    } else {
+      // The next word for sure belongs to the current object, jump over it.
       current += kPointerSize;
     }
   }
 }

+static void VerifyMarkingBlackPage(Heap* heap, Page* page) {
+  CHECK(page->IsFlagSet(Page::BLACK_PAGE));
+  VerifyMarkingVisitor visitor(heap);
+  HeapObjectIterator it(page);
+  for (HeapObject* object = it.Next(); object != NULL; object = it.Next()) {
+    CHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(object)));
+    object->Iterate(&visitor);
+  }
+}
+
 static void VerifyMarking(NewSpace* space) {
   Address end = space->top();
   // The bottom position is at the start of its page. Allows us to use
   // page->area_start() as start of range on all pages.
   CHECK_EQ(space->bottom(), Page::FromAddress(space->bottom())->area_start());

   NewSpacePageRange range(space->bottom(), end);
   for (auto it = range.begin(); it != range.end();) {
     Page* page = *(it++);
     Address limit = it != range.end() ? page->area_end() : end;
     CHECK(limit == end || !page->Contains(end));
     VerifyMarking(space->heap(), page->area_start(), limit);
   }
 }


 static void VerifyMarking(PagedSpace* space) {
   for (Page* p : *space) {
-    VerifyMarking(space->heap(), p->area_start(), p->area_end());
+    if (p->IsFlagSet(Page::BLACK_PAGE)) {
+      VerifyMarkingBlackPage(space->heap(), p);
+    } else {
+      VerifyMarking(space->heap(), p->area_start(), p->area_end());
+    }
   }
 }


 static void VerifyMarking(Heap* heap) {
   VerifyMarking(heap->old_space());
   VerifyMarking(heap->code_space());
   VerifyMarking(heap->map_space());
   VerifyMarking(heap->new_space());

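
The removed (left-hand) check accepts two shapes of blackness for each marked object: either every mark bit covering [current, next_object_must_be_here_or_later) is set, meaning the object lies inside a black allocation area, or every bit past the first two words is clear, meaning it is an ordinary black object whose mark sits only at its start. A minimal sketch of what such range checks do over a word-granularity mark bitmap, using made-up names rather than V8's Bitmap class:

    // Sketch only: bit i covers the i-th pointer-sized word of a page.
    // ToyMarkBitmap and its methods are hypothetical, not V8's Bitmap API.
    #include <cstddef>
    #include <cstdint>
    #include <vector>

    class ToyMarkBitmap {
     public:
      explicit ToyMarkBitmap(size_t words) : bits_((words + 31) / 32, 0) {}

      void Set(size_t i) { bits_[i >> 5] |= 1u << (i & 31); }

      // Every bit in [start, end) set: the range forms one black area.
      bool AllBitsSetInRange(size_t start, size_t end) const {
        for (size_t i = start; i < end; i++)
          if ((bits_[i >> 5] & (1u << (i & 31))) == 0) return false;
        return true;
      }

      // Every bit in [start, end) clear: only the object's first words are
      // marked, as for a regular black object.
      bool AllBitsClearInRange(size_t start, size_t end) const {
        for (size_t i = start; i < end; i++)
          if ((bits_[i >> 5] & (1u << (i & 31))) != 0) return false;
        return true;
      }

     private:
      std::vector<uint32_t> bits_;
    };

On the right-hand side this per-object range check disappears: a page flagged BLACK_PAGE has all of its mark bits set, so VerifyMarkingBlackPage only needs to confirm that every object on the page is black.
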
(...skipping 238 matching lines...)
@@ -398,43 +402,49 @@
     Map* map = Map::cast(obj);
     map->VerifyOmittedMapChecks();
   }
 }
 #endif  // VERIFY_HEAP


 static void ClearMarkbitsInPagedSpace(PagedSpace* space) {
   for (Page* p : *space) {
     p->ClearLiveness();
+    if (p->IsFlagSet(Page::BLACK_PAGE)) {
+      p->ClearFlag(Page::BLACK_PAGE);
+    }
   }
 }


 static void ClearMarkbitsInNewSpace(NewSpace* space) {
   for (Page* page : *space) {
     page->ClearLiveness();
   }
 }


 void MarkCompactCollector::ClearMarkbits() {
   ClearMarkbitsInPagedSpace(heap_->code_space());
   ClearMarkbitsInPagedSpace(heap_->map_space());
   ClearMarkbitsInPagedSpace(heap_->old_space());
   ClearMarkbitsInNewSpace(heap_->new_space());

   LargeObjectIterator it(heap_->lo_space());
   for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) {
     Marking::MarkWhite(ObjectMarking::MarkBitFrom(obj));
     MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());
     chunk->ResetProgressBar();
     chunk->ResetLiveBytes();
+    if (chunk->IsFlagSet(Page::BLACK_PAGE)) {
+      chunk->ClearFlag(Page::BLACK_PAGE);
+    }
   }
 }

 class MarkCompactCollector::Sweeper::SweeperTask : public v8::Task {
  public:
   SweeperTask(Sweeper* sweeper, base::Semaphore* pending_sweeper_tasks,
               AllocationSpace space_to_start)
       : sweeper_(sweeper),
         pending_sweeper_tasks_(pending_sweeper_tasks),
         space_to_start_(space_to_start) {}
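
Both added branches clear a per-page flag once the liveness data has been reset. Chunk flags are kept in a bit field, so testing and clearing them is plain mask arithmetic; a self-contained sketch with hypothetical names, not the real MemoryChunk interface:

    // Sketch only: how a BLACK_PAGE-style flag is tested and cleared.
    #include <cstdint>

    struct ToyPage {
      enum Flag : uint32_t { BLACK_PAGE = 1u << 0, NEVER_EVACUATE = 1u << 1 };
      uint32_t flags = 0;

      bool IsFlagSet(Flag f) const { return (flags & f) != 0; }
      void SetFlag(Flag f) { flags |= f; }
      void ClearFlag(Flag f) { flags &= ~static_cast<uint32_t>(f); }
    };
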
(...skipping 201 matching lines...)
@@ -642,20 +652,21 @@
   int number_of_pages = space->CountTotalPages();
   int area_size = space->AreaSize();

   // Pairs of (live_bytes_in_page, page).
   typedef std::pair<int, Page*> LiveBytesPagePair;
   std::vector<LiveBytesPagePair> pages;
   pages.reserve(number_of_pages);

   for (Page* p : *space) {
     if (p->NeverEvacuate()) continue;
+    if (p->IsFlagSet(Page::BLACK_PAGE)) continue;
     // Invariant: Evacuation candidates are just created when marking is
     // started. This means that sweeping has finished. Furthermore, at the end
     // of a GC all evacuation candidates are cleared and their slot buffers are
     // released.
     CHECK(!p->IsEvacuationCandidate());
     CHECK_NULL(p->old_to_old_slots());
     CHECK_NULL(p->typed_old_to_old_slots());
     CHECK(p->SweepingDone());
     DCHECK(p->area_size() == area_size);
     pages.push_back(std::make_pair(p->LiveBytesFromFreeList(), p));
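
Black pages are skipped as evacuation candidates because their objects were allocated black during marking and the whole page is treated as live, so there is nothing to gain from moving it. The elided lines below this hunk rank the remaining pages using the (live_bytes_in_page, page) pairs gathered above; a simplified sketch of that ranking idea, assuming the cheapest candidates are simply the pages with the fewest live bytes (the real selection applies additional modes and limits):

    // Sketch only: order pages by live bytes and take the emptiest ones.
    #include <algorithm>
    #include <cstddef>
    #include <utility>
    #include <vector>

    template <typename Page>
    std::vector<Page*> PickEmptiestPages(std::vector<std::pair<int, Page*>> pages,
                                         size_t max_candidates) {
      std::sort(pages.begin(), pages.end(),
                [](const std::pair<int, Page*>& a,
                   const std::pair<int, Page*>& b) { return a.first < b.first; });
      std::vector<Page*> candidates;
      for (const auto& entry : pages) {
        if (candidates.size() == max_candidates) break;
        candidates.push_back(entry.second);  // Fewest live bytes first.
      }
      return candidates;
    }
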
(...skipping 1233 matching lines...)
@@ -1895,21 +1906,23 @@
     object->IterateBody(&visitor);
     return true;
   }

  private:
   Heap* heap_;
 };

 void MarkCompactCollector::DiscoverGreyObjectsInSpace(PagedSpace* space) {
   for (Page* p : *space) {
-    DiscoverGreyObjectsOnPage(p);
+    if (!p->IsFlagSet(Page::BLACK_PAGE)) {
+      DiscoverGreyObjectsOnPage(p);
+    }
     if (marking_deque()->IsFull()) return;
   }
 }


 void MarkCompactCollector::DiscoverGreyObjectsInNewSpace() {
   NewSpace* space = heap()->new_space();
   for (Page* page : NewSpacePageRange(space->bottom(), space->top())) {
     DiscoverGreyObjectsOnPage(page);
     if (marking_deque()->IsFull()) return;
(...skipping 993 matching lines...)
@@ -2909,22 +2922,23 @@
   }

   return String::cast(*p);
 }

 bool MarkCompactCollector::IsSlotInBlackObject(MemoryChunk* p, Address slot) {
   Space* owner = p->owner();
   DCHECK(owner != heap_->lo_space() && owner != nullptr);
   USE(owner);

-  // We may be part of a black area.
-  if (Marking::IsBlackOrGrey(ObjectMarking::MarkBitFrom(slot))) {
+  // If we are on a black page, we cannot find the actual object start
+  // easiliy. We just return true but do not set the out_object.
+  if (p->IsFlagSet(Page::BLACK_PAGE)) {
     return true;
   }

   uint32_t mark_bit_index = p->AddressToMarkbitIndex(slot);
   unsigned int cell_index = mark_bit_index >> Bitmap::kBitsPerCellLog2;
   MarkBit::CellType index_mask = 1u << Bitmap::IndexInCell(mark_bit_index);
   MarkBit::CellType* cells = p->markbits()->cells();
   Address base_address = p->area_start();
   unsigned int base_address_cell_index = Bitmap::IndexToCell(
       Bitmap::CellAlignIndex(p->AddressToMarkbitIndex(base_address)));
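
When the page is not a black page, the function falls back to the mark bitmap, which it indexes directly from the slot address: one mark bit per pointer-sized word, 32 bits per bitmap cell. A worked sketch of that address-to-bit mapping, with assumed constants for a 64-bit build rather than V8's actual Bitmap definitions:

    // Sketch only: map a slot to (cell index, bit mask) in the mark bitmap.
    #include <cstdint>

    constexpr uintptr_t kPointerSizeLog2 = 3;  // 8-byte words assumed.
    constexpr uint32_t kBitsPerCellLog2 = 5;   // 32 bits per cell assumed.

    struct MarkBitPosition {
      uint32_t cell_index;  // Which 32-bit cell of the bitmap.
      uint32_t index_mask;  // Which bit inside that cell.
    };

    MarkBitPosition AddressToMarkBit(uintptr_t area_start, uintptr_t slot) {
      uint32_t mark_bit_index =
          static_cast<uint32_t>((slot - area_start) >> kPointerSizeLog2);
      return {mark_bit_index >> kBitsPerCellLog2,
              1u << (mark_bit_index & ((1u << kBitsPerCellLog2) - 1))};
    }

The rest of the function, largely unchanged in this patch, uses these indices to find the black object, if any, whose mark bits cover the slot.
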
(...skipping 76 matching lines...)
@@ -3007,30 +3021,41 @@
     // for it.
     CHECK(large_object->IsHeapObject());
     HeapObject* large_heap_object = HeapObject::cast(large_object);

     if (IsMarked(large_heap_object)) {
       return large_heap_object;
     }
     return nullptr;
   }

-  LiveObjectIterator<kBlackObjects> it(p);
-  HeapObject* object = nullptr;
-  while ((object = it.Next()) != nullptr) {
-    int size = object->Size();
-    if (object->address() > slot) return nullptr;
-    if (object->address() <= slot && slot < (object->address() + size)) {
-      return object;
+  if (p->IsFlagSet(Page::BLACK_PAGE)) {
+    HeapObjectIterator it(p);
+    HeapObject* object = nullptr;
+    while ((object = it.Next()) != nullptr) {
+      int size = object->Size();
+      if (object->address() > slot) return nullptr;
+      if (object->address() <= slot && slot < (object->address() + size)) {
+        return object;
+      }
+    }
+  } else {
+    LiveObjectIterator<kBlackObjects> it(p);
+    HeapObject* object = nullptr;
+    while ((object = it.Next()) != nullptr) {
+      int size = object->Size();
+      if (object->address() > slot) return nullptr;
+      if (object->address() <= slot && slot < (object->address() + size)) {
+        return object;
+      }
     }
   }
-
   return nullptr;
 }


 void MarkCompactCollector::EvacuateNewSpacePrologue() {
   NewSpace* new_space = heap()->new_space();
   // Append the list of new space pages to be processed.
   for (Page* p : NewSpacePageRange(new_space->bottom(), new_space->top())) {
     newspace_evacuation_candidates_.Add(p);
   }
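
The two branches added here run the same containment scan and differ only in the iterator: on a black page every mark bit is set, so the bitmap cannot delimit objects and a plain HeapObjectIterator has to walk all of them, whereas other pages use LiveObjectIterator<kBlackObjects> to visit only the marked objects. The shared scan, written once as a generic sketch (the iterator and object types are stand-ins, not V8 classes):

    // Sketch only: walk objects in address order; stop at the first object
    // whose extent covers the slot, or as soon as the walk passes the slot.
    #include <cstdint>

    template <typename ObjectIterator, typename HeapObject>
    HeapObject* FindObjectContaining(ObjectIterator* it, uintptr_t slot) {
      for (HeapObject* object = it->Next(); object != nullptr;
           object = it->Next()) {
        uintptr_t start = reinterpret_cast<uintptr_t>(object->address());
        uintptr_t end = start + static_cast<uintptr_t>(object->Size());
        if (start > slot) return nullptr;  // Objects are ordered; we passed it.
        if (slot < end) return object;     // Slot lies inside this object.
      }
      return nullptr;
    }
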
(...skipping 316 matching lines...)
@@ -3353,20 +3378,21 @@
 };

 int MarkCompactCollector::Sweeper::RawSweep(
     Page* p, FreeListRebuildingMode free_list_mode,
     FreeSpaceTreatmentMode free_space_mode) {
   Space* space = p->owner();
   DCHECK_NOT_NULL(space);
   DCHECK(free_list_mode == IGNORE_FREE_LIST || space->identity() == OLD_SPACE ||
          space->identity() == CODE_SPACE || space->identity() == MAP_SPACE);
   DCHECK(!p->IsEvacuationCandidate() && !p->SweepingDone());
+  DCHECK(!p->IsFlagSet(Page::BLACK_PAGE));

   // Before we sweep objects on the page, we free dead array buffers which
   // requires valid mark bits.
   ArrayBufferTracker::FreeDead(p);

   Address free_start = p->area_start();
   DCHECK(reinterpret_cast<intptr_t>(free_start) % (32 * kPointerSize) == 0);

   // If we use the skip list for code space pages, we have to lock the skip
   // list because it could be accessed concurrently by the runtime or the
(...skipping 532 matching lines...)
@@ -3905,36 +3931,55 @@
   return page;
 }

 void MarkCompactCollector::Sweeper::AddSweepingPageSafe(AllocationSpace space,
                                                         Page* page) {
   base::LockGuard<base::Mutex> guard(&mutex_);
   sweeping_list_[space].push_back(page);
 }

 void MarkCompactCollector::StartSweepSpace(PagedSpace* space) {
+  Address space_top = space->top();
   space->ClearStats();

   int will_be_swept = 0;
   bool unused_page_present = false;

   // Loop needs to support deletion if live bytes == 0 for a page.
   for (auto it = space->begin(); it != space->end();) {
     Page* p = *(it++);
     DCHECK(p->SweepingDone());

     if (p->IsEvacuationCandidate()) {
       // Will be processed in EvacuateNewSpaceAndCandidates.
       DCHECK(evacuation_candidates_.length() > 0);
       continue;
     }

+    // We can not sweep black pages, since all mark bits are set for these
+    // pages.
+    if (p->IsFlagSet(Page::BLACK_PAGE)) {
+      p->ClearLiveness();
+      p->concurrent_sweeping_state().SetValue(Page::kSweepingDone);
+      p->ClearFlag(Page::BLACK_PAGE);
+      // Area above the high watermark is free.
+      Address free_start = p->HighWaterMark();
+      // Check if the space top was in this page, which means that the
+      // high watermark is not up-to-date.
+      if (free_start < space_top && space_top <= p->area_end()) {
+        free_start = space_top;
+      }
+      int size = static_cast<int>(p->area_end() - free_start);
+      space->Free(free_start, size);
+      continue;
+    }
+
     if (p->IsFlagSet(Page::NEVER_ALLOCATE_ON_PAGE)) {
       // We need to sweep the page to get it into an iterable state again. Note
       // that this adds unusable memory into the free list that is later on
       // (in the free list) dropped again. Since we only use the flag for
       // testing this is fine.
       p->concurrent_sweeping_state().SetValue(Page::kSweepingInProgress);
       Sweeper::RawSweep(p, Sweeper::IGNORE_FREE_LIST,
                         Heap::ShouldZapGarbage() ? Sweeper::ZAP_FREE_SPACE
                                                  : Sweeper::IGNORE_FREE_SPACE);
       continue;
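
On a black page the sweeper has nothing to reclaim between objects, so the new branch only returns the untouched tail of the page to the free list: everything above the high watermark, unless the space's current allocation top still points into this page, in which case the watermark can lag behind and the top is the true boundary. A small sketch of that computation (helper name and types are assumptions, not V8 code):

    // Sketch only: compute the free tail of a black page.
    #include <cstdint>

    struct FreeRange {
      uintptr_t start;
      int size;
    };

    FreeRange FreeTailOfBlackPage(uintptr_t high_water_mark, uintptr_t area_end,
                                  uintptr_t space_top) {
      uintptr_t free_start = high_water_mark;
      if (free_start < space_top && space_top <= area_end) {
        free_start = space_top;  // Allocation top is ahead of the watermark.
      }
      return {free_start, static_cast<int>(area_end - free_start)};
    }
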
(...skipping 97 matching lines...)
@@ -4038,10 +4083,10 @@
       // The target is always in old space, we don't have to record the slot in
       // the old-to-new remembered set.
       DCHECK(!heap()->InNewSpace(target));
       RecordRelocSlot(host, &rinfo, target);
     }
   }
 }

 }  // namespace internal
 }  // namespace v8