Chromium Code Reviews

Unified Diff: src/heap/heap.cc

Issue 2849763004: [heap] Remove unused field of IterateAndScavengePromotedObjectsVisitor. (Closed)
Patch Set: Created 3 years, 7 months ago
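
Summary (drawn from the diff below): IterateAndScavengePromotedObjectsVisitor kept a HeapObject* target_ field only so it could pass the scanned object to RecordSlot and assert DCHECK_EQ(host, target_). Because every ObjectVisitor callback now receives the scanned object as the host parameter, the field, the extra constructor argument, and the DCHECK are redundant, and the slot-recording call becomes RecordSlot(host, slot, target). This also resolves the TODO(ulan) at the visitor's construction site, which shrinks from

  IterateAndScavengePromotedObjectsVisitor visitor(this, target, record_slots);

to

  IterateAndScavengePromotedObjectsVisitor visitor(this, record_slots);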
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
 #include "src/heap/heap.h"
 
 #include "src/accessors.h"
 #include "src/api.h"
 #include "src/assembler-inl.h"
 #include "src/ast/context-slot-cache.h"
(...skipping 4794 matching lines...)
        PageRange(new_space_->FromSpaceStart(), new_space_->FromSpaceEnd())) {
     for (Address cursor = page->area_start(), limit = page->area_end();
          cursor < limit; cursor += kPointerSize) {
       Memory::Address_at(cursor) = kFromSpaceZapValue;
     }
   }
 }
 
 class IterateAndScavengePromotedObjectsVisitor final : public ObjectVisitor {
  public:
-  IterateAndScavengePromotedObjectsVisitor(Heap* heap, HeapObject* target,
-                                           bool record_slots)
-      : heap_(heap), target_(target), record_slots_(record_slots) {}
+  IterateAndScavengePromotedObjectsVisitor(Heap* heap, bool record_slots)
+      : heap_(heap), record_slots_(record_slots) {}
 
   inline void VisitPointers(HeapObject* host, Object** start,
                             Object** end) override {
-    DCHECK_EQ(host, target_);
     Address slot_address = reinterpret_cast<Address>(start);
     Page* page = Page::FromAddress(slot_address);
 
     while (slot_address < reinterpret_cast<Address>(end)) {
       Object** slot = reinterpret_cast<Object**>(slot_address);
       Object* target = *slot;
 
       if (target->IsHeapObject()) {
         if (heap_->InFromSpace(target)) {
           Scavenger::ScavengeObject(reinterpret_cast<HeapObject**>(slot),
                                     HeapObject::cast(target));
           target = *slot;
           if (heap_->InNewSpace(target)) {
             SLOW_DCHECK(heap_->InToSpace(target));
             SLOW_DCHECK(target->IsHeapObject());
             RememberedSet<OLD_TO_NEW>::Insert(page, slot_address);
           }
           SLOW_DCHECK(!MarkCompactCollector::IsOnEvacuationCandidate(
               HeapObject::cast(target)));
         } else if (record_slots_ &&
                    MarkCompactCollector::IsOnEvacuationCandidate(
                        HeapObject::cast(target))) {
-          heap_->mark_compact_collector()->RecordSlot(target_, slot, target);
+          heap_->mark_compact_collector()->RecordSlot(host, slot, target);
         }
       }
 
       slot_address += kPointerSize;
     }
   }
 
   inline void VisitCodeEntry(JSFunction* host,
                              Address code_entry_slot) override {
     // Black allocation requires us to process objects referenced by
     // promoted objects.
     if (heap_->incremental_marking()->black_allocation()) {
       Code* code = Code::cast(Code::GetObjectFromEntryAddress(code_entry_slot));
       IncrementalMarking::MarkGrey(heap_, code);
     }
   }
 
  private:
   Heap* heap_;
-  HeapObject* target_;
   bool record_slots_;
 };
 
 void Heap::IterateAndScavengePromotedObject(HeapObject* target, int size,
                                             bool was_marked_black) {
   // We are not collecting slots on new space objects during mutation
   // thus we have to scan for pointers to evacuation candidates when we
   // promote objects. But we should not record any slots in non-black
   // objects. Grey object's slots would be rescanned.
   // White object might not survive until the end of collection
   // it would be a violation of the invariant to record it's slots.
   bool record_slots = false;
   if (incremental_marking()->IsCompacting()) {
     record_slots =
         ObjectMarking::IsBlack(target, MarkingState::Internal(target));
   }
 
-  // TODO(ulan): remove the target, the visitor now gets the host object
-  // in each visit method.
-  IterateAndScavengePromotedObjectsVisitor visitor(this, target, record_slots);
+  IterateAndScavengePromotedObjectsVisitor visitor(this, record_slots);
   if (target->IsJSFunction()) {
     // JSFunctions reachable through kNextFunctionLinkOffset are weak. Slots for
     // this links are recorded during processing of weak lists.
     JSFunction::BodyDescriptorWeakCode::IterateBody(target, size, &visitor);
   } else {
     target->IterateBody(target->map()->instance_type(), size, &visitor);
   }
 
   // When black allocations is on, we have to visit not already marked black
   // objects (in new space) promoted to black pages to keep their references
(...skipping 1542 matching lines...)
     case LO_SPACE:
       return "LO_SPACE";
     default:
       UNREACHABLE();
   }
   return NULL;
 }
 
 }  // namespace internal
 }  // namespace v8
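
For context, a minimal sketch of the pattern the change relies on (not part of this patch; V8-internal types ObjectVisitor, HeapObject, Object, Heap are assumed as they appear in the diff, and the class name is hypothetical): because every visit callback is handed the scanned object as host, a visitor can forward it directly instead of caching it in a field of its own.

// Hypothetical, simplified visitor illustrating the host-parameter pattern.
// It deliberately omits the scavenging, record_slots_, and
// evacuation-candidate checks that the real
// IterateAndScavengePromotedObjectsVisitor performs.
class ExampleSlotRecordingVisitor final : public ObjectVisitor {
 public:
  explicit ExampleSlotRecordingVisitor(Heap* heap) : heap_(heap) {}

  void VisitPointers(HeapObject* host, Object** start, Object** end) override {
    for (Object** slot = start; slot < end; ++slot) {
      Object* value = *slot;
      if (!value->IsHeapObject()) continue;
      // |host| already identifies the object that contains |slot|, so it can
      // be passed straight through; no HeapObject* target_ member is needed.
      heap_->mark_compact_collector()->RecordSlot(host, slot, value);
    }
  }

 private:
  Heap* heap_;
};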
