Index: src/heap/mark-compact.cc
diff --git a/src/heap/mark-compact.cc b/src/heap/mark-compact.cc
index 3cc08f4cdad6d3d1abfe5ea7483b64b07f2eeaa7..581db4a5b92d80bc8dca2623dab9ae034098122a 100644
--- a/src/heap/mark-compact.cc
+++ b/src/heap/mark-compact.cc
@@ -904,13 +904,13 @@ void CodeFlusher::ProcessJSFunctionCandidates() {
     // setter did not record the slot update and we have to do that manually.
     Address slot = candidate->address() + JSFunction::kCodeEntryOffset;
     Code* target = Code::cast(Code::GetObjectFromEntryAddress(slot));
-    isolate_->heap()->mark_compact_collector()->RecordCodeEntrySlot(slot,
-                                                                    target);
+    isolate_->heap()->mark_compact_collector()->RecordCodeEntrySlot(
+        candidate, slot, target);
 
     Object** shared_code_slot =
         HeapObject::RawField(shared, SharedFunctionInfo::kCodeOffset);
     isolate_->heap()->mark_compact_collector()->RecordSlot(
-        shared_code_slot, shared_code_slot, *shared_code_slot);
+        shared, shared_code_slot, *shared_code_slot);
 
     candidate = next_candidate;
   }
@@ -945,7 +945,7 @@ void CodeFlusher::ProcessSharedFunctionInfoCandidates() {
 
     Object** code_slot =
         HeapObject::RawField(candidate, SharedFunctionInfo::kCodeOffset);
-    isolate_->heap()->mark_compact_collector()->RecordSlot(code_slot, code_slot,
+    isolate_->heap()->mark_compact_collector()->RecordSlot(candidate, code_slot,
                                                            *code_slot);
 
     candidate = next_candidate;
@@ -995,15 +995,15 @@ void CodeFlusher::ProcessOptimizedCodeMaps() {
       Object** code_slot = code_map->RawFieldOfElementAt(
          new_length + SharedFunctionInfo::kCachedCodeOffset);
       isolate_->heap()->mark_compact_collector()->RecordSlot(
-          code_slot, code_slot, *code_slot);
+          code_map, code_slot, *code_slot);
       Object** context_slot = code_map->RawFieldOfElementAt(
          new_length + SharedFunctionInfo::kContextOffset);
       isolate_->heap()->mark_compact_collector()->RecordSlot(
-          context_slot, context_slot, *context_slot);
+          code_map, context_slot, *context_slot);
       Object** literals_slot = code_map->RawFieldOfElementAt(
          new_length + SharedFunctionInfo::kLiteralsOffset);
       isolate_->heap()->mark_compact_collector()->RecordSlot(
-          literals_slot, literals_slot, *literals_slot);
+          code_map, literals_slot, *literals_slot);
       new_length += SharedFunctionInfo::kEntryLength;
     }
 
@@ -1017,7 +1017,7 @@ void CodeFlusher::ProcessOptimizedCodeMaps() {
         DCHECK(Marking::IsBlack(Marking::MarkBitFrom(shared_code)));
         Object** slot =
             code_map->RawFieldOfElementAt(SharedFunctionInfo::kSharedCodeIndex);
-        isolate_->heap()->mark_compact_collector()->RecordSlot(slot, slot,
+        isolate_->heap()->mark_compact_collector()->RecordSlot(code_map, slot,
                                                                *slot);
       }
     }
@@ -1254,20 +1254,21 @@ class MarkCompactMarkingVisitor
 
   static void Initialize();
 
-  INLINE(static void VisitPointer(Heap* heap, Object** p)) {
-    MarkObjectByPointer(heap->mark_compact_collector(), p, p);
+  INLINE(static void VisitPointer(Heap* heap, HeapObject* object, Object** p)) {
+    MarkObjectByPointer(heap->mark_compact_collector(), object, p);
   }
 
-  INLINE(static void VisitPointers(Heap* heap, Object** start, Object** end)) {
+  INLINE(static void VisitPointers(Heap* heap, HeapObject* object,
+                                   Object** start, Object** end)) {
     // Mark all objects pointed to in [start, end).
     const int kMinRangeForMarkingRecursion = 64;
     if (end - start >= kMinRangeForMarkingRecursion) {
-      if (VisitUnmarkedObjects(heap, start, end)) return;
+      if (VisitUnmarkedObjects(heap, object, start, end)) return;
       // We are close to a stack overflow, so just mark the objects.
     }
     MarkCompactCollector* collector = heap->mark_compact_collector();
     for (Object** p = start; p < end; p++) {
-      MarkObjectByPointer(collector, start, p);
+      MarkObjectByPointer(collector, object, p);
     }
   }
 
@@ -1290,12 +1291,12 @@ class MarkCompactMarkingVisitor
 
   // Mark object pointed to by p.
   INLINE(static void MarkObjectByPointer(MarkCompactCollector* collector,
-                                         Object** anchor_slot, Object** p)) {
+                                         HeapObject* object, Object** p)) {
     if (!(*p)->IsHeapObject()) return;
-    HeapObject* object = ShortCircuitConsString(p);
-    collector->RecordSlot(anchor_slot, p, object);
-    MarkBit mark = Marking::MarkBitFrom(object);
-    collector->MarkObject(object, mark);
+    HeapObject* target_object = ShortCircuitConsString(p);
+    collector->RecordSlot(object, p, target_object);
+    MarkBit mark = Marking::MarkBitFrom(target_object);
+    collector->MarkObject(target_object, mark);
   }
 
 
@@ -1318,8 +1319,8 @@ class MarkCompactMarkingVisitor
 
   // Visit all unmarked objects pointed to by [start, end).
   // Returns false if the operation fails (lack of stack space).
-  INLINE(static bool VisitUnmarkedObjects(Heap* heap, Object** start,
-                                          Object** end)) {
+  INLINE(static bool VisitUnmarkedObjects(Heap* heap, HeapObject* object,
+                                          Object** start, Object** end)) {
     // Return false is we are close to the stack limit.
     StackLimitCheck check(heap->isolate());
     if (check.HasOverflowed()) return false;
@@ -1329,7 +1330,7 @@ class MarkCompactMarkingVisitor
     for (Object** p = start; p < end; p++) {
       Object* o = *p;
       if (!o->IsHeapObject()) continue;
-      collector->RecordSlot(start, p, o);
+      collector->RecordSlot(object, p, o);
       HeapObject* obj = HeapObject::cast(o);
       MarkBit mark = Marking::MarkBitFrom(obj);
       if (Marking::IsBlackOrGrey(mark)) continue;
@@ -1370,7 +1371,7 @@ class MarkCompactMarkingVisitor
     FixedArray* data = FixedArray::cast(re->data());
     Object** slot =
         data->data_start() + JSRegExp::saved_code_index(is_one_byte);
-    heap->mark_compact_collector()->RecordSlot(slot, slot, code);
+    heap->mark_compact_collector()->RecordSlot(data, slot, code);
 
     // Set a number in the 0-255 range to guarantee no smi overflow.
     re->SetDataAt(JSRegExp::code_index(is_one_byte),
@@ -2152,7 +2153,7 @@ void MarkCompactCollector::RetainMaps() {
     if (i != new_length) {
       retained_maps->Set(new_length, cell);
       Object** slot = retained_maps->Slot(new_length);
-      RecordSlot(slot, slot, cell);
+      RecordSlot(retained_maps, slot, cell);
       retained_maps->Set(new_length + 1, Smi::FromInt(new_age));
     } else if (new_age != age) {
       retained_maps->Set(new_length + 1, Smi::FromInt(new_age));
@@ -2422,7 +2423,7 @@ void MarkCompactCollector::ClearNonLivePrototypeTransitions(Map* map) {
         prototype_transitions->set(header + new_number_of_transitions, cell);
         Object** slot = prototype_transitions->RawFieldOfElementAt(
             header + new_number_of_transitions);
-        RecordSlot(slot, slot, cell);
+        RecordSlot(prototype_transitions, slot, cell);
       }
       new_number_of_transitions++;
     }
@@ -2505,7 +2506,7 @@ void MarkCompactCollector::ClearMapTransitions(Map* map, Map* dead_transition) {
         Name* key = t->GetKey(i);
         t->SetKey(transition_index, key);
         Object** key_slot = t->GetKeySlot(transition_index);
-        RecordSlot(key_slot, key_slot, key);
+        RecordSlot(t, key_slot, key);
         // Target slots do not need to be recorded since maps are not compacted.
         t->SetTarget(transition_index, t->GetTarget(i));
       }
@@ -2601,15 +2602,14 @@ void MarkCompactCollector::ProcessWeakCollections() {
     DCHECK(MarkCompactCollector::IsMarked(weak_collection));
     if (weak_collection->table()->IsHashTable()) {
       ObjectHashTable* table = ObjectHashTable::cast(weak_collection->table());
-      Object** anchor = reinterpret_cast<Object**>(table->address());
       for (int i = 0; i < table->Capacity(); i++) {
         if (MarkCompactCollector::IsMarked(HeapObject::cast(table->KeyAt(i)))) {
           Object** key_slot =
               table->RawFieldOfElementAt(ObjectHashTable::EntryToIndex(i));
-          RecordSlot(anchor, key_slot, *key_slot);
+          RecordSlot(table, key_slot, *key_slot);
           Object** value_slot =
               table->RawFieldOfElementAt(ObjectHashTable::EntryToValueIndex(i));
-          MarkCompactMarkingVisitor::MarkObjectByPointer(this, anchor,
+          MarkCompactMarkingVisitor::MarkObjectByPointer(this, table,
                                                          value_slot);
         }
       }
@@ -2678,9 +2678,9 @@ void MarkCompactCollector::ProcessAndClearWeakCells() {
           MarkBit mark = Marking::MarkBitFrom(value);
           SetMark(value, mark);
           Object** slot = HeapObject::RawField(value, Cell::kValueOffset);
-          RecordSlot(slot, slot, *slot);
+          RecordSlot(value, slot, *slot);
           slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset);
-          RecordSlot(slot, slot, *slot);
+          RecordSlot(weak_cell, slot, *slot);
         } else {
           weak_cell->clear();
         }
@@ -2689,7 +2689,7 @@
       }
     } else {
       Object** slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset);
-      RecordSlot(slot, slot, *slot);
+      RecordSlot(weak_cell, slot, *slot);
     }
     weak_cell_obj = weak_cell->next();
     weak_cell->clear_next(heap());
@@ -4712,10 +4712,11 @@ void MarkCompactCollector::EvictPopularEvacuationCandidate(Page* page) {
 }
 
 
-void MarkCompactCollector::RecordCodeEntrySlot(Address slot, Code* target) {
+void MarkCompactCollector::RecordCodeEntrySlot(HeapObject* object, Address slot,
+                                               Code* target) {
   Page* target_page = Page::FromAddress(reinterpret_cast<Address>(target));
   if (target_page->IsEvacuationCandidate() &&
-      !ShouldSkipEvacuationSlotRecording(reinterpret_cast<Object**>(slot))) {
+      !ShouldSkipEvacuationSlotRecording(object)) {
     if (!SlotsBuffer::AddTo(&slots_buffer_allocator_,
                             target_page->slots_buffer_address(),
                             SlotsBuffer::CODE_ENTRY_SLOT, slot,
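Every hunk above makes the same substitution: RecordSlot and RecordCodeEntrySlot now receive the host HeapObject that physically contains the slot (candidate, shared, code_map, table, weak_cell, ...) instead of a raw anchor slot, so ShouldSkipEvacuationSlotRecording can consult the host object's page directly rather than reinterpret_cast the slot address. The standalone C++ sketch below models only that calling convention; Page, HeapObject, and Collector here are simplified hypothetical stand-ins, not V8's real classes.

#include <cstdio>
#include <vector>

// Hypothetical stand-ins for the types touched by the patch above.
struct Page {
  bool evacuation_candidate = false;  // target page will be compacted away
  bool skip_slot_recording = false;   // host page opts out of recording
};

struct HeapObject {
  Page* page;  // the page this object lives on
};

using Object = HeapObject;  // in this model every slot holds an Object*

struct Collector {
  std::vector<Object**> slots_buffer;

  // Mirrors the new convention: the first parameter is the host object
  // containing the slot, so the skip check reads the host's page instead
  // of deriving a page from the slot address itself.
  void RecordSlot(HeapObject* host, Object** slot, Object* target) {
    if (target == nullptr) return;
    if (target->page->evacuation_candidate &&
        !host->page->skip_slot_recording) {
      slots_buffer.push_back(slot);  // remembered for later pointer updating
    }
  }
};

int main() {
  Page host_page, candidate_page;
  candidate_page.evacuation_candidate = true;

  HeapObject host{&host_page};         // object that owns the slot
  HeapObject target{&candidate_page};  // object the slot points at
  Object* slot_in_host = &target;

  Collector collector;
  collector.RecordSlot(&host, &slot_in_host, slot_in_host);
  std::printf("recorded %zu slot(s)\n", collector.slots_buffer.size());
  return 0;
}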
|