Chromium Code Reviews — Index: src/heap/incremental-marking.cc |
| diff --git a/src/heap/incremental-marking.cc b/src/heap/incremental-marking.cc |
| index 585fdfc9723472f5e34633e71e08690bb938cadb..343542e8e53d4a65062a3cff1ccd83b1a9eeb02c 100644 |
| --- a/src/heap/incremental-marking.cc |
| +++ b/src/heap/incremental-marking.cc |
| @@ -43,10 +43,11 @@ IncrementalMarking::IncrementalMarking(Heap* heap) |
| bool IncrementalMarking::BaseRecordWrite(HeapObject* obj, Object* value) { |
| HeapObject* value_heap_obj = HeapObject::cast(value); |
| - DCHECK(!ObjectMarking::IsImpossible(value_heap_obj, |
| - marking_state(value_heap_obj))); |
| - DCHECK(!ObjectMarking::IsImpossible(obj, marking_state(obj))); |
| - const bool is_black = ObjectMarking::IsBlack(obj, marking_state(obj)); |
| + DCHECK(!ObjectMarking::IsImpossible<kAtomicity>( |
| + value_heap_obj, marking_state(value_heap_obj))); |
| + DCHECK(!ObjectMarking::IsImpossible<kAtomicity>(obj, marking_state(obj))); |
|
Michael Lippautz
2017/05/03 07:49:51
Is IsImpossible safe for concurrent marking? I rem… [reviewer comment truncated in page extraction]
ulan
2017/05/03 09:00:28
It is not safe indeed. I will remove bunch of dche… [reviewer comment truncated in page extraction; "dche" is presumably "DCHECKs" — verify against the original review thread]
Hannes Payer (out of office)
2017/05/03 10:08:53
Is the current implementation of the check not cor… [reviewer comment truncated in page extraction]
ulan
2017/05/03 12:09:30
In general IsImpossible can produce false positive… [reviewer comment truncated in page extraction]
|
| + const bool is_black = |
| + ObjectMarking::IsBlack<kAtomicity>(obj, marking_state(obj)); |
| if (is_black && WhiteToGreyAndPush(value_heap_obj)) { |
| RestartIfNotMarking(); |
| @@ -120,7 +121,7 @@ void IncrementalMarking::RecordWriteIntoCodeSlow(Code* host, RelocInfo* rinfo, |
| } |
| bool IncrementalMarking::WhiteToGreyAndPush(HeapObject* obj) { |
| - if (ObjectMarking::WhiteToGrey(obj, marking_state(obj))) { |
| + if (ObjectMarking::WhiteToGrey<kAtomicity>(obj, marking_state(obj))) { |
| marking_deque()->Push(obj); |
| return true; |
| } |
| @@ -144,12 +145,12 @@ void IncrementalMarking::TransferMark(Heap* heap, HeapObject* from, |
| MarkBit new_mark_bit = ObjectMarking::MarkBitFrom(to, marking_state(to)); |
| MarkBit old_mark_bit = ObjectMarking::MarkBitFrom(from, marking_state(from)); |
| - if (Marking::IsBlack(old_mark_bit)) { |
| - bool success = Marking::WhiteToBlack(new_mark_bit); |
| + if (Marking::IsBlack<kAtomicity>(old_mark_bit)) { |
| + bool success = Marking::WhiteToBlack<kAtomicity>(new_mark_bit); |
| DCHECK(success); |
| USE(success); |
| - } else if (Marking::IsGrey(old_mark_bit)) { |
| - bool success = Marking::WhiteToGrey(new_mark_bit); |
| + } else if (Marking::IsGrey<kAtomicity>(old_mark_bit)) { |
| + bool success = Marking::WhiteToGrey<kAtomicity>(new_mark_bit); |
| DCHECK(success); |
| USE(success); |
| marking_deque()->Push(to); |
| @@ -194,11 +195,11 @@ class IncrementalMarkingMarkingVisitor |
| } while (scan_until_end && start_offset < object_size); |
| chunk->set_progress_bar(start_offset); |
| if (start_offset < object_size) { |
| - if (ObjectMarking::IsGrey( |
| + if (ObjectMarking::IsGrey<IncrementalMarking::kAtomicity>( |
| object, heap->incremental_marking()->marking_state(object))) { |
| heap->incremental_marking()->marking_deque()->Unshift(object); |
| } else { |
| - DCHECK(ObjectMarking::IsBlack( |
| + DCHECK(ObjectMarking::IsBlack<IncrementalMarking::kAtomicity>( |
| object, heap->incremental_marking()->marking_state(object))); |
| heap->mark_compact_collector()->UnshiftBlack(object); |
| } |
| @@ -223,8 +224,9 @@ class IncrementalMarkingMarkingVisitor |
| // Mark the object grey if it is white, do not enque it into the marking |
| // deque. |
| Heap* heap = map->GetHeap(); |
| - bool ignored = ObjectMarking::WhiteToGrey( |
| - heap_obj, heap->incremental_marking()->marking_state(heap_obj)); |
| + bool ignored = |
| + ObjectMarking::WhiteToGrey<IncrementalMarking::kAtomicity>( |
| + heap_obj, heap->incremental_marking()->marking_state(heap_obj)); |
| USE(ignored); |
| } |
| } |
| @@ -259,13 +261,14 @@ class IncrementalMarkingMarkingVisitor |
| // Returns true if object needed marking and false otherwise. |
| INLINE(static bool MarkObjectWithoutPush(Heap* heap, Object* obj)) { |
| HeapObject* heap_object = HeapObject::cast(obj); |
| - return ObjectMarking::WhiteToBlack( |
| + return ObjectMarking::WhiteToBlack<IncrementalMarking::kAtomicity>( |
| heap_object, heap->incremental_marking()->marking_state(heap_object)); |
| } |
| }; |
| void IncrementalMarking::IterateBlackObject(HeapObject* object) { |
| - if (IsMarking() && ObjectMarking::IsBlack(object, marking_state(object))) { |
| + if (IsMarking() && |
| + ObjectMarking::IsBlack<kAtomicity>(object, marking_state(object))) { |
| Page* page = Page::FromAddress(object->address()); |
| if ((page->owner() != nullptr) && (page->owner()->identity() == LO_SPACE)) { |
| // IterateBlackObject requires us to visit the whole object. |
| @@ -609,7 +612,7 @@ void IncrementalMarking::ProcessWeakCells() { |
| HeapObject* value = HeapObject::cast(weak_cell->value()); |
| // Remove weak cells with live objects from the list, they do not need |
| // clearing. |
| - if (ObjectMarking::IsBlackOrGrey(value, marking_state(value))) { |
| + if (ObjectMarking::IsBlackOrGrey<kAtomicity>(value, marking_state(value))) { |
| // Record slot, if value is pointing to an evacuation candidate. |
| Object** slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset); |
| heap_->mark_compact_collector()->RecordSlot(weak_cell, slot, *slot); |
| @@ -640,9 +643,10 @@ bool ShouldRetainMap(Map* map, int age) { |
| Object* constructor = map->GetConstructor(); |
| Heap* heap = map->GetHeap(); |
| if (!constructor->IsHeapObject() || |
| - ObjectMarking::IsWhite(HeapObject::cast(constructor), |
| - heap->incremental_marking()->marking_state( |
| - HeapObject::cast(constructor)))) { |
| + ObjectMarking::IsWhite<IncrementalMarking::kAtomicity>( |
| + HeapObject::cast(constructor), |
| + heap->incremental_marking()->marking_state( |
| + HeapObject::cast(constructor)))) { |
| // The constructor is dead, no new objects with this map can |
| // be created. Do not retain this map. |
| return false; |
| @@ -672,14 +676,15 @@ void IncrementalMarking::RetainMaps() { |
| int new_age; |
| Map* map = Map::cast(cell->value()); |
| if (i >= number_of_disposed_maps && !map_retaining_is_disabled && |
| - ObjectMarking::IsWhite(map, marking_state(map))) { |
| + ObjectMarking::IsWhite<kAtomicity>(map, marking_state(map))) { |
| if (ShouldRetainMap(map, age)) { |
| WhiteToGreyAndPush(map); |
| } |
| Object* prototype = map->prototype(); |
| if (age > 0 && prototype->IsHeapObject() && |
| - ObjectMarking::IsWhite(HeapObject::cast(prototype), |
| - marking_state(HeapObject::cast(prototype)))) { |
| + ObjectMarking::IsWhite<kAtomicity>( |
| + HeapObject::cast(prototype), |
| + marking_state(HeapObject::cast(prototype)))) { |
| // The prototype is not marked, age the map. |
| new_age = age - 1; |
| } else { |
| @@ -770,21 +775,21 @@ void IncrementalMarking::UpdateMarkingDequeAfterScavenge() { |
| return nullptr; |
| } |
| HeapObject* dest = map_word.ToForwardingAddress(); |
| - if (ObjectMarking::IsBlack(dest, marking_state(dest))) { |
| + if (ObjectMarking::IsBlack<kAtomicity>(dest, marking_state(dest))) { |
| // The object is already processed by the marker. |
| return nullptr; |
| } |
| - DCHECK( |
| - ObjectMarking::IsGrey(obj, marking_state(obj)) || |
| - (obj->IsFiller() && ObjectMarking::IsWhite(obj, marking_state(obj)))); |
| + DCHECK(ObjectMarking::IsGrey<kAtomicity>(obj, marking_state(obj)) || |
| + (obj->IsFiller() && |
| + ObjectMarking::IsWhite<kAtomicity>(obj, marking_state(obj)))); |
| return dest; |
| } else { |
| - DCHECK(ObjectMarking::IsGrey(obj, marking_state(obj)) || |
| + DCHECK(ObjectMarking::IsGrey<kAtomicity>(obj, marking_state(obj)) || |
| (obj->IsFiller() && |
| - ObjectMarking::IsWhite(obj, marking_state(obj))) || |
| + ObjectMarking::IsWhite<kAtomicity>(obj, marking_state(obj))) || |
| (MemoryChunk::FromAddress(obj->address()) |
| ->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) && |
| - ObjectMarking::IsBlack(obj, marking_state(obj)))); |
| + ObjectMarking::IsBlack<kAtomicity>(obj, marking_state(obj)))); |
| // Skip one word filler objects that appear on the |
| // stack when we perform in place array shift. |
| return (obj->map() == filler_map) ? nullptr : obj; |
| @@ -801,11 +806,11 @@ void IncrementalMarking::VisitObject(Map* map, HeapObject* obj, int size) { |
| #if ENABLE_SLOW_DCHECKS |
| MarkBit mark_bit = ObjectMarking::MarkBitFrom(obj, marking_state(obj)); |
| MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address()); |
| - SLOW_DCHECK(Marking::IsGrey(mark_bit) || |
| + SLOW_DCHECK(Marking::IsGrey<kAtomicity>(mark_bit) || |
| (chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) && |
| - Marking::IsBlack(mark_bit))); |
| + Marking::IsBlack<kAtomicity>(mark_bit))); |
| #endif |
| - ObjectMarking::GreyToBlack(obj, marking_state(obj)); |
| + ObjectMarking::GreyToBlack<kAtomicity>(obj, marking_state(obj)); |
| } |
| intptr_t IncrementalMarking::ProcessMarkingDeque( |
| @@ -818,7 +823,7 @@ intptr_t IncrementalMarking::ProcessMarkingDeque( |
| // Left trimming may result in white, grey, or black filler objects on the |
| // marking deque. Ignore these objects. |
| if (obj->IsFiller()) { |
| - DCHECK(!ObjectMarking::IsImpossible(obj, marking_state(obj))); |
| + DCHECK(!ObjectMarking::IsImpossible<kAtomicity>(obj, marking_state(obj))); |
| continue; |
| } |
| @@ -874,7 +879,8 @@ void IncrementalMarking::Hurry() { |
| Context::cast(context)->get(Context::NORMALIZED_MAP_CACHE_INDEX)); |
| if (!cache->IsUndefined(heap_->isolate())) { |
| // Mark the cache black if it is grey. |
| - bool ignored = ObjectMarking::GreyToBlack(cache, marking_state(cache)); |
| + bool ignored = |
| + ObjectMarking::GreyToBlack<kAtomicity>(cache, marking_state(cache)); |
| USE(ignored); |
| } |
| context = Context::cast(context)->next_context_link(); |