Index: src/objects-visiting-inl.h
diff --git a/src/objects-visiting-inl.h b/src/objects-visiting-inl.h
index 7ed0e4abda063c69602bfe9d977695e8e67c5bca..65c93a288bfb553ed792013c185b3896675e26d0 100644
--- a/src/objects-visiting-inl.h
+++ b/src/objects-visiting-inl.h
@@ -66,9 +66,7 @@ void StaticNewSpaceVisitor<StaticVisitor>::Initialize() {
   table_.Register(kVisitFreeSpace, &VisitFreeSpace);
-  table_.Register(kVisitJSWeakMap, &JSObjectVisitor::Visit);
-
-  table_.Register(kVisitJSWeakSet, &JSObjectVisitor::Visit);
+  table_.Register(kVisitJSWeakCollection, &JSObjectVisitor::Visit);
   table_.Register(kVisitJSRegExp, &JSObjectVisitor::Visit);
@@ -182,9 +180,7 @@ void StaticMarkingVisitor<StaticVisitor>::Initialize() {
   table_.Register(kVisitSeqTwoByteString, &DataObjectVisitor::Visit);
-  table_.Register(kVisitJSWeakMap, &StaticVisitor::VisitWeakCollection);
-
-  table_.Register(kVisitJSWeakSet, &StaticVisitor::VisitWeakCollection);
+  table_.Register(kVisitJSWeakCollection, &VisitWeakCollection);
   table_.Register(kVisitOddball,
                   &FixedBodyVisitor<StaticVisitor,
@@ -286,7 +282,6 @@ void StaticMarkingVisitor<StaticVisitor>::VisitCodeTarget(
   // Monomorphic ICs are preserved when possible, but need to be flushed
   // when they might be keeping a Context alive, or when the heap is about
   // to be serialized.
-
   if (FLAG_cleanup_code_caches_at_gc && target->is_inline_cache_stub()
       && (target->ic_state() == MEGAMORPHIC || target->ic_state() == GENERIC ||
          target->ic_state() == POLYMORPHIC || heap->flush_monomorphic_ics() ||
@@ -402,6 +397,40 @@ void StaticMarkingVisitor<StaticVisitor>::VisitAllocationSite(
 template<typename StaticVisitor>
+void StaticMarkingVisitor<StaticVisitor>::VisitWeakCollection(
+    Map* map, HeapObject* object) {
+  Heap* heap = map->GetHeap();
+  JSWeakCollection* weak_collection =
+      reinterpret_cast<JSWeakCollection*>(object);
+  MarkCompactCollector* collector = heap->mark_compact_collector();
+
+  // Enqueue weak collection in linked list of encountered weak collections.
+  if (weak_collection->next() == heap->undefined_value()) {
+    weak_collection->set_next(collector->encountered_weak_collections());
+    collector->set_encountered_weak_collections(weak_collection);
+  }
+
+  // Skip visiting the backing hash table containing the mappings and the
+  // pointer to the other enqueued weak collections, both are post-processed.
+  StaticVisitor::VisitPointers(heap,
+      HeapObject::RawField(object, JSWeakCollection::kPropertiesOffset),
+      HeapObject::RawField(object, JSWeakCollection::kTableOffset));
+  STATIC_ASSERT(JSWeakCollection::kTableOffset + kPointerSize ==
+                JSWeakCollection::kNextOffset);
+  STATIC_ASSERT(JSWeakCollection::kNextOffset + kPointerSize ==
+                JSWeakCollection::kSize);
+
+  if (!weak_collection->table()->IsHashTable()) return;
+
+  // Mark the backing hash table without pushing it on the marking stack.
+  Object** slot = HeapObject::RawField(object, JSWeakCollection::kTableOffset);
+  HeapObject* obj = HeapObject::cast(*slot);
+  heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
+  StaticVisitor::MarkObjectWithoutPush(heap, obj);
+}
+
+
+template<typename StaticVisitor>
 void StaticMarkingVisitor<StaticVisitor>::VisitCode(
     Map* map, HeapObject* object) {
   Heap* heap = map->GetHeap();