| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 55 matching lines...) |
| 66 abort_incremental_marking_(false), | 66 abort_incremental_marking_(false), |
| 67 marking_parity_(ODD_MARKING_PARITY), | 67 marking_parity_(ODD_MARKING_PARITY), |
| 68 compacting_(false), | 68 compacting_(false), |
| 69 was_marked_incrementally_(false), | 69 was_marked_incrementally_(false), |
| 70 sweeping_pending_(false), | 70 sweeping_pending_(false), |
| 71 sequential_sweeping_(false), | 71 sequential_sweeping_(false), |
| 72 tracer_(NULL), | 72 tracer_(NULL), |
| 73 migration_slots_buffer_(NULL), | 73 migration_slots_buffer_(NULL), |
| 74 heap_(NULL), | 74 heap_(NULL), |
| 75 code_flusher_(NULL), | 75 code_flusher_(NULL), |
| 76 encountered_weak_maps_(NULL) { } | 76 encountered_weak_collections_(NULL) { } |
| 77 | 77 |
| 78 | 78 |
| 79 #ifdef VERIFY_HEAP | 79 #ifdef VERIFY_HEAP |
| 80 class VerifyMarkingVisitor: public ObjectVisitor { | 80 class VerifyMarkingVisitor: public ObjectVisitor { |
| 81 public: | 81 public: |
| 82 void VisitPointers(Object** start, Object** end) { | 82 void VisitPointers(Object** start, Object** end) { |
| 83 for (Object** current = start; current < end; current++) { | 83 for (Object** current = start; current < end; current++) { |
| 84 if ((*current)->IsHeapObject()) { | 84 if ((*current)->IsHeapObject()) { |
| 85 HeapObject* object = HeapObject::cast(*current); | 85 HeapObject* object = HeapObject::cast(*current); |
| 86 CHECK(HEAP->mark_compact_collector()->IsMarked(object)); | 86 CHECK(HEAP->mark_compact_collector()->IsMarked(object)); |
| (...skipping 302 matching lines...) |
| 389 } | 389 } |
| 390 | 390 |
| 391 return compacting_; | 391 return compacting_; |
| 392 } | 392 } |
| 393 | 393 |
| 394 | 394 |
| 395 void MarkCompactCollector::CollectGarbage() { | 395 void MarkCompactCollector::CollectGarbage() { |
| 396 // Make sure that Prepare() has been called. The individual steps below will | 396 // Make sure that Prepare() has been called. The individual steps below will |
| 397 // update the state as they proceed. | 397 // update the state as they proceed. |
| 398 ASSERT(state_ == PREPARE_GC); | 398 ASSERT(state_ == PREPARE_GC); |
| 399 ASSERT(encountered_weak_maps_ == Smi::FromInt(0)); | 399 ASSERT(encountered_weak_collections_ == Smi::FromInt(0)); |
| 400 | 400 |
| 401 MarkLiveObjects(); | 401 MarkLiveObjects(); |
| 402 ASSERT(heap_->incremental_marking()->IsStopped()); | 402 ASSERT(heap_->incremental_marking()->IsStopped()); |
| 403 | 403 |
| 404 if (FLAG_collect_maps) ClearNonLiveReferences(); | 404 if (FLAG_collect_maps) ClearNonLiveReferences(); |
| 405 | 405 |
| 406 ClearWeakMaps(); | 406 ClearWeakCollections(); |
| 407 | 407 |
| 408 #ifdef VERIFY_HEAP | 408 #ifdef VERIFY_HEAP |
| 409 if (FLAG_verify_heap) { | 409 if (FLAG_verify_heap) { |
| 410 VerifyMarking(heap_); | 410 VerifyMarking(heap_); |
| 411 } | 411 } |
| 412 #endif | 412 #endif |
| 413 | 413 |
| 414 SweepSpaces(); | 414 SweepSpaces(); |
| 415 | 415 |
| 416 if (!FLAG_collect_maps) ReattachInitialMaps(); | 416 if (!FLAG_collect_maps) ReattachInitialMaps(); |
| (...skipping 1025 matching lines...) |
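
The `ASSERT(encountered_weak_collections_ == Smi::FromInt(0))` above relies on an intrusive linked list: every weak collection the marker encounters is threaded through its own `next` field, with the Smi zero acting as both the empty-list head and the list terminator. A minimal standalone sketch of that pattern (toy types, not V8 code; for clarity it uses `nullptr` for "not yet enqueued" and a separate sentinel for the list end, where V8 reuses the single Smi zero for both):

```cpp
// Sketch only: intrusive list of encountered weak collections.
#include <cstdio>

struct WeakCollectionStub {
  const char* name = "";
  WeakCollectionStub* next = nullptr;  // plays the role of JSWeakMap::next()
};

WeakCollectionStub kEndOfList;                  // stands in for Smi::FromInt(0)
WeakCollectionStub* encountered = &kEndOfList;  // encountered_weak_collections_

void Enqueue(WeakCollectionStub* wc) {
  if (wc->next == nullptr) {  // not yet on the list: enqueue without allocating
    wc->next = encountered;
    encountered = wc;
  }
}

int main() {
  WeakCollectionStub a{"a"}, b{"b"};
  Enqueue(&a);
  Enqueue(&b);
  Enqueue(&a);  // already enqueued: a no-op, as in VisitWeakCollection
  for (WeakCollectionStub* p = encountered; p != &kEndOfList; p = p->next)
    std::printf("%s\n", p->name);  // prints "b" then "a"
}
```
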
| 1442 VisitUnmarkedObject(collector, obj); | 1442 VisitUnmarkedObject(collector, obj); |
| 1443 } | 1443 } |
| 1444 return true; | 1444 return true; |
| 1445 } | 1445 } |
| 1446 | 1446 |
| 1447 INLINE(static void BeforeVisitingSharedFunctionInfo(HeapObject* object)) { | 1447 INLINE(static void BeforeVisitingSharedFunctionInfo(HeapObject* object)) { |
| 1448 SharedFunctionInfo* shared = SharedFunctionInfo::cast(object); | 1448 SharedFunctionInfo* shared = SharedFunctionInfo::cast(object); |
| 1449 shared->BeforeVisitingPointers(); | 1449 shared->BeforeVisitingPointers(); |
| 1450 } | 1450 } |
| 1451 | 1451 |
| 1452 static void VisitJSWeakMap(Map* map, HeapObject* object) { | 1452 static void VisitWeakCollection(Map* map, HeapObject* object) { |
| 1453 MarkCompactCollector* collector = map->GetHeap()->mark_compact_collector(); | 1453 MarkCompactCollector* collector = map->GetHeap()->mark_compact_collector(); |
| 1454 JSWeakMap* weak_map = reinterpret_cast<JSWeakMap*>(object); | 1454 WeakCollection* weak_collection = reinterpret_cast<WeakCollection*>(object); |
| 1455 | 1455 |
| 1456 // Enqueue weak map in linked list of encountered weak maps. | 1456 // Enqueue weak collection in linked list of encountered weak collections. |
| 1457 if (weak_map->next() == Smi::FromInt(0)) { | 1457 if (weak_collection->next() == Smi::FromInt(0)) { |
| 1458 weak_map->set_next(collector->encountered_weak_maps()); | 1458 weak_collection->set_next(collector->encountered_weak_collections()); |
| 1459 collector->set_encountered_weak_maps(weak_map); | 1459 collector->set_encountered_weak_collections(weak_collection); |
| 1460 } | 1460 } |
| 1461 | 1461 |
| 1462 // Skip visiting the backing hash table containing the mappings. | 1462 // Skip visiting the backing hash table containing the mappings. |
| 1463 int object_size = JSWeakMap::BodyDescriptor::SizeOf(map, object); | 1463 int object_size = WeakCollection::BodyDescriptor::SizeOf(map, object); |
| 1464 BodyVisitorBase<MarkCompactMarkingVisitor>::IteratePointers( | 1464 BodyVisitorBase<MarkCompactMarkingVisitor>::IteratePointers( |
| 1465 map->GetHeap(), | 1465 map->GetHeap(), |
| 1466 object, | 1466 object, |
| 1467 JSWeakMap::BodyDescriptor::kStartOffset, | 1467 WeakCollection::BodyDescriptor::kStartOffset, |
| 1468 JSWeakMap::kTableOffset); | 1468 WeakCollection::kTableOffset); |
| 1469 BodyVisitorBase<MarkCompactMarkingVisitor>::IteratePointers( | 1469 BodyVisitorBase<MarkCompactMarkingVisitor>::IteratePointers( |
| 1470 map->GetHeap(), | 1470 map->GetHeap(), |
| 1471 object, | 1471 object, |
| 1472 JSWeakMap::kTableOffset + kPointerSize, | 1472 WeakCollection::kTableOffset + kPointerSize, |
| 1473 object_size); | 1473 object_size); |
| 1474 | 1474 |
| 1475 // Mark the backing hash table without pushing it on the marking stack. | 1475 // Mark the backing hash table without pushing it on the marking stack. |
| 1476 Object* table_object = weak_map->table(); | 1476 Object* table_object = weak_collection->table(); |
| 1477 if (!table_object->IsHashTable()) return; | 1477 if (!table_object->IsHashTable()) return; |
| 1478 ObjectHashTable* table = ObjectHashTable::cast(table_object); | 1478 ObjectHashTable* table = ObjectHashTable::cast(table_object); |
| 1479 Object** table_slot = | 1479 Object** table_slot = |
| 1480 HeapObject::RawField(weak_map, JSWeakMap::kTableOffset); | 1480 HeapObject::RawField(weak_collection, WeakCollection::kTableOffset); |
| 1481 MarkBit table_mark = Marking::MarkBitFrom(table); | 1481 MarkBit table_mark = Marking::MarkBitFrom(table); |
| 1482 collector->RecordSlot(table_slot, table_slot, table); | 1482 collector->RecordSlot(table_slot, table_slot, table); |
| 1483 if (!table_mark.Get()) collector->SetMark(table, table_mark); | 1483 if (!table_mark.Get()) collector->SetMark(table, table_mark); |
| 1484 // Recording the map slot can be skipped, because maps are not compacted. | 1484 // Recording the map slot can be skipped, because maps are not compacted. |
| 1485 collector->MarkObject(table->map(), Marking::MarkBitFrom(table->map())); | 1485 collector->MarkObject(table->map(), Marking::MarkBitFrom(table->map())); |
| 1486 ASSERT(MarkCompactCollector::IsMarked(table->map())); | 1486 ASSERT(MarkCompactCollector::IsMarked(table->map())); |
| 1487 } | 1487 } |
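
The two `IteratePointers` ranges above deliberately straddle `kTableOffset`, so every field of the weak collection is marked strongly except the backing table slot; the table itself is then marked shallowly, keeping its entries weak. A sketch of that visit-everything-but-one-slot shape, with a hypothetical fixed layout:

```cpp
// Sketch only: visit every pointer field except the "table" slot.
#include <cstdio>

struct Obj {
  static const int kFieldCount = 6;
  static const int kTableIndex = 3;  // the slot skipped, like kTableOffset
  void* fields[kFieldCount] = {};
};

template <typename Visitor>
void VisitAllButTable(Obj* o, Visitor&& visit) {
  // Two ranges, mirroring the two IteratePointers calls:
  for (int i = 0; i < Obj::kTableIndex; ++i) visit(o->fields[i]);
  for (int i = Obj::kTableIndex + 1; i < Obj::kFieldCount; ++i)
    visit(o->fields[i]);
}

int main() {
  Obj o;
  int visited = 0;
  VisitAllButTable(&o, [&](void*) { ++visited; });
  std::printf("visited %d of %d fields\n", visited, Obj::kFieldCount);  // 5 of 6
}
```
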
| 1488 | 1488 |
| 1489 private: | 1489 private: |
| 1490 template<int id> | 1490 template<int id> |
| (...skipping 747 matching lines...) |
| 2238 | 2238 |
| 2239 // Mark all objects reachable (transitively) from objects on the marking | 2239 // Mark all objects reachable (transitively) from objects on the marking |
| 2240 // stack including references only considered in the atomic marking pause. | 2240 // stack including references only considered in the atomic marking pause. |
| 2241 void MarkCompactCollector::ProcessEphemeralMarking(ObjectVisitor* visitor) { | 2241 void MarkCompactCollector::ProcessEphemeralMarking(ObjectVisitor* visitor) { |
| 2242 bool work_to_do = true; | 2242 bool work_to_do = true; |
| 2243 ASSERT(marking_deque_.IsEmpty()); | 2243 ASSERT(marking_deque_.IsEmpty()); |
| 2244 while (work_to_do) { | 2244 while (work_to_do) { |
| 2245 isolate()->global_handles()->IterateObjectGroups( | 2245 isolate()->global_handles()->IterateObjectGroups( |
| 2246 visitor, &IsUnmarkedHeapObjectWithHeap); | 2246 visitor, &IsUnmarkedHeapObjectWithHeap); |
| 2247 MarkImplicitRefGroups(); | 2247 MarkImplicitRefGroups(); |
| 2248 ProcessWeakMaps(); | 2248 ProcessWeakCollections(); |
| 2249 work_to_do = !marking_deque_.IsEmpty(); | 2249 work_to_do = !marking_deque_.IsEmpty(); |
| 2250 ProcessMarkingDeque(); | 2250 ProcessMarkingDeque(); |
| 2251 } | 2251 } |
| 2252 } | 2252 } |
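
The `while (work_to_do)` loop above is a fixed-point iteration: marking the value of a live key can make further keys live, so weak collections must be reprocessed until a pass discovers nothing new. A toy standalone version of that fixed point (plain C++ containers, not V8's marking deque):

```cpp
// Sketch only: ephemeron marking as a fixed-point iteration.
#include <cassert>
#include <set>
#include <utility>
#include <vector>

using Obj = int;

std::set<Obj> MarkEphemerons(std::set<Obj> marked,
                             const std::vector<std::pair<Obj, Obj>>& table) {
  bool work_to_do = true;
  while (work_to_do) {  // same shape as the loop in ProcessEphemeralMarking
    work_to_do = false;
    for (const auto& entry : table) {
      if (marked.count(entry.first) && !marked.count(entry.second)) {
        marked.insert(entry.second);  // a newly live value may be a key too
        work_to_do = true;
      }
    }
  }
  return marked;
}

int main() {
  // Key 1 is live; its value 2 keys another entry, so 3 becomes live as well.
  std::set<Obj> marked = MarkEphemerons({1}, {{1, 2}, {2, 3}, {4, 5}});
  assert(marked.count(3) == 1 && marked.count(5) == 0);
}
```
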
| 2253 | 2253 |
| 2254 | 2254 |
| 2255 void MarkCompactCollector::ProcessTopOptimizedFrame(ObjectVisitor* visitor) { | 2255 void MarkCompactCollector::ProcessTopOptimizedFrame(ObjectVisitor* visitor) { |
| 2256 for (StackFrameIterator it(isolate(), isolate()->thread_local_top()); | 2256 for (StackFrameIterator it(isolate(), isolate()->thread_local_top()); |
| 2257 !it.done(); it.Advance()) { | 2257 !it.done(); it.Advance()) { |
| 2258 if (it.frame()->type() == StackFrame::JAVA_SCRIPT) { | 2258 if (it.frame()->type() == StackFrame::JAVA_SCRIPT) { |
| (...skipping 388 matching lines...) |
| 2647 static_cast<DependentCode::DependencyGroup>(g), | 2647 static_cast<DependentCode::DependencyGroup>(g), |
| 2648 group_number_of_entries); | 2648 group_number_of_entries); |
| 2649 new_number_of_entries += group_number_of_entries; | 2649 new_number_of_entries += group_number_of_entries; |
| 2650 } | 2650 } |
| 2651 for (int i = new_number_of_entries; i < number_of_entries; i++) { | 2651 for (int i = new_number_of_entries; i < number_of_entries; i++) { |
| 2652 entries->clear_at(i); | 2652 entries->clear_at(i); |
| 2653 } | 2653 } |
| 2654 } | 2654 } |
| 2655 | 2655 |
| 2656 | 2656 |
| 2657 void MarkCompactCollector::ProcessWeakMaps() { | 2657 void MarkCompactCollector::ProcessWeakCollections() { |
| 2658 GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_WEAKMAP_PROCESS); | 2658 GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_WEAKCOLLECTION_PROCESS); |
| 2659 Object* weak_map_obj = encountered_weak_maps(); | 2659 Object* weak_collection_obj = encountered_weak_collections(); |
| 2660 while (weak_map_obj != Smi::FromInt(0)) { | 2660 while (weak_collection_obj != Smi::FromInt(0)) { |
| 2661 ASSERT(MarkCompactCollector::IsMarked(HeapObject::cast(weak_map_obj))); | 2661 ASSERT(MarkCompactCollector::IsMarked( |
| 2662 JSWeakMap* weak_map = reinterpret_cast<JSWeakMap*>(weak_map_obj); | 2662 HeapObject::cast(weak_collection_obj))); |
| 2663 ObjectHashTable* table = ObjectHashTable::cast(weak_map->table()); | 2663 WeakCollection* weak_collection = |
| | 2664 reinterpret_cast<WeakCollection*>(weak_collection_obj); |
| | 2665 ObjectHashTable* table = ObjectHashTable::cast(weak_collection->table()); |
| 2664 Object** anchor = reinterpret_cast<Object**>(table->address()); | 2666 Object** anchor = reinterpret_cast<Object**>(table->address()); |
| 2665 for (int i = 0; i < table->Capacity(); i++) { | 2667 for (int i = 0; i < table->Capacity(); i++) { |
| 2666 if (MarkCompactCollector::IsMarked(HeapObject::cast(table->KeyAt(i)))) { | 2668 if (MarkCompactCollector::IsMarked(HeapObject::cast(table->KeyAt(i)))) { |
| 2667 Object** key_slot = | 2669 Object** key_slot = |
| 2668 HeapObject::RawField(table, FixedArray::OffsetOfElementAt( | 2670 HeapObject::RawField(table, FixedArray::OffsetOfElementAt( |
| 2669 ObjectHashTable::EntryToIndex(i))); | 2671 ObjectHashTable::EntryToIndex(i))); |
| 2670 RecordSlot(anchor, key_slot, *key_slot); | 2672 RecordSlot(anchor, key_slot, *key_slot); |
| 2671 Object** value_slot = | 2673 Object** value_slot = |
| 2672 HeapObject::RawField(table, FixedArray::OffsetOfElementAt( | 2674 HeapObject::RawField(table, FixedArray::OffsetOfElementAt( |
| 2673 ObjectHashTable::EntryToValueIndex(i))); | 2675 ObjectHashTable::EntryToValueIndex(i))); |
| 2674 MarkCompactMarkingVisitor::MarkObjectByPointer( | 2676 MarkCompactMarkingVisitor::MarkObjectByPointer( |
| 2675 this, anchor, value_slot); | 2677 this, anchor, value_slot); |
| 2676 } | 2678 } |
| 2677 } | 2679 } |
| 2678 weak_map_obj = weak_map->next(); | 2680 weak_collection_obj = weak_collection->next(); |
| 2679 } | 2681 } |
| 2680 } | 2682 } |
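
`EntryToIndex` and `EntryToValueIndex` address a flat `FixedArray` in which each entry's key and value sit side by side after a small header. A sketch of that layout arithmetic (the header size here is a made-up placeholder, not V8's actual constant):

```cpp
// Sketch only: flat key/value layout addressed by entry index.
#include <cstdio>

const int kElementsStartIndex = 3;  // hypothetical header size, not V8's value
const int kEntrySize = 2;           // key + value stored side by side

int EntryToIndex(int entry) { return kElementsStartIndex + entry * kEntrySize; }
int EntryToValueIndex(int entry) { return EntryToIndex(entry) + 1; }

int main() {
  for (int entry = 0; entry < 3; ++entry)
    std::printf("entry %d: key at %d, value at %d\n",
                entry, EntryToIndex(entry), EntryToValueIndex(entry));
}
```
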
| 2681 | 2683 |
| 2682 | 2684 |
| 2683 void MarkCompactCollector::ClearWeakMaps() { | 2685 void MarkCompactCollector::ClearWeakCollections() { |
| 2684 GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_WEAKMAP_CLEAR); | 2686 GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_WEAKCOLLECTION_CLEAR); |
| 2685 Object* weak_map_obj = encountered_weak_maps(); | 2687 Object* weak_collection_obj = encountered_weak_collections(); |
| 2686 while (weak_map_obj != Smi::FromInt(0)) { | 2688 while (weak_collection_obj != Smi::FromInt(0)) { |
| 2687 ASSERT(MarkCompactCollector::IsMarked(HeapObject::cast(weak_map_obj))); | 2689 ASSERT(MarkCompactCollector::IsMarked( |
| 2688 JSWeakMap* weak_map = reinterpret_cast<JSWeakMap*>(weak_map_obj); | 2690 HeapObject::cast(weak_collection_obj))); |
| 2689 ObjectHashTable* table = ObjectHashTable::cast(weak_map->table()); | 2691 WeakCollection* weak_collection = |
| | 2692 reinterpret_cast<WeakCollection*>(weak_collection_obj); |
| | 2693 ObjectHashTable* table = ObjectHashTable::cast(weak_collection->table()); |
| 2690 for (int i = 0; i < table->Capacity(); i++) { | 2694 for (int i = 0; i < table->Capacity(); i++) { |
| 2691 if (!MarkCompactCollector::IsMarked(HeapObject::cast(table->KeyAt(i)))) { | 2695 if (!MarkCompactCollector::IsMarked(HeapObject::cast(table->KeyAt(i)))) { |
| 2692 table->RemoveEntry(i); | 2696 table->RemoveEntry(i); |
| 2693 } | 2697 } |
| 2694 } | 2698 } |
| 2695 weak_map_obj = weak_map->next(); | 2699 weak_collection_obj = weak_collection->next(); |
| 2696 weak_map->set_next(Smi::FromInt(0)); | 2700 weak_collection->set_next(Smi::FromInt(0)); |
| 2697 } | 2701 } |
| 2698 set_encountered_weak_maps(Smi::FromInt(0)); | 2702 set_encountered_weak_collections(Smi::FromInt(0)); |
| 2699 } | 2703 } |
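
Clearing is the counterpart of processing: any entry whose key did not survive marking is dropped, and each collection's `next` field is reset to the Smi-zero sentinel so the list can be rebuilt next cycle. A toy sketch of the dead-entry sweep (standard containers standing in for `ObjectHashTable`):

```cpp
// Sketch only: remove entries whose keys died during marking.
#include <algorithm>
#include <cassert>
#include <set>
#include <utility>
#include <vector>

using Obj = int;
using Table = std::vector<std::pair<Obj, Obj>>;

void ClearDeadEntries(Table& table, const std::set<Obj>& marked) {
  table.erase(std::remove_if(table.begin(), table.end(),
                             [&](const std::pair<Obj, Obj>& e) {
                               return marked.count(e.first) == 0;  // dead key
                             }),
              table.end());
}

int main() {
  Table t = {{1, 10}, {2, 20}};
  ClearDeadEntries(t, /*marked=*/{1});
  assert(t.size() == 1 && t[0].first == 1);  // only the live key remains
}
```
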
| 2700 | 2704 |
| 2701 | 2705 |
| 2702 // We scavenge new space simultaneously with sweeping. This is done in two | 2706 // We scavenge new space simultaneously with sweeping. This is done in two |
| 2703 // passes. | 2707 // passes. |
| 2704 // | 2708 // |
| 2705 // The first pass migrates all live objects from one semispace to another or | 2709 // The first pass migrates all live objects from one semispace to another or |
| 2706 // promotes them to old space. The forwarding address is written directly | 2710 // promotes them to old space. The forwarding address is written directly |
| 2707 // into the first word of the object without any encoding. If the object | 2711 // into the first word of the object without any encoding. If the object |
| 2708 // is dead, we write NULL as the forwarding address. | 2712 // is dead, we write NULL as the forwarding address. |
| (...skipping 1601 matching lines...) |
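
A toy illustration of the forwarding-address scheme the comment above describes: pass one writes the object's new location, unencoded, into its first word (NULL for dead objects), and pass two redirects references by reading that word. Types and helpers here are invented for the sketch:

```cpp
// Sketch only: forwarding addresses written into the old object's first word.
#include <cassert>

struct ToyObject {
  ToyObject* forwarding = nullptr;  // first word: new address, or NULL if dead
};

// Pass 1: move a live object and leave its new address behind, unencoded.
ToyObject* Migrate(ToyObject* from, ToyObject* to_space, int* cursor) {
  ToyObject* to = &to_space[(*cursor)++];
  from->forwarding = to;
  return to;
}

// Pass 2: a stale reference is redirected through the forwarding word.
ToyObject* UpdateReference(ToyObject* ref) {
  assert(ref->forwarding != nullptr && "reference to a dead object");
  return ref->forwarding;
}

int main() {
  ToyObject from_space[2], to_space[2];
  int cursor = 0;
  ToyObject* new_loc = Migrate(&from_space[0], to_space, &cursor);
  assert(UpdateReference(&from_space[0]) == new_loc);
}
```
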
| 4310 while (buffer != NULL) { | 4314 while (buffer != NULL) { |
| 4311 SlotsBuffer* next_buffer = buffer->next(); | 4315 SlotsBuffer* next_buffer = buffer->next(); |
| 4312 DeallocateBuffer(buffer); | 4316 DeallocateBuffer(buffer); |
| 4313 buffer = next_buffer; | 4317 buffer = next_buffer; |
| 4314 } | 4318 } |
| 4315 *buffer_address = NULL; | 4319 *buffer_address = NULL; |
| 4316 } | 4320 } |
| 4317 | 4321 |
| 4318 | 4322 |
| 4319 } } // namespace v8::internal | 4323 } } // namespace v8::internal |