OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "v8.h" | 5 #include "v8.h" |
6 | 6 |
7 #include "code-stubs.h" | 7 #include "code-stubs.h" |
8 #include "compilation-cache.h" | 8 #include "compilation-cache.h" |
9 #include "cpu-profiler.h" | 9 #include "cpu-profiler.h" |
10 #include "deoptimizer.h" | 10 #include "deoptimizer.h" |
(...skipping 1454 matching lines...)
1465 if (!o->IsHeapObject()) continue; | 1465 if (!o->IsHeapObject()) continue; |
1466 collector->RecordSlot(start, p, o); | 1466 collector->RecordSlot(start, p, o); |
1467 HeapObject* obj = HeapObject::cast(o); | 1467 HeapObject* obj = HeapObject::cast(o); |
1468 MarkBit mark = Marking::MarkBitFrom(obj); | 1468 MarkBit mark = Marking::MarkBitFrom(obj); |
1469 if (mark.Get()) continue; | 1469 if (mark.Get()) continue; |
1470 VisitUnmarkedObject(collector, obj); | 1470 VisitUnmarkedObject(collector, obj); |
1471 } | 1471 } |
1472 return true; | 1472 return true; |
1473 } | 1473 } |
1474 | 1474 |
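For readers skimming the diff, the loop that ends above follows a record-then-visit pattern: each heap-pointer slot is recorded for the slots buffer, targets whose mark bit is already set are skipped, and unmarked targets are visited immediately. A minimal sketch of that control flow, using simplified stand-in types rather than the real V8 classes (Object, MarkBit, MarkCompactCollector), might look roughly like this:

    #include <vector>

    // Illustrative stand-ins only; not the V8 types used in the diff above.
    struct HeapObject { bool marked = false; };

    struct Collector {
      std::vector<HeapObject**> slots;  // stands in for the slots buffer
      void RecordSlot(HeapObject** slot) { slots.push_back(slot); }
      void VisitUnmarkedObject(HeapObject* obj) { obj->marked = true; }
    };

    // Walk [start, end): record each non-null slot, skip already-marked
    // targets, and visit (mark) the rest immediately.
    static bool VisitUnmarkedObjects(Collector* collector,
                                     HeapObject** start, HeapObject** end) {
      for (HeapObject** p = start; p < end; ++p) {
        HeapObject* obj = *p;
        if (obj == nullptr) continue;          // "!o->IsHeapObject()" analogue
        collector->RecordSlot(p);
        if (obj->marked) continue;             // mark bit already set
        collector->VisitUnmarkedObject(obj);   // mark and scan right away
      }
      return true;
    }

In the real collector the mark bit lives in a side bitmap and VisitUnmarkedObject also scans the object's body; the sketch only models the control flow.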
1475 static void VisitWeakCollection(Map* map, HeapObject* object) { | |
1476 MarkCompactCollector* collector = map->GetHeap()->mark_compact_collector(); | |
1477 JSWeakCollection* weak_collection = | |
1478 reinterpret_cast<JSWeakCollection*>(object); | |
1479 | |
1480 // Enqueue weak map in linked list of encountered weak maps. | |
1481 if (weak_collection->next() == Smi::FromInt(0)) { | |
1482 weak_collection->set_next(collector->encountered_weak_collections()); | |
1483 collector->set_encountered_weak_collections(weak_collection); | |
1484 } | |
1485 | |
1486 // Skip visiting the backing hash table containing the mappings. | |
1487 int object_size = JSWeakCollection::BodyDescriptor::SizeOf(map, object); | |
1488 BodyVisitorBase<MarkCompactMarkingVisitor>::IteratePointers( | |
1489 map->GetHeap(), | |
1490 object, | |
1491 JSWeakCollection::BodyDescriptor::kStartOffset, | |
1492 JSWeakCollection::kTableOffset); | |
1493 BodyVisitorBase<MarkCompactMarkingVisitor>::IteratePointers( | |
1494 map->GetHeap(), | |
1495 object, | |
1496 JSWeakCollection::kTableOffset + kPointerSize, | |
1497 object_size); | |
1498 | |
1499 // Mark the backing hash table without pushing it on the marking stack. | |
1500 Object* table_object = weak_collection->table(); | |
1501 if (!table_object->IsHashTable()) return; | |
1502 WeakHashTable* table = WeakHashTable::cast(table_object); | |
1503 Object** table_slot = | |
1504 HeapObject::RawField(weak_collection, JSWeakCollection::kTableOffset); | |
1505 MarkBit table_mark = Marking::MarkBitFrom(table); | |
1506 collector->RecordSlot(table_slot, table_slot, table); | |
1507 if (!table_mark.Get()) collector->SetMark(table, table_mark); | |
1508 // Recording the map slot can be skipped, because maps are not compacted. | |
1509 collector->MarkObject(table->map(), Marking::MarkBitFrom(table->map())); | |
1510 ASSERT(MarkCompactCollector::IsMarked(table->map())); | |
1511 } | |
1512 | |
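The visitor removed above special-cases the table slot of a JSWeakCollection: the collection is enqueued on a linked list of encountered weak collections, the body is iterated in two ranges that bracket (and skip) the table field, and the backing table is then marked directly without being pushed on the marking stack, so its mappings are not traced strongly. A rough, hedged sketch of that shape, with made-up stand-in types rather than the V8 BodyDescriptor machinery, could look like:

    // Illustrative stand-ins only; none of these names are V8 APIs.
    struct FakeObject { bool marked = false; };

    struct FakeWeakCollection {
      FakeObject* header_field;    // fields before the table slot
      FakeObject* table;           // the backing hash table (special-cased)
      FakeObject* trailing_field;  // fields after the table slot
    };

    static void VisitPointer(FakeObject* o) { if (o != nullptr) o->marked = true; }

    static void VisitWeakCollectionSketch(FakeWeakCollection* wc) {
      // Visit the ranges around the table slot, analogous to the two
      // IteratePointers calls bracketing kTableOffset in the removed code.
      VisitPointer(wc->header_field);
      VisitPointer(wc->trailing_field);

      // Mark the table itself without pushing it on the marking stack, so the
      // key/value mappings it holds are not kept alive strongly here; weak
      // semantics are applied in a later clearing pass.
      if (wc->table != nullptr) wc->table->marked = true;
    }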
1513 private: | 1475 private: |
1514 template<int id> | 1476 template<int id> |
1515 static inline void TrackObjectStatsAndVisit(Map* map, HeapObject* obj); | 1477 static inline void TrackObjectStatsAndVisit(Map* map, HeapObject* obj); |
1516 | 1478 |
1517 // Code flushing support. | 1479 // Code flushing support. |
1518 | 1480 |
1519 static const int kRegExpCodeThreshold = 5; | 1481 static const int kRegExpCodeThreshold = 5; |
1520 | 1482 |
1521 static void UpdateRegExpCodeAgeAndFlush(Heap* heap, | 1483 static void UpdateRegExpCodeAgeAndFlush(Heap* heap, |
1522 JSRegExp* re, | 1484 JSRegExp* re, |
(...skipping 1286 matching lines...)
2809 HeapObject::cast(weak_collection_obj))); | 2771 HeapObject::cast(weak_collection_obj))); |
2810 JSWeakCollection* weak_collection = | 2772 JSWeakCollection* weak_collection = |
2811 reinterpret_cast<JSWeakCollection*>(weak_collection_obj); | 2773 reinterpret_cast<JSWeakCollection*>(weak_collection_obj); |
2812 ObjectHashTable* table = ObjectHashTable::cast(weak_collection->table()); | 2774 ObjectHashTable* table = ObjectHashTable::cast(weak_collection->table()); |
2813 for (int i = 0; i < table->Capacity(); i++) { | 2775 for (int i = 0; i < table->Capacity(); i++) { |
2814 if (!MarkCompactCollector::IsMarked(HeapObject::cast(table->KeyAt(i)))) { | 2776 if (!MarkCompactCollector::IsMarked(HeapObject::cast(table->KeyAt(i)))) { |
2815 table->RemoveEntry(i); | 2777 table->RemoveEntry(i); |
2816 } | 2778 } |
2817 } | 2779 } |
2818 weak_collection_obj = weak_collection->next(); | 2780 weak_collection_obj = weak_collection->next(); |
2819 weak_collection->set_next(Smi::FromInt(0)); | 2781 weak_collection->set_next(heap()->undefined_value()); |
2820 } | 2782 } |
2821 set_encountered_weak_collections(Smi::FromInt(0)); | 2783 set_encountered_weak_collections(Smi::FromInt(0)); |
2822 } | 2784 } |
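The loop ending above walks the singly linked list of encountered weak collections threaded through each collection's next field, drops hash-table entries whose keys were not marked, and unlinks each node as it goes, leaving the list empty. A self-contained sketch of that traversal, with simplified stand-ins (WeakCollection, IsMarkedKey) instead of the real ObjectHashTable API, could look like:

    #include <vector>

    // Illustrative stand-ins only; not the V8 types used in the diff above.
    struct Entry { const void* key; const void* value; bool removed = false; };

    struct WeakCollection {
      std::vector<Entry> table;        // stands in for the ObjectHashTable
      WeakCollection* next = nullptr;  // threads the "encountered" list
    };

    // Assumed marking predicate; in the diff this is MarkCompactCollector::IsMarked.
    static bool IsMarkedKey(const void* key) { return key != nullptr; }

    // Drop entries with unmarked keys and unlink each collection from the
    // encountered list, mirroring the clearing loop above.
    static void ClearWeakCollections(WeakCollection*& encountered_head) {
      WeakCollection* current = encountered_head;
      while (current != nullptr) {
        for (Entry& e : current->table) {
          if (!IsMarkedKey(e.key)) e.removed = true;  // analogous to RemoveEntry(i)
        }
        WeakCollection* next = current->next;
        current->next = nullptr;       // mirrors weak_collection->set_next(...)
        current = next;
      }
      encountered_head = nullptr;      // mirrors set_encountered_weak_collections(...)
    }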
2823 | 2785 |
2824 | 2786 |
2825 // We scavenge new space simultaneously with sweeping. This is done in two | 2787 // We scavenge new space simultaneously with sweeping. This is done in two |
2826 // passes. | 2788 // passes. |
2827 // | 2789 // |
2828 // The first pass migrates all alive objects from one semispace to another or | 2790 // The first pass migrates all alive objects from one semispace to another or |
2829 // promotes them to old space. Forwarding address is written directly into | 2791 // promotes them to old space. Forwarding address is written directly into |
(...skipping 1671 matching lines...)
4501 while (buffer != NULL) { | 4463 while (buffer != NULL) { |
4502 SlotsBuffer* next_buffer = buffer->next(); | 4464 SlotsBuffer* next_buffer = buffer->next(); |
4503 DeallocateBuffer(buffer); | 4465 DeallocateBuffer(buffer); |
4504 buffer = next_buffer; | 4466 buffer = next_buffer; |
4505 } | 4467 } |
4506 *buffer_address = NULL; | 4468 *buffer_address = NULL; |
4507 } | 4469 } |
4508 | 4470 |
4509 | 4471 |
4510 } } // namespace v8::internal | 4472 } } // namespace v8::internal |