Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(453)

Side by Side Diff: src/mark-compact.cc

Issue 139973004: A64: Synchronize with r15814. (Closed) Base URL: https://v8.googlecode.com/svn/branches/experimental/a64
Patch Set: Created 6 years, 10 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « src/mark-compact.h ('k') | src/messages.js » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 55 matching lines...) Expand 10 before | Expand all | Expand 10 after
66 abort_incremental_marking_(false), 66 abort_incremental_marking_(false),
67 marking_parity_(ODD_MARKING_PARITY), 67 marking_parity_(ODD_MARKING_PARITY),
68 compacting_(false), 68 compacting_(false),
69 was_marked_incrementally_(false), 69 was_marked_incrementally_(false),
70 sweeping_pending_(false), 70 sweeping_pending_(false),
71 sequential_sweeping_(false), 71 sequential_sweeping_(false),
72 tracer_(NULL), 72 tracer_(NULL),
73 migration_slots_buffer_(NULL), 73 migration_slots_buffer_(NULL),
74 heap_(NULL), 74 heap_(NULL),
75 code_flusher_(NULL), 75 code_flusher_(NULL),
76 encountered_weak_maps_(NULL) { } 76 encountered_weak_collections_(NULL) { }
77 77
78 78
79 #ifdef VERIFY_HEAP 79 #ifdef VERIFY_HEAP
80 class VerifyMarkingVisitor: public ObjectVisitor { 80 class VerifyMarkingVisitor: public ObjectVisitor {
81 public: 81 public:
82 void VisitPointers(Object** start, Object** end) { 82 void VisitPointers(Object** start, Object** end) {
83 for (Object** current = start; current < end; current++) { 83 for (Object** current = start; current < end; current++) {
84 if ((*current)->IsHeapObject()) { 84 if ((*current)->IsHeapObject()) {
85 HeapObject* object = HeapObject::cast(*current); 85 HeapObject* object = HeapObject::cast(*current);
86 CHECK(HEAP->mark_compact_collector()->IsMarked(object)); 86 CHECK(HEAP->mark_compact_collector()->IsMarked(object));
(...skipping 302 matching lines...) Expand 10 before | Expand all | Expand 10 after
389 } 389 }
390 390
391 return compacting_; 391 return compacting_;
392 } 392 }
393 393
394 394
395 void MarkCompactCollector::CollectGarbage() { 395 void MarkCompactCollector::CollectGarbage() {
396 // Make sure that Prepare() has been called. The individual steps below will 396 // Make sure that Prepare() has been called. The individual steps below will
397 // update the state as they proceed. 397 // update the state as they proceed.
398 ASSERT(state_ == PREPARE_GC); 398 ASSERT(state_ == PREPARE_GC);
399 ASSERT(encountered_weak_maps_ == Smi::FromInt(0)); 399 ASSERT(encountered_weak_collections_ == Smi::FromInt(0));
400 400
401 MarkLiveObjects(); 401 MarkLiveObjects();
402 ASSERT(heap_->incremental_marking()->IsStopped()); 402 ASSERT(heap_->incremental_marking()->IsStopped());
403 403
404 if (FLAG_collect_maps) ClearNonLiveReferences(); 404 if (FLAG_collect_maps) ClearNonLiveReferences();
405 405
406 ClearWeakMaps(); 406 ClearWeakCollections();
407 407
408 #ifdef VERIFY_HEAP 408 #ifdef VERIFY_HEAP
409 if (FLAG_verify_heap) { 409 if (FLAG_verify_heap) {
410 VerifyMarking(heap_); 410 VerifyMarking(heap_);
411 } 411 }
412 #endif 412 #endif
413 413
414 SweepSpaces(); 414 SweepSpaces();
415 415
416 if (!FLAG_collect_maps) ReattachInitialMaps(); 416 if (!FLAG_collect_maps) ReattachInitialMaps();
(...skipping 1025 matching lines...) Expand 10 before | Expand all | Expand 10 after
1442 VisitUnmarkedObject(collector, obj); 1442 VisitUnmarkedObject(collector, obj);
1443 } 1443 }
1444 return true; 1444 return true;
1445 } 1445 }
1446 1446
1447 INLINE(static void BeforeVisitingSharedFunctionInfo(HeapObject* object)) { 1447 INLINE(static void BeforeVisitingSharedFunctionInfo(HeapObject* object)) {
1448 SharedFunctionInfo* shared = SharedFunctionInfo::cast(object); 1448 SharedFunctionInfo* shared = SharedFunctionInfo::cast(object);
1449 shared->BeforeVisitingPointers(); 1449 shared->BeforeVisitingPointers();
1450 } 1450 }
1451 1451
1452 static void VisitJSWeakMap(Map* map, HeapObject* object) { 1452 static void VisitWeakCollection(Map* map, HeapObject* object) {
1453 MarkCompactCollector* collector = map->GetHeap()->mark_compact_collector(); 1453 MarkCompactCollector* collector = map->GetHeap()->mark_compact_collector();
1454 JSWeakMap* weak_map = reinterpret_cast<JSWeakMap*>(object); 1454 JSWeakCollection* weak_collection =
1455 reinterpret_cast<JSWeakCollection*>(object);
1455 1456
 1456 // Enqueue weak map in linked list of encountered weak maps. 1457 // Enqueue weak collection in linked list of encountered weak collections.
1457 if (weak_map->next() == Smi::FromInt(0)) { 1458 if (weak_collection->next() == Smi::FromInt(0)) {
1458 weak_map->set_next(collector->encountered_weak_maps()); 1459 weak_collection->set_next(collector->encountered_weak_collections());
1459 collector->set_encountered_weak_maps(weak_map); 1460 collector->set_encountered_weak_collections(weak_collection);
1460 } 1461 }
1461 1462
1462 // Skip visiting the backing hash table containing the mappings. 1463 // Skip visiting the backing hash table containing the mappings.
1463 int object_size = JSWeakMap::BodyDescriptor::SizeOf(map, object); 1464 int object_size = JSWeakCollection::BodyDescriptor::SizeOf(map, object);
1464 BodyVisitorBase<MarkCompactMarkingVisitor>::IteratePointers( 1465 BodyVisitorBase<MarkCompactMarkingVisitor>::IteratePointers(
1465 map->GetHeap(), 1466 map->GetHeap(),
1466 object, 1467 object,
1467 JSWeakMap::BodyDescriptor::kStartOffset, 1468 JSWeakCollection::BodyDescriptor::kStartOffset,
1468 JSWeakMap::kTableOffset); 1469 JSWeakCollection::kTableOffset);
1469 BodyVisitorBase<MarkCompactMarkingVisitor>::IteratePointers( 1470 BodyVisitorBase<MarkCompactMarkingVisitor>::IteratePointers(
1470 map->GetHeap(), 1471 map->GetHeap(),
1471 object, 1472 object,
1472 JSWeakMap::kTableOffset + kPointerSize, 1473 JSWeakCollection::kTableOffset + kPointerSize,
1473 object_size); 1474 object_size);
1474 1475
1475 // Mark the backing hash table without pushing it on the marking stack. 1476 // Mark the backing hash table without pushing it on the marking stack.
1476 Object* table_object = weak_map->table(); 1477 Object* table_object = weak_collection->table();
1477 if (!table_object->IsHashTable()) return; 1478 if (!table_object->IsHashTable()) return;
1478 ObjectHashTable* table = ObjectHashTable::cast(table_object); 1479 ObjectHashTable* table = ObjectHashTable::cast(table_object);
1479 Object** table_slot = 1480 Object** table_slot =
1480 HeapObject::RawField(weak_map, JSWeakMap::kTableOffset); 1481 HeapObject::RawField(weak_collection, JSWeakCollection::kTableOffset);
1481 MarkBit table_mark = Marking::MarkBitFrom(table); 1482 MarkBit table_mark = Marking::MarkBitFrom(table);
1482 collector->RecordSlot(table_slot, table_slot, table); 1483 collector->RecordSlot(table_slot, table_slot, table);
1483 if (!table_mark.Get()) collector->SetMark(table, table_mark); 1484 if (!table_mark.Get()) collector->SetMark(table, table_mark);
1484 // Recording the map slot can be skipped, because maps are not compacted. 1485 // Recording the map slot can be skipped, because maps are not compacted.
1485 collector->MarkObject(table->map(), Marking::MarkBitFrom(table->map())); 1486 collector->MarkObject(table->map(), Marking::MarkBitFrom(table->map()));
1486 ASSERT(MarkCompactCollector::IsMarked(table->map())); 1487 ASSERT(MarkCompactCollector::IsMarked(table->map()));
1487 } 1488 }
1488 1489
1489 private: 1490 private:
1490 template<int id> 1491 template<int id>
(...skipping 136 matching lines...) Expand 10 before | Expand all | Expand 10 after
1627 heap->RecordObjectStats(FIXED_ARRAY_TYPE, 1628 heap->RecordObjectStats(FIXED_ARRAY_TYPE,
1628 DESCRIPTOR_ARRAY_SUB_TYPE, 1629 DESCRIPTOR_ARRAY_SUB_TYPE,
1629 fixed_array_size); 1630 fixed_array_size);
1630 } 1631 }
1631 if (map_obj->HasTransitionArray()) { 1632 if (map_obj->HasTransitionArray()) {
1632 int fixed_array_size = map_obj->transitions()->Size(); 1633 int fixed_array_size = map_obj->transitions()->Size();
1633 heap->RecordObjectStats(FIXED_ARRAY_TYPE, 1634 heap->RecordObjectStats(FIXED_ARRAY_TYPE,
1634 TRANSITION_ARRAY_SUB_TYPE, 1635 TRANSITION_ARRAY_SUB_TYPE,
1635 fixed_array_size); 1636 fixed_array_size);
1636 } 1637 }
1637 if (map_obj->code_cache() != heap->empty_fixed_array()) { 1638 if (map_obj->has_code_cache()) {
1639 CodeCache* cache = CodeCache::cast(map_obj->code_cache());
1638 heap->RecordObjectStats( 1640 heap->RecordObjectStats(
1639 FIXED_ARRAY_TYPE, 1641 FIXED_ARRAY_TYPE,
1640 MAP_CODE_CACHE_SUB_TYPE, 1642 MAP_CODE_CACHE_SUB_TYPE,
1641 FixedArray::cast(map_obj->code_cache())->Size()); 1643 cache->default_cache()->Size());
1644 if (!cache->normal_type_cache()->IsUndefined()) {
1645 heap->RecordObjectStats(
1646 FIXED_ARRAY_TYPE,
1647 MAP_CODE_CACHE_SUB_TYPE,
1648 FixedArray::cast(cache->normal_type_cache())->Size());
1649 }
1642 } 1650 }
1643 ObjectStatsVisitBase(kVisitMap, map, obj); 1651 ObjectStatsVisitBase(kVisitMap, map, obj);
1644 } 1652 }
1645 }; 1653 };
1646 1654
1647 1655
1648 template<> 1656 template<>
1649 class MarkCompactMarkingVisitor::ObjectStatsTracker< 1657 class MarkCompactMarkingVisitor::ObjectStatsTracker<
1650 MarkCompactMarkingVisitor::kVisitCode> { 1658 MarkCompactMarkingVisitor::kVisitCode> {
1651 public: 1659 public:
(...skipping 579 matching lines...) Expand 10 before | Expand all | Expand 10 after
2231 2239
2232 // Mark all objects reachable (transitively) from objects on the marking 2240 // Mark all objects reachable (transitively) from objects on the marking
2233 // stack including references only considered in the atomic marking pause. 2241 // stack including references only considered in the atomic marking pause.
2234 void MarkCompactCollector::ProcessEphemeralMarking(ObjectVisitor* visitor) { 2242 void MarkCompactCollector::ProcessEphemeralMarking(ObjectVisitor* visitor) {
2235 bool work_to_do = true; 2243 bool work_to_do = true;
2236 ASSERT(marking_deque_.IsEmpty()); 2244 ASSERT(marking_deque_.IsEmpty());
2237 while (work_to_do) { 2245 while (work_to_do) {
2238 isolate()->global_handles()->IterateObjectGroups( 2246 isolate()->global_handles()->IterateObjectGroups(
2239 visitor, &IsUnmarkedHeapObjectWithHeap); 2247 visitor, &IsUnmarkedHeapObjectWithHeap);
2240 MarkImplicitRefGroups(); 2248 MarkImplicitRefGroups();
2241 ProcessWeakMaps(); 2249 ProcessWeakCollections();
2242 work_to_do = !marking_deque_.IsEmpty(); 2250 work_to_do = !marking_deque_.IsEmpty();
2243 ProcessMarkingDeque(); 2251 ProcessMarkingDeque();
2244 } 2252 }
2245 } 2253 }
2246 2254
2247 2255
2248 void MarkCompactCollector::ProcessTopOptimizedFrame(ObjectVisitor* visitor) { 2256 void MarkCompactCollector::ProcessTopOptimizedFrame(ObjectVisitor* visitor) {
2249 for (StackFrameIterator it(isolate(), isolate()->thread_local_top()); 2257 for (StackFrameIterator it(isolate(), isolate()->thread_local_top());
2250 !it.done(); it.Advance()) { 2258 !it.done(); it.Advance()) {
2251 if (it.frame()->type() == StackFrame::JAVA_SCRIPT) { 2259 if (it.frame()->type() == StackFrame::JAVA_SCRIPT) {
(...skipping 388 matching lines...) Expand 10 before | Expand all | Expand 10 after
2640 static_cast<DependentCode::DependencyGroup>(g), 2648 static_cast<DependentCode::DependencyGroup>(g),
2641 group_number_of_entries); 2649 group_number_of_entries);
2642 new_number_of_entries += group_number_of_entries; 2650 new_number_of_entries += group_number_of_entries;
2643 } 2651 }
2644 for (int i = new_number_of_entries; i < number_of_entries; i++) { 2652 for (int i = new_number_of_entries; i < number_of_entries; i++) {
2645 entries->clear_at(i); 2653 entries->clear_at(i);
2646 } 2654 }
2647 } 2655 }
2648 2656
2649 2657
2650 void MarkCompactCollector::ProcessWeakMaps() { 2658 void MarkCompactCollector::ProcessWeakCollections() {
2651 GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_WEAKMAP_PROCESS); 2659 GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_WEAKCOLLECTION_PROCESS);
2652 Object* weak_map_obj = encountered_weak_maps(); 2660 Object* weak_collection_obj = encountered_weak_collections();
2653 while (weak_map_obj != Smi::FromInt(0)) { 2661 while (weak_collection_obj != Smi::FromInt(0)) {
2654 ASSERT(MarkCompactCollector::IsMarked(HeapObject::cast(weak_map_obj))); 2662 ASSERT(MarkCompactCollector::IsMarked(
2655 JSWeakMap* weak_map = reinterpret_cast<JSWeakMap*>(weak_map_obj); 2663 HeapObject::cast(weak_collection_obj)));
2656 ObjectHashTable* table = ObjectHashTable::cast(weak_map->table()); 2664 JSWeakCollection* weak_collection =
2665 reinterpret_cast<JSWeakCollection*>(weak_collection_obj);
2666 ObjectHashTable* table = ObjectHashTable::cast(weak_collection->table());
2657 Object** anchor = reinterpret_cast<Object**>(table->address()); 2667 Object** anchor = reinterpret_cast<Object**>(table->address());
2658 for (int i = 0; i < table->Capacity(); i++) { 2668 for (int i = 0; i < table->Capacity(); i++) {
2659 if (MarkCompactCollector::IsMarked(HeapObject::cast(table->KeyAt(i)))) { 2669 if (MarkCompactCollector::IsMarked(HeapObject::cast(table->KeyAt(i)))) {
2660 Object** key_slot = 2670 Object** key_slot =
2661 HeapObject::RawField(table, FixedArray::OffsetOfElementAt( 2671 HeapObject::RawField(table, FixedArray::OffsetOfElementAt(
2662 ObjectHashTable::EntryToIndex(i))); 2672 ObjectHashTable::EntryToIndex(i)));
2663 RecordSlot(anchor, key_slot, *key_slot); 2673 RecordSlot(anchor, key_slot, *key_slot);
2664 Object** value_slot = 2674 Object** value_slot =
2665 HeapObject::RawField(table, FixedArray::OffsetOfElementAt( 2675 HeapObject::RawField(table, FixedArray::OffsetOfElementAt(
2666 ObjectHashTable::EntryToValueIndex(i))); 2676 ObjectHashTable::EntryToValueIndex(i)));
2667 MarkCompactMarkingVisitor::MarkObjectByPointer( 2677 MarkCompactMarkingVisitor::MarkObjectByPointer(
2668 this, anchor, value_slot); 2678 this, anchor, value_slot);
2669 } 2679 }
2670 } 2680 }
2671 weak_map_obj = weak_map->next(); 2681 weak_collection_obj = weak_collection->next();
2672 } 2682 }
2673 } 2683 }
2674 2684
2675 2685
2676 void MarkCompactCollector::ClearWeakMaps() { 2686 void MarkCompactCollector::ClearWeakCollections() {
2677 GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_WEAKMAP_CLEAR); 2687 GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_WEAKCOLLECTION_CLEAR);
2678 Object* weak_map_obj = encountered_weak_maps(); 2688 Object* weak_collection_obj = encountered_weak_collections();
2679 while (weak_map_obj != Smi::FromInt(0)) { 2689 while (weak_collection_obj != Smi::FromInt(0)) {
2680 ASSERT(MarkCompactCollector::IsMarked(HeapObject::cast(weak_map_obj))); 2690 ASSERT(MarkCompactCollector::IsMarked(
2681 JSWeakMap* weak_map = reinterpret_cast<JSWeakMap*>(weak_map_obj); 2691 HeapObject::cast(weak_collection_obj)));
2682 ObjectHashTable* table = ObjectHashTable::cast(weak_map->table()); 2692 JSWeakCollection* weak_collection =
2693 reinterpret_cast<JSWeakCollection*>(weak_collection_obj);
2694 ObjectHashTable* table = ObjectHashTable::cast(weak_collection->table());
2683 for (int i = 0; i < table->Capacity(); i++) { 2695 for (int i = 0; i < table->Capacity(); i++) {
2684 if (!MarkCompactCollector::IsMarked(HeapObject::cast(table->KeyAt(i)))) { 2696 if (!MarkCompactCollector::IsMarked(HeapObject::cast(table->KeyAt(i)))) {
2685 table->RemoveEntry(i); 2697 table->RemoveEntry(i);
2686 } 2698 }
2687 } 2699 }
2688 weak_map_obj = weak_map->next(); 2700 weak_collection_obj = weak_collection->next();
2689 weak_map->set_next(Smi::FromInt(0)); 2701 weak_collection->set_next(Smi::FromInt(0));
2690 } 2702 }
2691 set_encountered_weak_maps(Smi::FromInt(0)); 2703 set_encountered_weak_collections(Smi::FromInt(0));
2692 } 2704 }
2693 2705
2694 2706
 2695 // We scavenge new space simultaneously with sweeping. This is done in two 2707 // We scavenge new space simultaneously with sweeping. This is done in two
2696 // passes. 2708 // passes.
2697 // 2709 //
2698 // The first pass migrates all alive objects from one semispace to another or 2710 // The first pass migrates all alive objects from one semispace to another or
2699 // promotes them to old space. Forwarding address is written directly into 2711 // promotes them to old space. Forwarding address is written directly into
2700 // first word of object without any encoding. If object is dead we write 2712 // first word of object without any encoding. If object is dead we write
2701 // NULL as a forwarding address. 2713 // NULL as a forwarding address.
(...skipping 1601 matching lines...) Expand 10 before | Expand all | Expand 10 after
4303 while (buffer != NULL) { 4315 while (buffer != NULL) {
4304 SlotsBuffer* next_buffer = buffer->next(); 4316 SlotsBuffer* next_buffer = buffer->next();
4305 DeallocateBuffer(buffer); 4317 DeallocateBuffer(buffer);
4306 buffer = next_buffer; 4318 buffer = next_buffer;
4307 } 4319 }
4308 *buffer_address = NULL; 4320 *buffer_address = NULL;
4309 } 4321 }
4310 4322
4311 4323
4312 } } // namespace v8::internal 4324 } } // namespace v8::internal
OLDNEW
« no previous file with comments | « src/mark-compact.h ('k') | src/messages.js » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698