| OLD | NEW |
| 1 // Copyright 2006-2008 the V8 project authors. All rights reserved. | 1 // Copyright 2006-2008 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 20 matching lines...) Expand all Loading... |
| 31 #include "global-handles.h" | 31 #include "global-handles.h" |
| 32 #include "ic-inl.h" | 32 #include "ic-inl.h" |
| 33 #include "mark-compact.h" | 33 #include "mark-compact.h" |
| 34 #include "stub-cache.h" | 34 #include "stub-cache.h" |
| 35 | 35 |
| 36 namespace v8 { | 36 namespace v8 { |
| 37 namespace internal { | 37 namespace internal { |
| 38 | 38 |
| 39 // ------------------------------------------------------------------------- | 39 // ------------------------------------------------------------------------- |
| 40 // MarkCompactCollector | 40 // MarkCompactCollector |
| 41 | |
| 42 bool MarkCompactCollector::force_compaction_ = false; | |
| 43 bool MarkCompactCollector::compacting_collection_ = false; | |
| 44 bool MarkCompactCollector::compact_on_next_gc_ = false; | |
| 45 | |
| 46 int MarkCompactCollector::previous_marked_count_ = 0; | |
| 47 GCTracer* MarkCompactCollector::tracer_ = NULL; | |
| 48 | |
| 49 | |
| 50 #ifdef DEBUG | |
| 51 MarkCompactCollector::CollectorState MarkCompactCollector::state_ = IDLE; | |
| 52 | |
| 53 // Counters used for debugging the marking phase of mark-compact or mark-sweep | |
| 54 // collection. | |
| 55 int MarkCompactCollector::live_bytes_ = 0; | |
| 56 int MarkCompactCollector::live_young_objects_ = 0; | |
| 57 int MarkCompactCollector::live_old_data_objects_ = 0; | |
| 58 int MarkCompactCollector::live_old_pointer_objects_ = 0; | |
| 59 int MarkCompactCollector::live_code_objects_ = 0; | |
| 60 int MarkCompactCollector::live_map_objects_ = 0; | |
| 61 int MarkCompactCollector::live_cell_objects_ = 0; | |
| 62 int MarkCompactCollector::live_lo_objects_ = 0; | |
| 63 #endif | |
| 64 | |
| 65 void MarkCompactCollector::CollectGarbage() { | 41 void MarkCompactCollector::CollectGarbage() { |
| 42 MarkCompactCollectorData& data = v8_context()->mark_compact_collector_data_; |
| 66 // Make sure that Prepare() has been called. The individual steps below will | 43 // Make sure that Prepare() has been called. The individual steps below will |
| 67 // update the state as they proceed. | 44 // update the state as they proceed. |
| 68 ASSERT(state_ == PREPARE_GC); | 45 ASSERT(data.state_ == MarkCompactCollectorData::PREPARE_GC); |
| 69 | 46 |
| 70 // Prepare has selected whether to compact the old generation or not. | 47 // Prepare has selected whether to compact the old generation or not. |
| 71 // Tell the tracer. | 48 // Tell the tracer. |
| 72 if (IsCompacting()) tracer_->set_is_compacting(); | 49 if (IsCompacting()) data.tracer_->set_is_compacting(); |
| 73 | 50 |
| 74 MarkLiveObjects(); | 51 MarkLiveObjects(); |
| 75 | 52 |
| 76 if (FLAG_collect_maps) ClearNonLiveTransitions(); | 53 if (FLAG_collect_maps) ClearNonLiveTransitions(); |
| 77 | 54 |
| 78 SweepLargeObjectSpace(); | 55 SweepLargeObjectSpace(); |
| 79 | 56 |
| 80 if (IsCompacting()) { | 57 if (IsCompacting()) { |
| 81 EncodeForwardingAddresses(); | 58 EncodeForwardingAddresses(); |
| 82 | 59 |
| 83 UpdatePointers(); | 60 UpdatePointers(); |
| 84 | 61 |
| 85 RelocateObjects(); | 62 RelocateObjects(); |
| 86 | 63 |
| 87 RebuildRSets(); | 64 RebuildRSets(); |
| 88 | 65 |
| 89 } else { | 66 } else { |
| 90 SweepSpaces(); | 67 SweepSpaces(); |
| 91 } | 68 } |
| 92 | 69 |
| 93 Finish(); | 70 Finish(); |
| 94 | 71 |
| 95 // Save the count of marked objects remaining after the collection and | 72 // Save the count of marked objects remaining after the collection and |
| 96 // null out the GC tracer. | 73 // null out the GC tracer. |
| 97 previous_marked_count_ = tracer_->marked_count(); | 74 data.previous_marked_count_ = data.tracer_->marked_count(); |
| 98 ASSERT(previous_marked_count_ == 0); | 75 ASSERT(data.previous_marked_count_ == 0); |
| 99 tracer_ = NULL; | 76 data.tracer_ = NULL; |
| 100 } | 77 } |
| 101 | 78 |
| 102 | 79 |
| 103 void MarkCompactCollector::Prepare(GCTracer* tracer) { | 80 void MarkCompactCollector::Prepare(GCTracer* tracer) { |
| 81 MarkCompactCollectorData& data = v8_context()->mark_compact_collector_data_; |
| 104 // Rather than passing the tracer around we stash it in a static member | 82 // Rather than passing the tracer around we stash it in the per-context |
| 105 // variable. | 83 // collector data. |
| 106 tracer_ = tracer; | 84 data.tracer_ = tracer; |
| 107 | 85 |
| 108 #ifdef DEBUG | 86 #ifdef DEBUG |
| 109 ASSERT(state_ == IDLE); | 87 ASSERT(data.state_ == MarkCompactCollectorData::IDLE); |
| 110 state_ = PREPARE_GC; | 88 data.state_ = MarkCompactCollectorData::PREPARE_GC; |
| 111 #endif | 89 #endif |
| 112 ASSERT(!FLAG_always_compact || !FLAG_never_compact); | 90 ASSERT(!FLAG_always_compact || !FLAG_never_compact); |
| 113 | 91 |
| 114 compacting_collection_ = | 92 data.compacting_collection_ = |
| 115 FLAG_always_compact || force_compaction_ || compact_on_next_gc_; | 93 FLAG_always_compact || data.force_compaction_ || data.compact_on_next_gc_; |
| 116 compact_on_next_gc_ = false; | 94 data.compact_on_next_gc_ = false; |
| 117 | 95 |
| 118 if (FLAG_never_compact) compacting_collection_ = false; | 96 if (FLAG_never_compact) data.compacting_collection_ = false; |
| 119 if (FLAG_collect_maps) CreateBackPointers(); | 97 if (FLAG_collect_maps) CreateBackPointers(); |
| 120 | 98 |
| 121 #ifdef DEBUG | 99 #ifdef DEBUG |
| 122 if (compacting_collection_) { | 100 if (data.compacting_collection_) { |
| 123 // We will write bookkeeping information to the remembered set area | 101 // We will write bookkeeping information to the remembered set area |
| 124 // starting now. | 102 // starting now. |
| 125 Page::set_rset_state(Page::NOT_IN_USE); | 103 Page::set_rset_state(Page::NOT_IN_USE); |
| 126 } | 104 } |
| 127 #endif | 105 #endif |
| 128 | 106 |
| 129 PagedSpaces spaces; | 107 PagedSpaces spaces; |
| 130 while (PagedSpace* space = spaces.next()) { | 108 while (PagedSpace* space = spaces.next()) { |
| 131 space->PrepareForMarkCompact(compacting_collection_); | 109 space->PrepareForMarkCompact(data.compacting_collection_); |
| 132 } | 110 } |
| 133 | 111 |
| 134 #ifdef DEBUG | 112 #ifdef DEBUG |
| 135 live_bytes_ = 0; | 113 data.live_bytes_ = 0; |
| 136 live_young_objects_ = 0; | 114 data.live_young_objects_ = 0; |
| 137 live_old_pointer_objects_ = 0; | 115 data.live_old_pointer_objects_ = 0; |
| 138 live_old_data_objects_ = 0; | 116 data.live_old_data_objects_ = 0; |
| 139 live_code_objects_ = 0; | 117 data.live_code_objects_ = 0; |
| 140 live_map_objects_ = 0; | 118 data.live_map_objects_ = 0; |
| 141 live_cell_objects_ = 0; | 119 data.live_cell_objects_ = 0; |
| 142 live_lo_objects_ = 0; | 120 data.live_lo_objects_ = 0; |
| 143 #endif | 121 #endif |
| 144 } | 122 } |
| 145 | 123 |
| 146 | 124 |
| 147 void MarkCompactCollector::Finish() { | 125 void MarkCompactCollector::Finish() { |
| 126 MarkCompactCollectorData& data = v8_context()->mark_compact_collector_data_; |
| 148 #ifdef DEBUG | 127 #ifdef DEBUG |
| 149 ASSERT(state_ == SWEEP_SPACES || state_ == REBUILD_RSETS); | 128 ASSERT(data.state_ == MarkCompactCollectorData::SWEEP_SPACES || |
| 150 state_ = IDLE; | 129 data.state_ == MarkCompactCollectorData::REBUILD_RSETS); |
| 130 data.state_ = MarkCompactCollectorData::IDLE; |
| 151 #endif | 131 #endif |
| 152 // The stub cache is not traversed during GC; clear the cache to | 132 // The stub cache is not traversed during GC; clear the cache to |
| 153 // force lazy re-initialization of it. This must be done after the | 133 // force lazy re-initialization of it. This must be done after the |
| 154 // GC, because it relies on the new address of certain old space | 134 // GC, because it relies on the new address of certain old space |
| 155 // objects (empty string, illegal builtin). | 135 // objects (empty string, illegal builtin). |
| 156 StubCache::Clear(); | 136 StubCache::Clear(); |
| 157 | 137 |
| 158 // If we've just compacted old space there's no reason to check the | 138 // If we've just compacted old space there's no reason to check the |
| 159 // fragmentation limit. Just return. | 139 // fragmentation limit. Just return. |
| 160 if (HasCompacted()) return; | 140 if (HasCompacted()) return; |
| 161 | 141 |
| 162 // We compact the old generation on the next GC if it has gotten too | 142 // We compact the old generation on the next GC if it has gotten too |
| 163 // fragmented (ie, we could recover an expected amount of space by | 143 // fragmented (ie, we could recover an expected amount of space by |
| 164 // reclaiming the waste and free list blocks). | 144 // reclaiming the waste and free list blocks). |
| 165 static const int kFragmentationLimit = 15; // Percent. | 145 static const int kFragmentationLimit = 15; // Percent. |
| 166 static const int kFragmentationAllowed = 1 * MB; // Absolute. | 146 static const int kFragmentationAllowed = 1 * MB; // Absolute. |
| 167 int old_gen_recoverable = 0; | 147 int old_gen_recoverable = 0; |
| 168 int old_gen_used = 0; | 148 int old_gen_used = 0; |
| 169 | 149 |
| 170 OldSpaces spaces; | 150 OldSpaces spaces; |
| 171 while (OldSpace* space = spaces.next()) { | 151 while (OldSpace* space = spaces.next()) { |
| 172 old_gen_recoverable += space->Waste() + space->AvailableFree(); | 152 old_gen_recoverable += space->Waste() + space->AvailableFree(); |
| 173 old_gen_used += space->Size(); | 153 old_gen_used += space->Size(); |
| 174 } | 154 } |
| 175 | 155 |
| 176 int old_gen_fragmentation = | 156 int old_gen_fragmentation = |
| 177 static_cast<int>((old_gen_recoverable * 100.0) / old_gen_used); | 157 static_cast<int>((old_gen_recoverable * 100.0) / old_gen_used); |
| 178 if (old_gen_fragmentation > kFragmentationLimit && | 158 if (old_gen_fragmentation > kFragmentationLimit && |
| 179 old_gen_recoverable > kFragmentationAllowed) { | 159 old_gen_recoverable > kFragmentationAllowed) { |
| 180 compact_on_next_gc_ = true; | 160 data.compact_on_next_gc_ = true; |
| 181 } | 161 } |
| 182 } | 162 } |
| 183 | 163 |
| 184 | 164 class MarkCompactCollectorPrivateData { |
| 165 public: |
| 185 // ------------------------------------------------------------------------- | 166 // ------------------------------------------------------------------------- |
| 186 // Phase 1: tracing and marking live objects. | 167 // Phase 1: tracing and marking live objects. |
| 187 // before: all objects are in normal state. | 168 // before: all objects are in normal state. |
| 188 // after: a live object's map pointer is marked as '00'. | 169 // after: a live object's map pointer is marked as '00'. |
| 189 | 170 |
| 190 // Marking all live objects in the heap as part of mark-sweep or mark-compact | 171 // Marking all live objects in the heap as part of mark-sweep or mark-compact |
| 191 // collection. Before marking, all objects are in their normal state. After | 172 // collection. Before marking, all objects are in their normal state. After |
| 192 // marking, live objects' map pointers are marked indicating that the object | 173 // marking, live objects' map pointers are marked indicating that the object |
| 193 // has been found reachable. | 174 // has been found reachable. |
| 194 // | 175 // |
| 195 // The marking algorithm is a (mostly) depth-first (because of possible stack | 176 // The marking algorithm is a (mostly) depth-first (because of possible stack |
| 196 // overflow) traversal of the graph of objects reachable from the roots. It | 177 // overflow) traversal of the graph of objects reachable from the roots. It |
| 197 // uses an explicit stack of pointers rather than recursion. The young | 178 // uses an explicit stack of pointers rather than recursion. The young |
| 198 // generation's inactive ('from') space is used as a marking stack. The | 179 // generation's inactive ('from') space is used as a marking stack. The |
| 199 // objects in the marking stack are the ones that have been reached and marked | 180 // objects in the marking stack are the ones that have been reached and marked |
| 200 // but their children have not yet been visited. | 181 // but their children have not yet been visited. |
| 201 // | 182 // |
| 202 // The marking stack can overflow during traversal. In that case, we set an | 183 // The marking stack can overflow during traversal. In that case, we set an |
| 203 // overflow flag. When the overflow flag is set, we continue marking objects | 184 // overflow flag. When the overflow flag is set, we continue marking objects |
| 204 // reachable from the objects on the marking stack, but no longer push them on | 185 // reachable from the objects on the marking stack, but no longer push them on |
| 205 // the marking stack. Instead, we mark them as both marked and overflowed. | 186 // the marking stack. Instead, we mark them as both marked and overflowed. |
| 206 // When the stack is in the overflowed state, objects marked as overflowed | 187 // When the stack is in the overflowed state, objects marked as overflowed |
| 207 // have been reached and marked but their children have not been visited yet. | 188 // have been reached and marked but their children have not been visited yet. |
| 208 // After emptying the marking stack, we clear the overflow flag and traverse | 189 // After emptying the marking stack, we clear the overflow flag and traverse |
| 209 // the heap looking for objects marked as overflowed, push them on the stack, | 190 // the heap looking for objects marked as overflowed, push them on the stack, |
| 210 // and continue with marking. This process repeats until all reachable | 191 // and continue with marking. This process repeats until all reachable |
| 211 // objects have been marked. | 192 // objects have been marked. |
| 212 | 193 |
| 213 static MarkingStack marking_stack; | 194 MarkingStack marking_stack_; |
| 195 MarkCompactCollectorPrivateData() {} |
| 196 DISALLOW_COPY_AND_ASSIGN(MarkCompactCollectorPrivateData); |
| 197 }; |
| 214 | 198 |
| 199 MarkCompactCollectorData::MarkCompactCollectorData() |
| 200 :force_compaction_(false), |
| 201 compacting_collection_(false), |
| 202 compact_on_next_gc_(false), |
| 203 previous_marked_count_(0), |
| 204 #ifdef DEBUG |
| 205 state_(IDLE), |
| 206 |
| 207 // Counters used for debugging the marking phase of mark-compact or mark-sweep |
| 208 // collection. |
| 209 live_bytes_(0), |
| 210 live_young_objects_(0), |
| 211 live_old_data_objects_(0), |
| 212 live_old_pointer_objects_(0), |
| 213 live_code_objects_(0), |
| 214 live_map_objects_(0), |
| 215 live_cell_objects_(0), |
| 216 live_lo_objects_(0), |
| 217 #endif |
| 218 tracer_(NULL), |
| 219 private_data_(*new MarkCompactCollectorPrivateData()) { |
| 220 } |
| 221 |
| 222 MarkCompactCollectorData::~MarkCompactCollectorData() { |
| 223 delete &private_data_; |
| 224 } |
| 215 | 225 |
| 216 static inline HeapObject* ShortCircuitConsString(Object** p) { | 226 static inline HeapObject* ShortCircuitConsString(Object** p) { |
| 217 // Optimization: If the heap object pointed to by p is a non-symbol | 227 // Optimization: If the heap object pointed to by p is a non-symbol |
| 218 // cons string whose right substring is Heap::empty_string, update | 228 // cons string whose right substring is Heap::empty_string, update |
| 219 // it in place to its left substring. Return the updated value. | 229 // it in place to its left substring. Return the updated value. |
| 220 // | 230 // |
| 221 // Here we assume that if we change *p, we replace it with a heap object | 231 // Here we assume that if we change *p, we replace it with a heap object |
| 222 // (ie, the left substring of a cons string is always a heap object). | 232 // (ie, the left substring of a cons string is always a heap object). |
| 223 // | 233 // |
| 224 // The check performed is: | 234 // The check performed is: |
| (...skipping 191 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 416 return pointers_removed_; | 426 return pointers_removed_; |
| 417 } | 427 } |
| 418 private: | 428 private: |
| 419 int pointers_removed_; | 429 int pointers_removed_; |
| 420 }; | 430 }; |
| 421 | 431 |
| 422 | 432 |
| 423 void MarkCompactCollector::MarkUnmarkedObject(HeapObject* object) { | 433 void MarkCompactCollector::MarkUnmarkedObject(HeapObject* object) { |
| 424 ASSERT(!object->IsMarked()); | 434 ASSERT(!object->IsMarked()); |
| 425 ASSERT(Heap::Contains(object)); | 435 ASSERT(Heap::Contains(object)); |
| 436 MarkingStack& marking_stack = v8_context()->mark_compact_collector_data_. |
| 437 private_data_.marking_stack_; |
| 438 |
| 426 if (object->IsMap()) { | 439 if (object->IsMap()) { |
| 427 Map* map = Map::cast(object); | 440 Map* map = Map::cast(object); |
| 428 if (FLAG_cleanup_caches_in_maps_at_gc) { | 441 if (FLAG_cleanup_caches_in_maps_at_gc) { |
| 429 map->ClearCodeCache(); | 442 map->ClearCodeCache(); |
| 430 } | 443 } |
| 431 SetMark(map); | 444 SetMark(map); |
| 432 if (FLAG_collect_maps && | 445 if (FLAG_collect_maps && |
| 433 map->instance_type() >= FIRST_JS_OBJECT_TYPE && | 446 map->instance_type() >= FIRST_JS_OBJECT_TYPE && |
| 434 map->instance_type() <= JS_FUNCTION_TYPE) { | 447 map->instance_type() <= JS_FUNCTION_TYPE) { |
| 435 MarkMapContents(map); | 448 MarkMapContents(map); |
| (...skipping 27 matching lines...) Expand all Loading... |
| 463 ASSERT(descriptors != Heap::raw_unchecked_empty_descriptor_array()); | 476 ASSERT(descriptors != Heap::raw_unchecked_empty_descriptor_array()); |
| 464 SetMark(descriptors); | 477 SetMark(descriptors); |
| 465 | 478 |
| 466 FixedArray* contents = reinterpret_cast<FixedArray*>( | 479 FixedArray* contents = reinterpret_cast<FixedArray*>( |
| 467 descriptors->get(DescriptorArray::kContentArrayIndex)); | 480 descriptors->get(DescriptorArray::kContentArrayIndex)); |
| 468 ASSERT(contents->IsHeapObject()); | 481 ASSERT(contents->IsHeapObject()); |
| 469 ASSERT(!contents->IsMarked()); | 482 ASSERT(!contents->IsMarked()); |
| 470 ASSERT(contents->IsFixedArray()); | 483 ASSERT(contents->IsFixedArray()); |
| 471 ASSERT(contents->length() >= 2); | 484 ASSERT(contents->length() >= 2); |
| 472 SetMark(contents); | 485 SetMark(contents); |
| 486 MarkingStack& marking_stack = v8_context()->mark_compact_collector_data_. |
| 487 private_data_.marking_stack_; |
| 473 // Contents contains (value, details) pairs. If the details say | 488 // Contents contains (value, details) pairs. If the details say |
| 474 // that the type of descriptor is MAP_TRANSITION, CONSTANT_TRANSITION, | 489 // that the type of descriptor is MAP_TRANSITION, CONSTANT_TRANSITION, |
| 475 // or NULL_DESCRIPTOR, we don't mark the value as live. Only for | 490 // or NULL_DESCRIPTOR, we don't mark the value as live. Only for |
| 476 // type MAP_TRANSITION is the value a Object* (a Map*). | 491 // type MAP_TRANSITION is the value a Object* (a Map*). |
| 477 for (int i = 0; i < contents->length(); i += 2) { | 492 for (int i = 0; i < contents->length(); i += 2) { |
| 478 // If the pair (value, details) at index i, i+1 is not | 493 // If the pair (value, details) at index i, i+1 is not |
| 479 // a transition or null descriptor, mark the value. | 494 // a transition or null descriptor, mark the value. |
| 480 PropertyDetails details(Smi::cast(contents->get(i + 1))); | 495 PropertyDetails details(Smi::cast(contents->get(i + 1))); |
| 481 if (details.type() < FIRST_PHANTOM_PROPERTY_TYPE) { | 496 if (details.type() < FIRST_PHANTOM_PROPERTY_TYPE) { |
| 482 HeapObject* object = reinterpret_cast<HeapObject*>(contents->get(i)); | 497 HeapObject* object = reinterpret_cast<HeapObject*>(contents->get(i)); |
| (...skipping 34 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 517 map_word.ClearOverflow(); | 532 map_word.ClearOverflow(); |
| 518 return obj->SizeFromMap(map_word.ToMap()); | 533 return obj->SizeFromMap(map_word.ToMap()); |
| 519 } | 534 } |
| 520 | 535 |
| 521 | 536 |
| 522 // Fill the marking stack with overflowed objects returned by the given | 537 // Fill the marking stack with overflowed objects returned by the given |
| 523 // iterator. Stop when the marking stack is filled or the end of the space | 538 // iterator. Stop when the marking stack is filled or the end of the space |
| 524 // is reached, whichever comes first. | 539 // is reached, whichever comes first. |
| 525 template<class T> | 540 template<class T> |
| 526 static void ScanOverflowedObjects(T* it) { | 541 static void ScanOverflowedObjects(T* it) { |
| 542 MarkingStack& marking_stack = v8_context()->mark_compact_collector_data_. |
| 543 private_data_.marking_stack_; |
| 527 // The caller should ensure that the marking stack is initially not full, | 544 // The caller should ensure that the marking stack is initially not full, |
| 528 // so that we don't waste effort pointlessly scanning for objects. | 545 // so that we don't waste effort pointlessly scanning for objects. |
| 529 ASSERT(!marking_stack.is_full()); | 546 ASSERT(!marking_stack.is_full()); |
| 530 | 547 |
| 531 while (it->has_next()) { | 548 while (it->has_next()) { |
| 532 HeapObject* object = it->next(); | 549 HeapObject* object = it->next(); |
| 533 if (object->IsOverflowed()) { | 550 if (object->IsOverflowed()) { |
| 534 object->ClearOverflow(); | 551 object->ClearOverflow(); |
| 535 ASSERT(object->IsMarked()); | 552 ASSERT(object->IsMarked()); |
| 536 ASSERT(Heap::Contains(object)); | 553 ASSERT(Heap::Contains(object)); |
| (...skipping 53 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 590 | 607 |
| 591 | 608 |
| 592 void MarkCompactCollector::MarkRoots(RootMarkingVisitor* visitor) { | 609 void MarkCompactCollector::MarkRoots(RootMarkingVisitor* visitor) { |
| 593 // Mark the heap roots including global variables, stack variables, | 610 // Mark the heap roots including global variables, stack variables, |
| 594 // etc., and all objects reachable from them. | 611 // etc., and all objects reachable from them. |
| 595 Heap::IterateStrongRoots(visitor, VISIT_ONLY_STRONG); | 612 Heap::IterateStrongRoots(visitor, VISIT_ONLY_STRONG); |
| 596 | 613 |
| 597 // Handle the symbol table specially. | 614 // Handle the symbol table specially. |
| 598 MarkSymbolTable(); | 615 MarkSymbolTable(); |
| 599 | 616 |
| 617 MarkingStack& marking_stack = v8_context()->mark_compact_collector_data_. |
| 618 private_data_.marking_stack_; |
| 600 // There may be overflowed objects in the heap. Visit them now. | 619 // There may be overflowed objects in the heap. Visit them now. |
| 601 while (marking_stack.overflowed()) { | 620 while (marking_stack.overflowed()) { |
| 602 RefillMarkingStack(); | 621 RefillMarkingStack(); |
| 603 EmptyMarkingStack(visitor->stack_visitor()); | 622 EmptyMarkingStack(visitor->stack_visitor()); |
| 604 } | 623 } |
| 605 } | 624 } |
| 606 | 625 |
| 607 | 626 |
| 608 void MarkCompactCollector::MarkObjectGroups() { | 627 void MarkCompactCollector::MarkObjectGroups() { |
| 609 List<ObjectGroup*>* object_groups = GlobalHandles::ObjectGroups(); | 628 List<ObjectGroup*>* object_groups = GlobalHandles::ObjectGroups(); |
| (...skipping 27 matching lines...) Expand all Loading... |
| 637 object_groups->at(i) = NULL; | 656 object_groups->at(i) = NULL; |
| 638 } | 657 } |
| 639 } | 658 } |
| 640 | 659 |
| 641 | 660 |
| 642 // Mark all objects reachable from the objects on the marking stack. | 661 // Mark all objects reachable from the objects on the marking stack. |
| 643 // Before: the marking stack contains zero or more heap object pointers. | 662 // Before: the marking stack contains zero or more heap object pointers. |
| 644 // After: the marking stack is empty, and all objects reachable from the | 663 // After: the marking stack is empty, and all objects reachable from the |
| 645 // marking stack have been marked, or are overflowed in the heap. | 664 // marking stack have been marked, or are overflowed in the heap. |
| 646 void MarkCompactCollector::EmptyMarkingStack(MarkingVisitor* visitor) { | 665 void MarkCompactCollector::EmptyMarkingStack(MarkingVisitor* visitor) { |
| 666 MarkingStack& marking_stack = v8_context()->mark_compact_collector_data_. |
| 667 private_data_.marking_stack_; |
| 647 while (!marking_stack.is_empty()) { | 668 while (!marking_stack.is_empty()) { |
| 648 HeapObject* object = marking_stack.Pop(); | 669 HeapObject* object = marking_stack.Pop(); |
| 649 ASSERT(object->IsHeapObject()); | 670 ASSERT(object->IsHeapObject()); |
| 650 ASSERT(Heap::Contains(object)); | 671 ASSERT(Heap::Contains(object)); |
| 651 ASSERT(object->IsMarked()); | 672 ASSERT(object->IsMarked()); |
| 652 ASSERT(!object->IsOverflowed()); | 673 ASSERT(!object->IsOverflowed()); |
| 653 | 674 |
| 654 // Because the object is marked, we have to recover the original map | 675 // Because the object is marked, we have to recover the original map |
| 655 // pointer and use it to mark the object's body. | 676 // pointer and use it to mark the object's body. |
| 656 MapWord map_word = object->map_word(); | 677 MapWord map_word = object->map_word(); |
| 657 map_word.ClearMark(); | 678 map_word.ClearMark(); |
| 658 Map* map = map_word.ToMap(); | 679 Map* map = map_word.ToMap(); |
| 659 MarkObject(map); | 680 MarkObject(map); |
| 660 object->IterateBody(map->instance_type(), object->SizeFromMap(map), | 681 object->IterateBody(map->instance_type(), object->SizeFromMap(map), |
| 661 visitor); | 682 visitor); |
| 662 } | 683 } |
| 663 } | 684 } |
| 664 | 685 |
| 665 | 686 |
| 666 // Sweep the heap for overflowed objects, clear their overflow bits, and | 687 // Sweep the heap for overflowed objects, clear their overflow bits, and |
| 667 // push them on the marking stack. Stop early if the marking stack fills | 688 // push them on the marking stack. Stop early if the marking stack fills |
| 668 // before sweeping completes. If sweeping completes, there are no remaining | 689 // before sweeping completes. If sweeping completes, there are no remaining |
| 669 // overflowed objects in the heap so the overflow flag on the markings stack | 690 // overflowed objects in the heap so the overflow flag on the markings stack |
| 670 // is cleared. | 691 // is cleared. |
| 671 void MarkCompactCollector::RefillMarkingStack() { | 692 void MarkCompactCollector::RefillMarkingStack() { |
| 693 MarkingStack& marking_stack = v8_context()->mark_compact_collector_data_. |
| 694 private_data_.marking_stack_; |
| 672 ASSERT(marking_stack.overflowed()); | 695 ASSERT(marking_stack.overflowed()); |
| 673 | 696 |
| 674 SemiSpaceIterator new_it(Heap::new_space(), &OverflowObjectSize); | 697 SemiSpaceIterator new_it(Heap::new_space(), &OverflowObjectSize); |
| 675 ScanOverflowedObjects(&new_it); | 698 ScanOverflowedObjects(&new_it); |
| 676 if (marking_stack.is_full()) return; | 699 if (marking_stack.is_full()) return; |
| 677 | 700 |
| 678 HeapObjectIterator old_pointer_it(Heap::old_pointer_space(), | 701 HeapObjectIterator old_pointer_it(Heap::old_pointer_space(), |
| 679 &OverflowObjectSize); | 702 &OverflowObjectSize); |
| 680 ScanOverflowedObjects(&old_pointer_it); | 703 ScanOverflowedObjects(&old_pointer_it); |
| 681 if (marking_stack.is_full()) return; | 704 if (marking_stack.is_full()) return; |
| (...skipping 20 matching lines...) Expand all Loading... |
| 702 | 725 |
| 703 marking_stack.clear_overflowed(); | 726 marking_stack.clear_overflowed(); |
| 704 } | 727 } |
| 705 | 728 |
| 706 | 729 |
| 707 // Mark all objects reachable (transitively) from objects on the marking | 730 // Mark all objects reachable (transitively) from objects on the marking |
| 708 // stack. Before: the marking stack contains zero or more heap object | 731 // stack. Before: the marking stack contains zero or more heap object |
| 709 // pointers. After: the marking stack is empty and there are no overflowed | 732 // pointers. After: the marking stack is empty and there are no overflowed |
| 710 // objects in the heap. | 733 // objects in the heap. |
| 711 void MarkCompactCollector::ProcessMarkingStack(MarkingVisitor* visitor) { | 734 void MarkCompactCollector::ProcessMarkingStack(MarkingVisitor* visitor) { |
| 735 MarkingStack& marking_stack = v8_context()->mark_compact_collector_data_. |
| 736 private_data_.marking_stack_; |
| 712 EmptyMarkingStack(visitor); | 737 EmptyMarkingStack(visitor); |
| 713 while (marking_stack.overflowed()) { | 738 while (marking_stack.overflowed()) { |
| 714 RefillMarkingStack(); | 739 RefillMarkingStack(); |
| 715 EmptyMarkingStack(visitor); | 740 EmptyMarkingStack(visitor); |
| 716 } | 741 } |
| 717 } | 742 } |
| 718 | 743 |
| 719 | 744 |
| 720 void MarkCompactCollector::ProcessObjectGroups(MarkingVisitor* visitor) { | 745 void MarkCompactCollector::ProcessObjectGroups(MarkingVisitor* visitor) { |
| 746 MarkingStack& marking_stack = v8_context()->mark_compact_collector_data_. |
| 747 private_data_.marking_stack_; |
| 721 bool work_to_do = true; | 748 bool work_to_do = true; |
| 722 ASSERT(marking_stack.is_empty()); | 749 ASSERT(marking_stack.is_empty()); |
| 723 while (work_to_do) { | 750 while (work_to_do) { |
| 724 MarkObjectGroups(); | 751 MarkObjectGroups(); |
| 725 work_to_do = !marking_stack.is_empty(); | 752 work_to_do = !marking_stack.is_empty(); |
| 726 ProcessMarkingStack(visitor); | 753 ProcessMarkingStack(visitor); |
| 727 } | 754 } |
| 728 } | 755 } |
| 729 | 756 |
| 730 | 757 |
| 731 void MarkCompactCollector::MarkLiveObjects() { | 758 void MarkCompactCollector::MarkLiveObjects() { |
| 759 MarkCompactCollectorData& data = v8_context()->mark_compact_collector_data_; |
| 732 #ifdef DEBUG | 760 #ifdef DEBUG |
| 733 ASSERT(state_ == PREPARE_GC); | 761 ASSERT(data.state_ == MarkCompactCollectorData::PREPARE_GC); |
| 734 state_ = MARK_LIVE_OBJECTS; | 762 data.state_ = MarkCompactCollectorData::MARK_LIVE_OBJECTS; |
| 735 #endif | 763 #endif |
| 764 MarkingStack& marking_stack = data.private_data_.marking_stack_; |
| 736 // The to space contains live objects, the from space is used as a marking | 765 // The to space contains live objects, the from space is used as a marking |
| 737 // stack. | 766 // stack. |
| 738 marking_stack.Initialize(Heap::new_space()->FromSpaceLow(), | 767 marking_stack.Initialize(Heap::new_space()->FromSpaceLow(), |
| 739 Heap::new_space()->FromSpaceHigh()); | 768 Heap::new_space()->FromSpaceHigh()); |
| 740 | 769 |
| 741 ASSERT(!marking_stack.overflowed()); | 770 ASSERT(!marking_stack.overflowed()); |
| 742 | 771 |
| 743 RootMarkingVisitor root_visitor; | 772 RootMarkingVisitor root_visitor; |
| 744 MarkRoots(&root_visitor); | 773 MarkRoots(&root_visitor); |
| 745 | 774 |
| (...skipping 36 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 782 | 811 |
| 783 static int CountMarkedCallback(HeapObject* obj) { | 812 static int CountMarkedCallback(HeapObject* obj) { |
| 784 MapWord map_word = obj->map_word(); | 813 MapWord map_word = obj->map_word(); |
| 785 map_word.ClearMark(); | 814 map_word.ClearMark(); |
| 786 return obj->SizeFromMap(map_word.ToMap()); | 815 return obj->SizeFromMap(map_word.ToMap()); |
| 787 } | 816 } |
| 788 | 817 |
| 789 | 818 |
| 790 #ifdef DEBUG | 819 #ifdef DEBUG |
| 791 void MarkCompactCollector::UpdateLiveObjectCount(HeapObject* obj) { | 820 void MarkCompactCollector::UpdateLiveObjectCount(HeapObject* obj) { |
| 792 live_bytes_ += obj->Size(); | 821 MarkCompactCollectorData& data = v8_context()->mark_compact_collector_data_; |
| 822 data.live_bytes_ += obj->Size(); |
| 793 if (Heap::new_space()->Contains(obj)) { | 823 if (Heap::new_space()->Contains(obj)) { |
| 794 live_young_objects_++; | 824 data.live_young_objects_++; |
| 795 } else if (Heap::map_space()->Contains(obj)) { | 825 } else if (Heap::map_space()->Contains(obj)) { |
| 796 ASSERT(obj->IsMap()); | 826 ASSERT(obj->IsMap()); |
| 797 live_map_objects_++; | 827 data.live_map_objects_++; |
| 798 } else if (Heap::cell_space()->Contains(obj)) { | 828 } else if (Heap::cell_space()->Contains(obj)) { |
| 799 ASSERT(obj->IsJSGlobalPropertyCell()); | 829 ASSERT(obj->IsJSGlobalPropertyCell()); |
| 800 live_cell_objects_++; | 830 data.live_cell_objects_++; |
| 801 } else if (Heap::old_pointer_space()->Contains(obj)) { | 831 } else if (Heap::old_pointer_space()->Contains(obj)) { |
| 802 live_old_pointer_objects_++; | 832 data.live_old_pointer_objects_++; |
| 803 } else if (Heap::old_data_space()->Contains(obj)) { | 833 } else if (Heap::old_data_space()->Contains(obj)) { |
| 804 live_old_data_objects_++; | 834 data.live_old_data_objects_++; |
| 805 } else if (Heap::code_space()->Contains(obj)) { | 835 } else if (Heap::code_space()->Contains(obj)) { |
| 806 live_code_objects_++; | 836 data.live_code_objects_++; |
| 807 } else if (Heap::lo_space()->Contains(obj)) { | 837 } else if (Heap::lo_space()->Contains(obj)) { |
| 808 live_lo_objects_++; | 838 data.live_lo_objects_++; |
| 809 } else { | 839 } else { |
| 810 UNREACHABLE(); | 840 UNREACHABLE(); |
| 811 } | 841 } |
| 812 } | 842 } |
| 813 #endif // DEBUG | 843 #endif // DEBUG |
| 814 | 844 |
| 815 | 845 |
| 816 void MarkCompactCollector::SweepLargeObjectSpace() { | 846 void MarkCompactCollector::SweepLargeObjectSpace() { |
| 817 #ifdef DEBUG | 847 #ifdef DEBUG |
| 818 ASSERT(state_ == MARK_LIVE_OBJECTS); | 848 MarkCompactCollectorData& data = v8_context()->mark_compact_collector_data_; |
| 819 state_ = | 849 ASSERT(data.state_ == MarkCompactCollectorData::MARK_LIVE_OBJECTS); |
| 820 compacting_collection_ ? ENCODE_FORWARDING_ADDRESSES : SWEEP_SPACES; | 850 data.state_ = data.compacting_collection_ ? |
| 851 MarkCompactCollectorData::ENCODE_FORWARDING_ADDRESSES : |
| 852 MarkCompactCollectorData::SWEEP_SPACES; |
| 821 #endif | 853 #endif |
| 822 // Deallocate unmarked objects and clear marked bits for marked objects. | 854 // Deallocate unmarked objects and clear marked bits for marked objects. |
| 823 Heap::lo_space()->FreeUnmarkedObjects(); | 855 Heap::lo_space()->FreeUnmarkedObjects(); |
| 824 } | 856 } |
| 825 | 857 |
| 826 // Safe to use during marking phase only. | 858 // Safe to use during marking phase only. |
| 827 bool MarkCompactCollector::SafeIsMap(HeapObject* object) { | 859 bool MarkCompactCollector::SafeIsMap(HeapObject* object) { |
| 828 MapWord metamap = object->map_word(); | 860 MapWord metamap = object->map_word(); |
| 829 metamap.ClearMark(); | 861 metamap.ClearMark(); |
| 830 return metamap.ToMap()->instance_type() == MAP_TYPE; | 862 return metamap.ToMap()->instance_type() == MAP_TYPE; |
| (...skipping 410 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1241 ASSERT(size_in_bytes % size == 0); | 1273 ASSERT(size_in_bytes % size == 0); |
| 1242 Heap::ClearRSetRange(start, size_in_bytes); | 1274 Heap::ClearRSetRange(start, size_in_bytes); |
| 1243 Address end = start + size_in_bytes; | 1275 Address end = start + size_in_bytes; |
| 1244 for (Address a = start; a < end; a += size) { | 1276 for (Address a = start; a < end; a += size) { |
| 1245 Heap::cell_space()->Free(a); | 1277 Heap::cell_space()->Free(a); |
| 1246 } | 1278 } |
| 1247 } | 1279 } |
| 1248 | 1280 |
| 1249 | 1281 |
| 1250 void MarkCompactCollector::EncodeForwardingAddresses() { | 1282 void MarkCompactCollector::EncodeForwardingAddresses() { |
| 1251 ASSERT(state_ == ENCODE_FORWARDING_ADDRESSES); | 1283 ASSERT(v8_context()->mark_compact_collector_data_.state_ == |
| 1284 MarkCompactCollectorData::ENCODE_FORWARDING_ADDRESSES); |
| 1252 // Objects in the active semispace of the young generation may be | 1285 // Objects in the active semispace of the young generation may be |
| 1253 // relocated to the inactive semispace (if not promoted). Set the | 1286 // relocated to the inactive semispace (if not promoted). Set the |
| 1254 // relocation info to the beginning of the inactive semispace. | 1287 // relocation info to the beginning of the inactive semispace. |
| 1255 Heap::new_space()->MCResetRelocationInfo(); | 1288 Heap::new_space()->MCResetRelocationInfo(); |
| 1256 | 1289 |
| 1257 // Compute the forwarding pointers in each space. | 1290 // Compute the forwarding pointers in each space. |
| 1258 EncodeForwardingAddressesInPagedSpace<MCAllocateFromOldPointerSpace, | 1291 EncodeForwardingAddressesInPagedSpace<MCAllocateFromOldPointerSpace, |
| 1259 IgnoreNonLiveObject>( | 1292 IgnoreNonLiveObject>( |
| 1260 Heap::old_pointer_space()); | 1293 Heap::old_pointer_space()); |
| 1261 | 1294 |
| (...skipping 26 matching lines...) Expand all Loading... |
| 1288 // allocation top. | 1321 // allocation top. |
| 1289 Heap::old_pointer_space()->MCWriteRelocationInfoToPage(); | 1322 Heap::old_pointer_space()->MCWriteRelocationInfoToPage(); |
| 1290 Heap::old_data_space()->MCWriteRelocationInfoToPage(); | 1323 Heap::old_data_space()->MCWriteRelocationInfoToPage(); |
| 1291 Heap::code_space()->MCWriteRelocationInfoToPage(); | 1324 Heap::code_space()->MCWriteRelocationInfoToPage(); |
| 1292 Heap::map_space()->MCWriteRelocationInfoToPage(); | 1325 Heap::map_space()->MCWriteRelocationInfoToPage(); |
| 1293 Heap::cell_space()->MCWriteRelocationInfoToPage(); | 1326 Heap::cell_space()->MCWriteRelocationInfoToPage(); |
| 1294 } | 1327 } |
| 1295 | 1328 |
| 1296 | 1329 |
| 1297 void MarkCompactCollector::SweepSpaces() { | 1330 void MarkCompactCollector::SweepSpaces() { |
| 1298 ASSERT(state_ == SWEEP_SPACES); | 1331 ASSERT(v8_context()->mark_compact_collector_data_.state_ == |
| 1332 MarkCompactCollectorData::SWEEP_SPACES); |
| 1299 ASSERT(!IsCompacting()); | 1333 ASSERT(!IsCompacting()); |
| 1300 // Noncompacting collections simply sweep the spaces to clear the mark | 1334 // Noncompacting collections simply sweep the spaces to clear the mark |
| 1301 // bits and free the nonlive blocks (for old and map spaces). We sweep | 1335 // bits and free the nonlive blocks (for old and map spaces). We sweep |
| 1302 // the map space last because freeing non-live maps overwrites them and | 1336 // the map space last because freeing non-live maps overwrites them and |
| 1303 // the other spaces rely on possibly non-live maps to get the sizes for | 1337 // the other spaces rely on possibly non-live maps to get the sizes for |
| 1304 // non-live objects. | 1338 // non-live objects. |
| 1305 SweepSpace(Heap::old_pointer_space(), &DeallocateOldPointerBlock); | 1339 SweepSpace(Heap::old_pointer_space(), &DeallocateOldPointerBlock); |
| 1306 SweepSpace(Heap::old_data_space(), &DeallocateOldDataBlock); | 1340 SweepSpace(Heap::old_data_space(), &DeallocateOldDataBlock); |
| 1307 SweepSpace(Heap::code_space(), &DeallocateCodeBlock); | 1341 SweepSpace(Heap::code_space(), &DeallocateCodeBlock); |
| 1308 SweepSpace(Heap::cell_space(), &DeallocateCellBlock); | 1342 SweepSpace(Heap::cell_space(), &DeallocateCellBlock); |
| (...skipping 23 matching lines...) Expand all Loading... |
| 1332 live_objects++; | 1366 live_objects++; |
| 1333 current += size_func(HeapObject::FromAddress(current)); | 1367 current += size_func(HeapObject::FromAddress(current)); |
| 1334 } | 1368 } |
| 1335 } | 1369 } |
| 1336 return live_objects; | 1370 return live_objects; |
| 1337 } | 1371 } |
| 1338 | 1372 |
| 1339 | 1373 |
| 1340 int MarkCompactCollector::IterateLiveObjects(NewSpace* space, | 1374 int MarkCompactCollector::IterateLiveObjects(NewSpace* space, |
| 1341 HeapObjectCallback size_f) { | 1375 HeapObjectCallback size_f) { |
| 1342 ASSERT(MARK_LIVE_OBJECTS < state_ && state_ <= RELOCATE_OBJECTS); | 1376 #ifdef DEBUG |
| 1377 MarkCompactCollectorData& data = v8_context()->mark_compact_collector_data_; |
| 1378 ASSERT(MarkCompactCollectorData::MARK_LIVE_OBJECTS < data.state_ && |
| 1379 data.state_ <= MarkCompactCollectorData::RELOCATE_OBJECTS); |
| 1380 #endif |
| 1343 return IterateLiveObjectsInRange(space->bottom(), space->top(), size_f); | 1381 return IterateLiveObjectsInRange(space->bottom(), space->top(), size_f); |
| 1344 } | 1382 } |
| 1345 | 1383 |
| 1346 | 1384 |
| 1347 int MarkCompactCollector::IterateLiveObjects(PagedSpace* space, | 1385 int MarkCompactCollector::IterateLiveObjects(PagedSpace* space, |
| 1348 HeapObjectCallback size_f) { | 1386 HeapObjectCallback size_f) { |
| 1349 ASSERT(MARK_LIVE_OBJECTS < state_ && state_ <= RELOCATE_OBJECTS); | 1387 #ifdef DEBUG |
| 1388 MarkCompactCollectorData& data = v8_context()->mark_compact_collector_data_; |
| 1389 ASSERT(MarkCompactCollectorData::MARK_LIVE_OBJECTS < data.state_ && |
| 1390 data.state_ <= MarkCompactCollectorData::RELOCATE_OBJECTS); |
| 1391 #endif |
| 1350 int total = 0; | 1392 int total = 0; |
| 1351 PageIterator it(space, PageIterator::PAGES_IN_USE); | 1393 PageIterator it(space, PageIterator::PAGES_IN_USE); |
| 1352 while (it.has_next()) { | 1394 while (it.has_next()) { |
| 1353 Page* p = it.next(); | 1395 Page* p = it.next(); |
| 1354 total += IterateLiveObjectsInRange(p->ObjectAreaStart(), | 1396 total += IterateLiveObjectsInRange(p->ObjectAreaStart(), |
| 1355 p->AllocationTop(), | 1397 p->AllocationTop(), |
| 1356 size_f); | 1398 size_f); |
| 1357 } | 1399 } |
| 1358 return total; | 1400 return total; |
| 1359 } | 1401 } |
| (...skipping 84 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1444 if (FLAG_gc_verbose) { | 1486 if (FLAG_gc_verbose) { |
| 1445 PrintF("update %p : %p -> %p\n", | 1487 PrintF("update %p : %p -> %p\n", |
| 1446 reinterpret_cast<Address>(p), old_addr, new_addr); | 1488 reinterpret_cast<Address>(p), old_addr, new_addr); |
| 1447 } | 1489 } |
| 1448 #endif | 1490 #endif |
| 1449 } | 1491 } |
| 1450 }; | 1492 }; |
| 1451 | 1493 |
| 1452 | 1494 |
| 1453 void MarkCompactCollector::UpdatePointers() { | 1495 void MarkCompactCollector::UpdatePointers() { |
| 1496 MarkCompactCollectorData& data = v8_context()->mark_compact_collector_data_; |
| 1454 #ifdef DEBUG | 1497 #ifdef DEBUG |
| 1455 ASSERT(state_ == ENCODE_FORWARDING_ADDRESSES); | 1498 ASSERT(data.state_ == MarkCompactCollectorData::ENCODE_FORWARDING_ADDRESSES); |
| 1456 state_ = UPDATE_POINTERS; | 1499 data.state_ = MarkCompactCollectorData::UPDATE_POINTERS; |
| 1457 #endif | 1500 #endif |
| 1458 UpdatingVisitor updating_visitor; | 1501 UpdatingVisitor updating_visitor; |
| 1459 Heap::IterateRoots(&updating_visitor, VISIT_ONLY_STRONG); | 1502 Heap::IterateRoots(&updating_visitor, VISIT_ONLY_STRONG); |
| 1460 GlobalHandles::IterateWeakRoots(&updating_visitor); | 1503 GlobalHandles::IterateWeakRoots(&updating_visitor); |
| 1461 | 1504 |
| 1462 int live_maps = IterateLiveObjects(Heap::map_space(), | 1505 int live_maps = IterateLiveObjects(Heap::map_space(), |
| 1463 &UpdatePointersInOldObject); | 1506 &UpdatePointersInOldObject); |
| 1464 int live_pointer_olds = IterateLiveObjects(Heap::old_pointer_space(), | 1507 int live_pointer_olds = IterateLiveObjects(Heap::old_pointer_space(), |
| 1465 &UpdatePointersInOldObject); | 1508 &UpdatePointersInOldObject); |
| 1466 int live_data_olds = IterateLiveObjects(Heap::old_data_space(), | 1509 int live_data_olds = IterateLiveObjects(Heap::old_data_space(), |
| 1467 &UpdatePointersInOldObject); | 1510 &UpdatePointersInOldObject); |
| 1468 int live_codes = IterateLiveObjects(Heap::code_space(), | 1511 int live_codes = IterateLiveObjects(Heap::code_space(), |
| 1469 &UpdatePointersInOldObject); | 1512 &UpdatePointersInOldObject); |
| 1470 int live_cells = IterateLiveObjects(Heap::cell_space(), | 1513 int live_cells = IterateLiveObjects(Heap::cell_space(), |
| 1471 &UpdatePointersInOldObject); | 1514 &UpdatePointersInOldObject); |
| 1472 int live_news = IterateLiveObjects(Heap::new_space(), | 1515 int live_news = IterateLiveObjects(Heap::new_space(), |
| 1473 &UpdatePointersInNewObject); | 1516 &UpdatePointersInNewObject); |
| 1474 | 1517 |
| 1475 // Large objects do not move, the map word can be updated directly. | 1518 // Large objects do not move, the map word can be updated directly. |
| 1476 LargeObjectIterator it(Heap::lo_space()); | 1519 LargeObjectIterator it(Heap::lo_space()); |
| 1477 while (it.has_next()) UpdatePointersInNewObject(it.next()); | 1520 while (it.has_next()) UpdatePointersInNewObject(it.next()); |
| 1478 | 1521 |
| 1479 USE(live_maps); | 1522 USE(live_maps); |
| 1480 USE(live_pointer_olds); | 1523 USE(live_pointer_olds); |
| 1481 USE(live_data_olds); | 1524 USE(live_data_olds); |
| 1482 USE(live_codes); | 1525 USE(live_codes); |
| 1483 USE(live_cells); | 1526 USE(live_cells); |
| 1484 USE(live_news); | 1527 USE(live_news); |
| 1485 ASSERT(live_maps == live_map_objects_); | 1528 ASSERT(live_maps == data.live_map_objects_); |
| 1486 ASSERT(live_data_olds == live_old_data_objects_); | 1529 ASSERT(live_data_olds == data.live_old_data_objects_); |
| 1487 ASSERT(live_pointer_olds == live_old_pointer_objects_); | 1530 ASSERT(live_pointer_olds == data.live_old_pointer_objects_); |
| 1488 ASSERT(live_codes == live_code_objects_); | 1531 ASSERT(live_codes == data.live_code_objects_); |
| 1489 ASSERT(live_cells == live_cell_objects_); | 1532 ASSERT(live_cells == data.live_cell_objects_); |
| 1490 ASSERT(live_news == live_young_objects_); | 1533 ASSERT(live_news == data.live_young_objects_); |
| 1491 } | 1534 } |
| 1492 | 1535 |
| 1493 | 1536 |
| 1494 int MarkCompactCollector::UpdatePointersInNewObject(HeapObject* obj) { | 1537 int MarkCompactCollector::UpdatePointersInNewObject(HeapObject* obj) { |
| 1495 // Keep old map pointers | 1538 // Keep old map pointers |
| 1496 Map* old_map = obj->map(); | 1539 Map* old_map = obj->map(); |
| 1497 ASSERT(old_map->IsHeapObject()); | 1540 ASSERT(old_map->IsHeapObject()); |
| 1498 | 1541 |
| 1499 Address forwarded = GetForwardingAddressInOldSpace(old_map); | 1542 Address forwarded = GetForwardingAddressInOldSpace(old_map); |
| 1500 | 1543 |
| (...skipping 88 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1589 ASSERT(next_page->OffsetToAddress(offset) < next_page->mc_relocation_top); | 1632 ASSERT(next_page->OffsetToAddress(offset) < next_page->mc_relocation_top); |
| 1590 | 1633 |
| 1591 return next_page->OffsetToAddress(offset); | 1634 return next_page->OffsetToAddress(offset); |
| 1592 } | 1635 } |
| 1593 | 1636 |
| 1594 | 1637 |
| 1595 // ------------------------------------------------------------------------- | 1638 // ------------------------------------------------------------------------- |
| 1596 // Phase 4: Relocate objects | 1639 // Phase 4: Relocate objects |
| 1597 | 1640 |
| 1598 void MarkCompactCollector::RelocateObjects() { | 1641 void MarkCompactCollector::RelocateObjects() { |
| 1642 MarkCompactCollectorData& data = v8_context()->mark_compact_collector_data_; |
| 1599 #ifdef DEBUG | 1643 #ifdef DEBUG |
| 1600 ASSERT(state_ == UPDATE_POINTERS); | 1644 ASSERT(data.state_ == MarkCompactCollectorData::UPDATE_POINTERS); |
| 1601 state_ = RELOCATE_OBJECTS; | 1645 data.state_ = MarkCompactCollectorData::RELOCATE_OBJECTS; |
| 1602 #endif | 1646 #endif |
| 1603 // Relocates objects, always relocate map objects first. Relocating | 1647 // Relocates objects, always relocate map objects first. Relocating |
| 1604 // objects in other space relies on map objects to get object size. | 1648 // objects in other space relies on map objects to get object size. |
| 1605 int live_maps = IterateLiveObjects(Heap::map_space(), &RelocateMapObject); | 1649 int live_maps = IterateLiveObjects(Heap::map_space(), &RelocateMapObject); |
| 1606 int live_pointer_olds = IterateLiveObjects(Heap::old_pointer_space(), | 1650 int live_pointer_olds = IterateLiveObjects(Heap::old_pointer_space(), |
| 1607 &RelocateOldPointerObject); | 1651 &RelocateOldPointerObject); |
| 1608 int live_data_olds = IterateLiveObjects(Heap::old_data_space(), | 1652 int live_data_olds = IterateLiveObjects(Heap::old_data_space(), |
| 1609 &RelocateOldDataObject); | 1653 &RelocateOldDataObject); |
| 1610 int live_codes = IterateLiveObjects(Heap::code_space(), &RelocateCodeObject); | 1654 int live_codes = IterateLiveObjects(Heap::code_space(), &RelocateCodeObject); |
| 1611 int live_cells = IterateLiveObjects(Heap::cell_space(), &RelocateCellObject); | 1655 int live_cells = IterateLiveObjects(Heap::cell_space(), &RelocateCellObject); |
| 1612 int live_news = IterateLiveObjects(Heap::new_space(), &RelocateNewObject); | 1656 int live_news = IterateLiveObjects(Heap::new_space(), &RelocateNewObject); |
| 1613 | 1657 |
| 1614 USE(live_maps); | 1658 USE(live_maps); |
| 1615 USE(live_data_olds); | 1659 USE(live_data_olds); |
| 1616 USE(live_pointer_olds); | 1660 USE(live_pointer_olds); |
| 1617 USE(live_codes); | 1661 USE(live_codes); |
| 1618 USE(live_cells); | 1662 USE(live_cells); |
| 1619 USE(live_news); | 1663 USE(live_news); |
| 1620 ASSERT(live_maps == live_map_objects_); | 1664 ASSERT(live_maps == data.live_map_objects_); |
| 1621 ASSERT(live_data_olds == live_old_data_objects_); | 1665 ASSERT(live_data_olds == data.live_old_data_objects_); |
| 1622 ASSERT(live_pointer_olds == live_old_pointer_objects_); | 1666 ASSERT(live_pointer_olds == data.live_old_pointer_objects_); |
| 1623 ASSERT(live_codes == live_code_objects_); | 1667 ASSERT(live_codes == data.live_code_objects_); |
| 1624 ASSERT(live_cells == live_cell_objects_); | 1668 ASSERT(live_cells == data.live_cell_objects_); |
| 1625 ASSERT(live_news == live_young_objects_); | 1669 ASSERT(live_news == data.live_young_objects_); |
| 1626 | 1670 |
| 1627 // Flip from and to spaces | 1671 // Flip from and to spaces |
| 1628 Heap::new_space()->Flip(); | 1672 Heap::new_space()->Flip(); |
| 1629 | 1673 |
| 1630 // Set age_mark to bottom in to space | 1674 // Set age_mark to bottom in to space |
| 1631 Address mark = Heap::new_space()->bottom(); | 1675 Address mark = Heap::new_space()->bottom(); |
| 1632 Heap::new_space()->set_age_mark(mark); | 1676 Heap::new_space()->set_age_mark(mark); |
| 1633 | 1677 |
| 1634 Heap::new_space()->MCCommitRelocationInfo(); | 1678 Heap::new_space()->MCCommitRelocationInfo(); |
| 1635 #ifdef DEBUG | 1679 #ifdef DEBUG |
| (...skipping 165 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1801 | 1845 |
| 1802 return obj_size; | 1846 return obj_size; |
| 1803 } | 1847 } |
| 1804 | 1848 |
| 1805 | 1849 |
| 1806 // ------------------------------------------------------------------------- | 1850 // ------------------------------------------------------------------------- |
| 1807 // Phase 5: rebuild remembered sets | 1851 // Phase 5: rebuild remembered sets |
| 1808 | 1852 |
| 1809 void MarkCompactCollector::RebuildRSets() { | 1853 void MarkCompactCollector::RebuildRSets() { |
| 1810 #ifdef DEBUG | 1854 #ifdef DEBUG |
| 1811 ASSERT(state_ == RELOCATE_OBJECTS); | 1855 MarkCompactCollectorData& data = v8_context()->mark_compact_collector_data_; |
| 1812 state_ = REBUILD_RSETS; | 1856 ASSERT(data.state_ == MarkCompactCollectorData::RELOCATE_OBJECTS); |
| 1857 data.state_ = MarkCompactCollectorData::REBUILD_RSETS; |
| 1813 #endif | 1858 #endif |
| 1814 Heap::RebuildRSets(); | 1859 Heap::RebuildRSets(); |
| 1815 } | 1860 } |
| 1816 | 1861 |
| 1817 } } // namespace v8::internal | 1862 } } // namespace v8::internal |
| OLD | NEW |