| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 26 matching lines...) |
| 37 #include "mark-compact.h" | 37 #include "mark-compact.h" |
| 38 #include "objects-visiting.h" | 38 #include "objects-visiting.h" |
| 39 #include "stub-cache.h" | 39 #include "stub-cache.h" |
| 40 | 40 |
| 41 namespace v8 { | 41 namespace v8 { |
| 42 namespace internal { | 42 namespace internal { |
| 43 | 43 |
| 44 // ------------------------------------------------------------------------- | 44 // ------------------------------------------------------------------------- |
| 45 // MarkCompactCollector | 45 // MarkCompactCollector |
| 46 | 46 |
| 47 bool MarkCompactCollector::force_compaction_ = false; | 47 MarkCompactCollector::MarkCompactCollector() : // NOLINT |
| 48 bool MarkCompactCollector::sweep_precisely_ = false; | |
| 49 bool MarkCompactCollector::compacting_collection_ = false; | |
| 50 bool MarkCompactCollector::compact_on_next_gc_ = false; | |
| 51 | |
| 52 GCTracer* MarkCompactCollector::tracer_ = NULL; | |
| 53 | |
| 54 #ifdef DEBUG | 48 #ifdef DEBUG |
| 55 MarkCompactCollector::CollectorState MarkCompactCollector::state_ = IDLE; | 49 state_(IDLE), |
| 56 | |
| 57 // Counters used for debugging the marking phase of mark-compact or mark-sweep | |
| 58 // collection. | |
| 59 int MarkCompactCollector::live_bytes_ = 0; | |
| 60 int MarkCompactCollector::live_young_objects_size_ = 0; | |
| 61 int MarkCompactCollector::live_old_data_objects_size_ = 0; | |
| 62 int MarkCompactCollector::live_old_pointer_objects_size_ = 0; | |
| 63 int MarkCompactCollector::live_code_objects_size_ = 0; | |
| 64 int MarkCompactCollector::live_map_objects_size_ = 0; | |
| 65 int MarkCompactCollector::live_cell_objects_size_ = 0; | |
| 66 int MarkCompactCollector::live_lo_objects_size_ = 0; | |
| 67 #endif | 50 #endif |
| 68 | 51 force_compaction_(false), |
| 69 | 52 compacting_collection_(false), |
| 70 Marking::NewSpaceMarkbitsBitmap* Marking::new_space_bitmap_ = NULL; | 53 compact_on_next_gc_(false), |
| 54 previous_marked_count_(0), |
| 55 tracer_(NULL), |
| 56 #ifdef DEBUG |
| 57 live_young_objects_size_(0), |
| 58 live_old_pointer_objects_size_(0), |
| 59 live_old_data_objects_size_(0), |
| 60 live_code_objects_size_(0), |
| 61 live_map_objects_size_(0), |
| 62 live_cell_objects_size_(0), |
| 63 live_lo_objects_size_(0), |
| 64 live_bytes_(0), |
| 65 #endif |
| 66 heap_(NULL), |
| 67 code_flusher_(NULL) { } |
| 71 | 68 |
| 72 | 69 |
| 73 bool Marking::Setup() { | 70 bool Marking::Setup() { |
| 74 if (new_space_bitmap_ == NULL) { | 71 if (new_space_bitmap_ == NULL) { |
| 72 // TODO(gc) ISOLATES |
| 75 int markbits_per_newspace = | 73 int markbits_per_newspace = |
| 76 (2*Heap::ReservedSemiSpaceSize()) >> kPointerSizeLog2; | 74 (2*HEAP->ReservedSemiSpaceSize()) >> kPointerSizeLog2; |
| 77 | 75 |
| 78 new_space_bitmap_ = | 76 new_space_bitmap_ = |
| 79 BitmapStorageDescriptor::Allocate( | 77 BitmapStorageDescriptor::Allocate( |
| 80 NewSpaceMarkbitsBitmap::CellsForLength(markbits_per_newspace)); | 78 NewSpaceMarkbitsBitmap::CellsForLength(markbits_per_newspace)); |
| 81 } | 79 } |
| 82 return new_space_bitmap_ != NULL; | 80 return new_space_bitmap_ != NULL; |
| 83 } | 81 } |
| 84 | 82 |
| 85 | 83 |
| 86 void Marking::TearDown() { | 84 void Marking::TearDown() { |
| 87 if (new_space_bitmap_ != NULL) { | 85 if (new_space_bitmap_ != NULL) { |
| 88 BitmapStorageDescriptor::Free(new_space_bitmap_); | 86 BitmapStorageDescriptor::Free(new_space_bitmap_); |
| 89 new_space_bitmap_ = NULL; | 87 new_space_bitmap_ = NULL; |
| 90 } | 88 } |
| 91 } | 89 } |
| 92 | 90 |
| 93 | 91 |
| 94 #ifdef DEBUG | 92 #ifdef DEBUG |
| 95 class VerifyMarkingVisitor: public ObjectVisitor { | 93 class VerifyMarkingVisitor: public ObjectVisitor { |
| 96 public: | 94 public: |
| 97 void VisitPointers(Object** start, Object** end) { | 95 void VisitPointers(Object** start, Object** end) { |
| 98 for (Object** current = start; current < end; current++) { | 96 for (Object** current = start; current < end; current++) { |
| 99 if ((*current)->IsHeapObject()) { | 97 if ((*current)->IsHeapObject()) { |
| 100 HeapObject* object = HeapObject::cast(*current); | 98 HeapObject* object = HeapObject::cast(*current); |
| 101 ASSERT(MarkCompactCollector::IsMarked(object)); | 99 ASSERT(HEAP->mark_compact_collector()->IsMarked(object)); |
| 102 } | 100 } |
| 103 } | 101 } |
| 104 } | 102 } |
| 105 }; | 103 }; |
| 106 | 104 |
| 107 | 105 |
| 108 static void VerifyMarking(Address bottom, Address top) { | 106 static void VerifyMarking(Address bottom, Address top) { |
| 109 VerifyMarkingVisitor visitor; | 107 VerifyMarkingVisitor visitor; |
| 110 HeapObject* object; | 108 HeapObject* object; |
| 111 Address next_object_must_be_here_or_later = bottom; | 109 Address next_object_must_be_here_or_later = bottom; |
| 112 | 110 |
| 113 for (Address current = bottom; | 111 for (Address current = bottom; |
| 114 current < top; | 112 current < top; |
| 115 current += kPointerSize) { | 113 current += kPointerSize) { |
| 116 object = HeapObject::FromAddress(current); | 114 object = HeapObject::FromAddress(current); |
| 117 if (MarkCompactCollector::IsMarked(object)) { | 115 if (HEAP->mark_compact_collector()->IsMarked(object)) { |
| 118 ASSERT(current >= next_object_must_be_here_or_later); | 116 ASSERT(current >= next_object_must_be_here_or_later); |
| 119 object->Iterate(&visitor); | 117 object->Iterate(&visitor); |
| 120 next_object_must_be_here_or_later = current + object->Size(); | 118 next_object_must_be_here_or_later = current + object->Size(); |
| 121 } | 119 } |
| 122 } | 120 } |
| 123 } | 121 } |
| 124 | 122 |
| 125 | 123 |
| 126 static void VerifyMarking(Page* p) { | 124 static void VerifyMarking(Page* p) { |
| 127 VerifyMarking(p->ObjectAreaStart(), p->ObjectAreaEnd()); | 125 VerifyMarking(p->ObjectAreaStart(), p->ObjectAreaEnd()); |
| 128 } | 126 } |
| 129 | 127 |
| 130 | 128 |
| 131 static void VerifyMarking(NewSpace* space) { | 129 static void VerifyMarking(NewSpace* space) { |
| 132 VerifyMarking(space->bottom(), space->top()); | 130 VerifyMarking(space->bottom(), space->top()); |
| 133 } | 131 } |
| 134 | 132 |
| 135 | 133 |
| 136 static void VerifyMarking(PagedSpace* space) { | 134 static void VerifyMarking(PagedSpace* space) { |
| 137 PageIterator it(space); | 135 PageIterator it(space); |
| 138 | 136 |
| 139 while (it.has_next()) { | 137 while (it.has_next()) { |
| 140 VerifyMarking(it.next()); | 138 VerifyMarking(it.next()); |
| 141 } | 139 } |
| 142 } | 140 } |
| 143 | 141 |
| 144 | 142 |
| 145 static void VerifyMarking() { | 143 static void VerifyMarking() { |
| 146 VerifyMarking(Heap::old_pointer_space()); | 144 // TODO(gc) ISOLATES |
| 147 VerifyMarking(Heap::old_data_space()); | 145 VerifyMarking(HEAP->old_pointer_space()); |
| 148 VerifyMarking(Heap::code_space()); | 146 VerifyMarking(HEAP->old_data_space()); |
| 149 VerifyMarking(Heap::cell_space()); | 147 VerifyMarking(HEAP->code_space()); |
| 150 VerifyMarking(Heap::map_space()); | 148 VerifyMarking(HEAP->cell_space()); |
| 151 VerifyMarking(Heap::new_space()); | 149 VerifyMarking(HEAP->map_space()); |
| 150 VerifyMarking(HEAP->new_space()); |
| 152 | 151 |
| 153 VerifyMarkingVisitor visitor; | 152 VerifyMarkingVisitor visitor; |
| 154 Heap::IterateStrongRoots(&visitor, VISIT_ONLY_STRONG); | 153 HEAP->IterateStrongRoots(&visitor, VISIT_ONLY_STRONG); |
| 155 } | 154 } |
| 156 #endif | 155 #endif |
| 157 | 156 |
| 158 void MarkCompactCollector::CollectGarbage() { | 157 void MarkCompactCollector::CollectGarbage() { |
| 159 // Make sure that Prepare() has been called. The individual steps below will | 158 // Make sure that Prepare() has been called. The individual steps below will |
| 160 // update the state as they proceed. | 159 // update the state as they proceed. |
| 161 ASSERT(state_ == PREPARE_GC); | 160 ASSERT(state_ == PREPARE_GC); |
| 162 | 161 |
| 163 // Prepare has selected whether to compact the old generation or not. | 162 // Prepare has selected whether to compact the old generation or not. |
| 164 // Tell the tracer. | 163 // Tell the tracer. |
| 165 if (IsCompacting()) tracer_->set_is_compacting(); | 164 if (IsCompacting()) tracer_->set_is_compacting(); |
| 166 | 165 |
| 167 if (IncrementalMarking::IsStopped()) { | 166 if (heap_->incremental_marking()->IsStopped()) { |
| 168 MarkLiveObjects(); | 167 MarkLiveObjects(); |
| 169 } else { | 168 } else { |
| 170 { | 169 { |
| 171 GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_MARK); | 170 GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_MARK); |
| 172 IncrementalMarking::Finalize(); | 171 heap_->incremental_marking()->Finalize(); |
| 173 ASSERT(IncrementalMarking::IsStopped()); | 172 ASSERT(heap_->incremental_marking()->IsStopped()); |
| 174 } | 173 } |
| 175 MarkLiveObjects(); | 174 MarkLiveObjects(); |
| 176 } | 175 } |
| 177 | 176 |
| 178 if (FLAG_collect_maps) ClearNonLiveTransitions(); | 177 if (FLAG_collect_maps) ClearNonLiveTransitions(); |
| 179 | 178 |
| 180 #ifdef DEBUG | 179 #ifdef DEBUG |
| 181 VerifyMarking(); | 180 VerifyMarking(); |
| 182 #endif | 181 #endif |
| 183 | 182 |
| 184 SweepSpaces(); | 183 SweepSpaces(); |
| 185 | 184 |
| 186 PcToCodeCache::FlushPcToCodeCache(); | 185 // TODO(gc) ISOLATES |
| 186 ISOLATE->pc_to_code_cache()->Flush(); |
| 187 | 187 |
| 188 Finish(); | 188 Finish(); |
| 189 | 189 |
| 190 // Check that we swept all marked objects and | 190 // Check that we swept all marked objects and |
| 191 // null out the GC tracer. | 191 // null out the GC tracer. |
| 192 // TODO(gc) does not work with conservative sweeping. | 192 // TODO(gc) does not work with conservative sweeping. |
| 193 // ASSERT(tracer_->marked_count() == 0); | 193 // ASSERT(tracer_->marked_count() == 0); |
| 194 tracer_ = NULL; | 194 tracer_ = NULL; |
| 195 } | 195 } |
| 196 | 196 |
| 197 | 197 |
| 198 #ifdef DEBUG | 198 #ifdef DEBUG |
| 199 static void VerifyMarkbitsAreClean(PagedSpace* space) { | 199 static void VerifyMarkbitsAreClean(PagedSpace* space) { |
| 200 PageIterator it(space); | 200 PageIterator it(space); |
| 201 | 201 |
| 202 while (it.has_next()) { | 202 while (it.has_next()) { |
| 203 Page* p = it.next(); | 203 Page* p = it.next(); |
| 204 ASSERT(p->markbits()->IsClean()); | 204 ASSERT(p->markbits()->IsClean()); |
| 205 } | 205 } |
| 206 } | 206 } |
| 207 | 207 |
| 208 static void VerifyMarkbitsAreClean() { | 208 static void VerifyMarkbitsAreClean() { |
| 209 VerifyMarkbitsAreClean(Heap::old_pointer_space()); | 209 VerifyMarkbitsAreClean(HEAP->old_pointer_space()); |
| 210 VerifyMarkbitsAreClean(Heap::old_data_space()); | 210 VerifyMarkbitsAreClean(HEAP->old_data_space()); |
| 211 VerifyMarkbitsAreClean(Heap::code_space()); | 211 VerifyMarkbitsAreClean(HEAP->code_space()); |
| 212 VerifyMarkbitsAreClean(Heap::cell_space()); | 212 VerifyMarkbitsAreClean(HEAP->cell_space()); |
| 213 VerifyMarkbitsAreClean(Heap::map_space()); | 213 VerifyMarkbitsAreClean(HEAP->map_space()); |
| 214 } | 214 } |
| 215 #endif | 215 #endif |
| 216 | 216 |
| 217 | 217 |
| 218 static void ClearMarkbits(PagedSpace* space) { | 218 static void ClearMarkbits(PagedSpace* space) { |
| 219 PageIterator it(space); | 219 PageIterator it(space); |
| 220 | 220 |
| 221 while (it.has_next()) { | 221 while (it.has_next()) { |
| 222 Page* p = it.next(); | 222 Page* p = it.next(); |
| 223 p->markbits()->Clear(); | 223 p->markbits()->Clear(); |
| 224 } | 224 } |
| 225 } | 225 } |
| 226 | 226 |
| 227 | 227 |
| 228 static void ClearMarkbits() { | 228 static void ClearMarkbits() { |
| 229 // TODO(gc): Clean the mark bits while sweeping. | 229 // TODO(gc): Clean the mark bits while sweeping. |
| 230 ClearMarkbits(Heap::code_space()); | 230 ClearMarkbits(HEAP->code_space()); |
| 231 ClearMarkbits(Heap::map_space()); | 231 ClearMarkbits(HEAP->map_space()); |
| 232 ClearMarkbits(Heap::old_pointer_space()); | 232 ClearMarkbits(HEAP->old_pointer_space()); |
| 233 ClearMarkbits(Heap::old_data_space()); | 233 ClearMarkbits(HEAP->old_data_space()); |
| 234 ClearMarkbits(Heap::cell_space()); | 234 ClearMarkbits(HEAP->cell_space()); |
| 235 } | 235 } |
| 236 | 236 |
| 237 | 237 |
| 238 void Marking::TransferMark(Address old_start, Address new_start) { | 238 void Marking::TransferMark(Address old_start, Address new_start) { |
| 239 if (old_start == new_start) return; | 239 if (old_start == new_start) return; |
| 240 | 240 |
| 241 MarkBit new_mark_bit = Marking::MarkBitFrom(new_start); | 241 MarkBit new_mark_bit = MarkBitFrom(new_start); |
| 242 | 242 |
| 243 if (!IncrementalMarking::IsStopped()) { | 243 if (!heap_->incremental_marking()->IsStopped()) { |
| 244 MarkBit old_mark_bit = Marking::MarkBitFrom(old_start); | 244 MarkBit old_mark_bit = MarkBitFrom(old_start); |
| 245 if (IncrementalMarking::IsBlack(old_mark_bit)) { | 245 if (heap_->incremental_marking()->IsBlack(old_mark_bit)) { |
| 246 IncrementalMarking::MarkBlack(new_mark_bit); | 246 heap_->incremental_marking()->MarkBlack(new_mark_bit); |
| 247 old_mark_bit.Clear(); | 247 old_mark_bit.Clear(); |
| 248 } else if (IncrementalMarking::IsGrey(old_mark_bit)) { | 248 } else if (heap_->incremental_marking()->IsGrey(old_mark_bit)) { |
| 249 old_mark_bit.Next().Clear(); | 249 old_mark_bit.Next().Clear(); |
| 250 IncrementalMarking::WhiteToGreyAndPush(HeapObject::FromAddress(new_start), | 250 heap_->incremental_marking()->WhiteToGreyAndPush( |
| 251 new_mark_bit); | 251 HeapObject::FromAddress(new_start), new_mark_bit); |
| 252 IncrementalMarking::RestartIfNotMarking(); | 252 heap_->incremental_marking()->RestartIfNotMarking(); |
| 253 // TODO(gc): if we shift a huge array in the loop we might end up pushing | 253 // TODO(gc): if we shift a huge array in the loop we might end up pushing |
| 254 // too much onto the marking stack. Maybe we should check one or two elements | 254 // too much onto the marking stack. Maybe we should check one or two elements |
| 255 // on top of it to see whether they are equal to old_start. | 255 // on top of it to see whether they are equal to old_start. |
| 256 } | 256 } |
| 257 } else { | 257 } else { |
| 258 ASSERT(IncrementalMarking::IsStopped()); | 258 ASSERT(heap_->incremental_marking()->IsStopped()); |
| 259 if (Heap::InNewSpace(old_start)) { | 259 if (heap_->InNewSpace(old_start)) { |
| 260 return; | 260 return; |
| 261 } else { | 261 } else { |
| 262 MarkBit old_mark_bit = Marking::MarkBitFrom(old_start); | 262 MarkBit old_mark_bit = MarkBitFrom(old_start); |
| 263 if (!old_mark_bit.Get()) { | 263 if (!old_mark_bit.Get()) { |
| 264 return; | 264 return; |
| 265 } | 265 } |
| 266 } | 266 } |
| 267 new_mark_bit.Set(); | 267 new_mark_bit.Set(); |
| 268 } | 268 } |
| 269 } | 269 } |
| 270 | 270 |
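Marking::TransferMark above is phrased in terms of the tri-color abstraction used by incremental marking: white objects are unvisited, grey objects are marked but still need their fields scanned, black objects are marked and fully scanned. Below is a minimal conceptual sketch of transferring the mark state when an object's start address moves, assuming a plain Color enum and an explicit grey worklist rather than V8's mark-bit encoding; all names (Obj, TransferColor, grey_worklist) are invented for illustration, and the new-space special case is omitted.

```cpp
#include <deque>

enum class Color { WHITE, GREY, BLACK };

struct Obj { Color color = Color::WHITE; };

// Conceptual equivalent of Marking::TransferMark: move the mark state from
// the old start address to the new one.
void TransferColor(Obj* old_obj, Obj* new_obj,
                   bool incremental_marking_active,
                   std::deque<Obj*>* grey_worklist) {
  if (incremental_marking_active) {
    if (old_obj->color == Color::BLACK) {
      new_obj->color = Color::BLACK;      // already fully scanned
      old_obj->color = Color::WHITE;
    } else if (old_obj->color == Color::GREY) {
      old_obj->color = Color::WHITE;
      new_obj->color = Color::GREY;       // still needs scanning
      grey_worklist->push_back(new_obj);  // re-push under the new start
    }
    // A white old object needs nothing: it was never marked.
  } else {
    // Non-incremental mark-compact: only a plain mark bit is transferred.
    if (old_obj->color != Color::WHITE) new_obj->color = Color::BLACK;
  }
}

int main() {
  std::deque<Obj*> worklist;
  Obj old_obj;
  old_obj.color = Color::GREY;
  Obj new_obj;
  TransferColor(&old_obj, &new_obj, /*incremental_marking_active=*/true, &worklist);
  // new_obj is now grey and queued; old_obj is white again.
}
```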
| 271 | 271 |
| 272 void MarkCompactCollector::Prepare(GCTracer* tracer) { | 272 void MarkCompactCollector::Prepare(GCTracer* tracer) { |
| (...skipping 14 matching lines...) |
| 287 ASSERT(state_ == IDLE); | 287 ASSERT(state_ == IDLE); |
| 288 state_ = PREPARE_GC; | 288 state_ = PREPARE_GC; |
| 289 #endif | 289 #endif |
| 290 ASSERT(!FLAG_always_compact || !FLAG_never_compact); | 290 ASSERT(!FLAG_always_compact || !FLAG_never_compact); |
| 291 | 291 |
| 292 compacting_collection_ = | 292 compacting_collection_ = |
| 293 FLAG_always_compact || force_compaction_ || compact_on_next_gc_; | 293 FLAG_always_compact || force_compaction_ || compact_on_next_gc_; |
| 294 compact_on_next_gc_ = false; | 294 compact_on_next_gc_ = false; |
| 295 | 295 |
| 296 if (FLAG_never_compact) compacting_collection_ = false; | 296 if (FLAG_never_compact) compacting_collection_ = false; |
| 297 if (!Heap::map_space()->MapPointersEncodable()) { | 297 if (!HEAP->map_space()->MapPointersEncodable()) { |
| 298 compacting_collection_ = false; | 298 compacting_collection_ = false; |
| 299 } | 299 } |
| 300 if (FLAG_collect_maps) CreateBackPointers(); | 300 if (FLAG_collect_maps) CreateBackPointers(); |
| 301 #ifdef ENABLE_GDB_JIT_INTERFACE | 301 #ifdef ENABLE_GDB_JIT_INTERFACE |
| 302 if (FLAG_gdbjit) { | 302 if (FLAG_gdbjit) { |
| 303 // If GDBJIT interface is active disable compaction. | 303 // If GDBJIT interface is active disable compaction. |
| 304 compacting_collection_ = false; | 304 compacting_collection_ = false; |
| 305 } | 305 } |
| 306 #endif | 306 #endif |
| 307 | 307 |
| 308 PagedSpaces spaces; | 308 PagedSpaces spaces; |
| 309 for (PagedSpace* space = spaces.next(); | 309 for (PagedSpace* space = spaces.next(); |
| 310 space != NULL; space = spaces.next()) { | 310 space != NULL; space = spaces.next()) { |
| 311 space->PrepareForMarkCompact(compacting_collection_); | 311 space->PrepareForMarkCompact(compacting_collection_); |
| 312 } | 312 } |
| 313 | 313 |
| 314 if (IncrementalMarking::state() == IncrementalMarking::STOPPED) { | 314 if (heap()->incremental_marking()->IsStopped()) { |
| 315 Address new_space_bottom = Heap::new_space()->bottom(); | 315 Address new_space_bottom = heap()->new_space()->bottom(); |
| 316 uintptr_t new_space_size = | 316 uintptr_t new_space_size = |
| 317 RoundUp(Heap::new_space()->top() - new_space_bottom, 32 * kPointerSize); | 317 RoundUp(heap()->new_space()->top() - new_space_bottom, |
| 318 32 * kPointerSize); |
| 318 | 319 |
| 319 Marking::ClearRange(new_space_bottom, new_space_size); | 320 heap()->marking()->ClearRange(new_space_bottom, new_space_size); |
| 320 | 321 |
| 321 ClearMarkbits(); | 322 ClearMarkbits(); |
| 322 #ifdef DEBUG | 323 #ifdef DEBUG |
| 323 VerifyMarkbitsAreClean(); | 324 VerifyMarkbitsAreClean(); |
| 324 #endif | 325 #endif |
| 325 } | 326 } |
| 326 | 327 |
| 327 #ifdef DEBUG | 328 #ifdef DEBUG |
| 328 live_bytes_ = 0; | 329 live_bytes_ = 0; |
| 329 live_young_objects_size_ = 0; | 330 live_young_objects_size_ = 0; |
| 330 live_old_pointer_objects_size_ = 0; | 331 live_old_pointer_objects_size_ = 0; |
| 331 live_old_data_objects_size_ = 0; | 332 live_old_data_objects_size_ = 0; |
| 332 live_code_objects_size_ = 0; | 333 live_code_objects_size_ = 0; |
| 333 live_map_objects_size_ = 0; | 334 live_map_objects_size_ = 0; |
| 334 live_cell_objects_size_ = 0; | 335 live_cell_objects_size_ = 0; |
| 335 live_lo_objects_size_ = 0; | 336 live_lo_objects_size_ = 0; |
| 336 #endif | 337 #endif |
| 337 } | 338 } |
| 338 | 339 |
| 339 | 340 |
| 340 void MarkCompactCollector::Finish() { | 341 void MarkCompactCollector::Finish() { |
| 341 #ifdef DEBUG | 342 #ifdef DEBUG |
| 342 ASSERT(state_ == SWEEP_SPACES || state_ == RELOCATE_OBJECTS); | 343 ASSERT(state_ == SWEEP_SPACES || state_ == RELOCATE_OBJECTS); |
| 343 state_ = IDLE; | 344 state_ = IDLE; |
| 344 #endif | 345 #endif |
| 345 // The stub cache is not traversed during GC; clear the cache to | 346 // The stub cache is not traversed during GC; clear the cache to |
| 346 // force lazy re-initialization of it. This must be done after the | 347 // force lazy re-initialization of it. This must be done after the |
| 347 // GC, because it relies on the new address of certain old space | 348 // GC, because it relies on the new address of certain old space |
| 348 // objects (empty string, illegal builtin). | 349 // objects (empty string, illegal builtin). |
| 349 StubCache::Clear(); | 350 Isolate::Current()->stub_cache()->Clear(); |
| 350 | 351 |
| 351 ExternalStringTable::CleanUp(); | 352 heap_->external_string_table_.CleanUp(); |
| 352 | 353 |
| 353 // If we've just compacted old space there's no reason to check the | 354 // If we've just compacted old space there's no reason to check the |
| 354 // fragmentation limit. Just return. | 355 // fragmentation limit. Just return. |
| 355 if (HasCompacted()) return; | 356 if (HasCompacted()) return; |
| 356 | 357 |
| 357 // We compact the old generation on the next GC if it has gotten too | 358 // We compact the old generation on the next GC if it has gotten too |
| 358 // fragmented (ie, we could recover an expected amount of space by | 359 // fragmented (ie, we could recover an expected amount of space by |
| 359 // reclaiming the waste and free list blocks). | 360 // reclaiming the waste and free list blocks). |
| 360 static const int kFragmentationLimit = 15; // Percent. | 361 static const int kFragmentationLimit = 15; // Percent. |
| 361 static const int kFragmentationAllowed = 1 * MB; // Absolute. | 362 static const int kFragmentationAllowed = 1 * MB; // Absolute. |
| (...skipping 36 matching lines...) |
| 398 // overflow flag. When the overflow flag is set, we continue marking objects | 399 // overflow flag. When the overflow flag is set, we continue marking objects |
| 399 // reachable from the objects on the marking stack, but no longer push them on | 400 // reachable from the objects on the marking stack, but no longer push them on |
| 400 // the marking stack. Instead, we mark them as both marked and overflowed. | 401 // the marking stack. Instead, we mark them as both marked and overflowed. |
| 401 // When the stack is in the overflowed state, objects marked as overflowed | 402 // When the stack is in the overflowed state, objects marked as overflowed |
| 402 // have been reached and marked but their children have not been visited yet. | 403 // have been reached and marked but their children have not been visited yet. |
| 403 // After emptying the marking stack, we clear the overflow flag and traverse | 404 // After emptying the marking stack, we clear the overflow flag and traverse |
| 404 // the heap looking for objects marked as overflowed, push them on the stack, | 405 // the heap looking for objects marked as overflowed, push them on the stack, |
| 405 // and continue with marking. This process repeats until all reachable | 406 // and continue with marking. This process repeats until all reachable |
| 406 // objects have been marked. | 407 // objects have been marked. |
| 407 | 408 |
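The comment above is the heart of the marking loop: the marking stack has a fixed capacity, and overflow is recovered from by rescanning the heap for objects that were marked as overflowed and refilling the stack until everything reachable is marked. A self-contained sketch of that scheme follows; Obj, MarkingStack, and MarkFromRoots are invented stand-ins, not V8's real types.

```cpp
#include <cstddef>
#include <iostream>
#include <vector>

struct Obj {
  bool marked = false;
  bool overflowed = false;
  std::vector<Obj*> children;  // outgoing pointers
};

class MarkingStack {
 public:
  explicit MarkingStack(size_t capacity) : capacity_(capacity) {}
  bool is_full() const { return stack_.size() >= capacity_; }
  bool is_empty() const { return stack_.empty(); }
  bool overflowed() const { return overflowed_; }
  void set_overflowed() { overflowed_ = true; }
  void clear_overflowed() { overflowed_ = false; }
  void Push(Obj* o) { stack_.push_back(o); }
  Obj* Pop() { Obj* o = stack_.back(); stack_.pop_back(); return o; }
 private:
  size_t capacity_;
  bool overflowed_ = false;
  std::vector<Obj*> stack_;
};

// Mark an object; push it for scanning, or flag it as overflowed if the
// stack is full (its children are then visited after a later heap rescan).
void MarkAndPush(Obj* o, MarkingStack* stack) {
  if (o->marked) return;
  o->marked = true;
  if (stack->is_full()) {
    o->overflowed = true;
    stack->set_overflowed();
  } else {
    stack->Push(o);
  }
}

void EmptyStack(MarkingStack* stack) {
  while (!stack->is_empty()) {
    Obj* o = stack->Pop();
    for (Obj* child : o->children) MarkAndPush(child, stack);
  }
}

void MarkFromRoots(const std::vector<Obj*>& roots,
                   std::vector<Obj>& heap,  // stand-in for a heap iterator
                   MarkingStack* stack) {
  for (Obj* root : roots) MarkAndPush(root, stack);
  EmptyStack(stack);
  // Overflow recovery: clear the flag, rescan the heap for overflowed
  // objects, refill the stack and drain it again until no overflow remains.
  while (stack->overflowed()) {
    stack->clear_overflowed();
    for (Obj& o : heap) {
      if (!o.overflowed) continue;
      if (stack->is_full()) {      // could not refill everything this round;
        stack->set_overflowed();   // remember to rescan the heap once more
        break;
      }
      o.overflowed = false;
      stack->Push(&o);
    }
    EmptyStack(stack);
  }
}

int main() {
  std::vector<Obj> heap(8);
  for (size_t i = 1; i < heap.size(); i++)
    heap[0].children.push_back(&heap[i]);
  MarkingStack stack(2);  // deliberately tiny to force overflow
  MarkFromRoots({&heap[0]}, heap, &stack);
  for (size_t i = 0; i < heap.size(); i++)
    std::cout << "object " << i << " marked=" << heap[i].marked << "\n";
}
```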
| 408 static MarkingStack marking_stack; | 409 class CodeFlusher { |
| 410 public: |
| 411 explicit CodeFlusher(Isolate* isolate) |
| 412 : isolate_(isolate), |
| 413 jsfunction_candidates_head_(NULL), |
| 414 shared_function_info_candidates_head_(NULL) {} |
| 409 | 415 |
| 410 class FlushCode : public AllStatic { | 416 void AddCandidate(SharedFunctionInfo* shared_info) { |
| 411 public: | |
| 412 static void AddCandidate(SharedFunctionInfo* shared_info) { | |
| 413 SetNextCandidate(shared_info, shared_function_info_candidates_head_); | 417 SetNextCandidate(shared_info, shared_function_info_candidates_head_); |
| 414 shared_function_info_candidates_head_ = shared_info; | 418 shared_function_info_candidates_head_ = shared_info; |
| 415 } | 419 } |
| 416 | 420 |
| 417 | 421 void AddCandidate(JSFunction* function) { |
| 418 static void AddCandidate(JSFunction* function) { | |
| 419 ASSERT(function->unchecked_code() == | 422 ASSERT(function->unchecked_code() == |
| 420 function->unchecked_shared()->unchecked_code()); | 423 function->unchecked_shared()->unchecked_code()); |
| 421 | 424 |
| 422 SetNextCandidate(function, jsfunction_candidates_head_); | 425 SetNextCandidate(function, jsfunction_candidates_head_); |
| 423 jsfunction_candidates_head_ = function; | 426 jsfunction_candidates_head_ = function; |
| 424 } | 427 } |
| 425 | 428 |
| 426 | 429 void ProcessCandidates() { |
| 427 static void ProcessCandidates() { | |
| 428 ProcessSharedFunctionInfoCandidates(); | 430 ProcessSharedFunctionInfoCandidates(); |
| 429 ProcessJSFunctionCandidates(); | 431 ProcessJSFunctionCandidates(); |
| 430 } | 432 } |
| 431 | 433 |
| 432 private: | 434 private: |
| 433 static void ProcessJSFunctionCandidates() { | 435 void ProcessJSFunctionCandidates() { |
| 434 Code* lazy_compile = Builtins::builtin(Builtins::LazyCompile); | 436 Code* lazy_compile = isolate_->builtins()->builtin(Builtins::LazyCompile); |
| 435 | 437 |
| 436 JSFunction* candidate = jsfunction_candidates_head_; | 438 JSFunction* candidate = jsfunction_candidates_head_; |
| 437 JSFunction* next_candidate; | 439 JSFunction* next_candidate; |
| 438 while (candidate != NULL) { | 440 while (candidate != NULL) { |
| 439 next_candidate = GetNextCandidate(candidate); | 441 next_candidate = GetNextCandidate(candidate); |
| 440 | 442 |
| 441 SharedFunctionInfo* shared = candidate->unchecked_shared(); | 443 SharedFunctionInfo* shared = candidate->unchecked_shared(); |
| 442 | 444 |
| 443 Code* code = shared->unchecked_code(); | 445 Code* code = shared->unchecked_code(); |
| 444 MarkBit code_mark = Marking::MarkBitFromOldSpace(code); | 446 MarkBit code_mark = Marking::MarkBitFromOldSpace(code); |
| 445 if (!code_mark.Get()) { | 447 if (!code_mark.Get()) { |
| 446 shared->set_code(lazy_compile); | 448 shared->set_code(lazy_compile); |
| 447 candidate->set_code(lazy_compile); | 449 candidate->set_code(lazy_compile); |
| 448 } else { | 450 } else { |
| 449 candidate->set_code(shared->unchecked_code()); | 451 candidate->set_code(shared->unchecked_code()); |
| 450 } | 452 } |
| 451 | 453 |
| 452 candidate = next_candidate; | 454 candidate = next_candidate; |
| 453 } | 455 } |
| 454 | 456 |
| 455 jsfunction_candidates_head_ = NULL; | 457 jsfunction_candidates_head_ = NULL; |
| 456 } | 458 } |
| 457 | 459 |
| 458 | 460 |
| 459 static void ProcessSharedFunctionInfoCandidates() { | 461 void ProcessSharedFunctionInfoCandidates() { |
| 460 Code* lazy_compile = Builtins::builtin(Builtins::LazyCompile); | 462 Code* lazy_compile = isolate_->builtins()->builtin(Builtins::LazyCompile); |
| 461 | 463 |
| 462 SharedFunctionInfo* candidate = shared_function_info_candidates_head_; | 464 SharedFunctionInfo* candidate = shared_function_info_candidates_head_; |
| 463 SharedFunctionInfo* next_candidate; | 465 SharedFunctionInfo* next_candidate; |
| 464 while (candidate != NULL) { | 466 while (candidate != NULL) { |
| 465 next_candidate = GetNextCandidate(candidate); | 467 next_candidate = GetNextCandidate(candidate); |
| 466 SetNextCandidate(candidate, NULL); | 468 SetNextCandidate(candidate, NULL); |
| 467 | 469 |
| 468 Code* code = candidate->unchecked_code(); | 470 Code* code = candidate->unchecked_code(); |
| 469 MarkBit code_mark = Marking::MarkBitFromOldSpace(code); | 471 MarkBit code_mark = Marking::MarkBitFromOldSpace(code); |
| 470 if (!code_mark.Get()) { | 472 if (!code_mark.Get()) { |
| 471 candidate->set_code(lazy_compile); | 473 candidate->set_code(lazy_compile); |
| 472 } | 474 } |
| 473 | 475 |
| 474 candidate = next_candidate; | 476 candidate = next_candidate; |
| 475 } | 477 } |
| 476 | 478 |
| 477 shared_function_info_candidates_head_ = NULL; | 479 shared_function_info_candidates_head_ = NULL; |
| 478 } | 480 } |
| 479 | 481 |
| 480 | |
| 481 static JSFunction** GetNextCandidateField(JSFunction* candidate) { | 482 static JSFunction** GetNextCandidateField(JSFunction* candidate) { |
| 482 return reinterpret_cast<JSFunction**>( | 483 return reinterpret_cast<JSFunction**>( |
| 483 candidate->address() + JSFunction::kCodeEntryOffset); | 484 candidate->address() + JSFunction::kCodeEntryOffset); |
| 484 } | 485 } |
| 485 | 486 |
| 486 | |
| 487 static JSFunction* GetNextCandidate(JSFunction* candidate) { | 487 static JSFunction* GetNextCandidate(JSFunction* candidate) { |
| 488 return *GetNextCandidateField(candidate); | 488 return *GetNextCandidateField(candidate); |
| 489 } | 489 } |
| 490 | 490 |
| 491 | |
| 492 static void SetNextCandidate(JSFunction* candidate, | 491 static void SetNextCandidate(JSFunction* candidate, |
| 493 JSFunction* next_candidate) { | 492 JSFunction* next_candidate) { |
| 494 *GetNextCandidateField(candidate) = next_candidate; | 493 *GetNextCandidateField(candidate) = next_candidate; |
| 495 } | 494 } |
| 496 | 495 |
| 497 | |
| 498 STATIC_ASSERT(kPointerSize <= Code::kHeaderSize - Code::kHeaderPaddingStart); | 496 STATIC_ASSERT(kPointerSize <= Code::kHeaderSize - Code::kHeaderPaddingStart); |
| 499 | 497 |
| 500 | |
| 501 static SharedFunctionInfo** GetNextCandidateField( | 498 static SharedFunctionInfo** GetNextCandidateField( |
| 502 SharedFunctionInfo* candidate) { | 499 SharedFunctionInfo* candidate) { |
| 503 Code* code = candidate->unchecked_code(); | 500 Code* code = candidate->unchecked_code(); |
| 504 return reinterpret_cast<SharedFunctionInfo**>( | 501 return reinterpret_cast<SharedFunctionInfo**>( |
| 505 code->address() + Code::kHeaderPaddingStart); | 502 code->address() + Code::kHeaderPaddingStart); |
| 506 } | 503 } |
| 507 | 504 |
| 508 | |
| 509 static SharedFunctionInfo* GetNextCandidate(SharedFunctionInfo* candidate) { | 505 static SharedFunctionInfo* GetNextCandidate(SharedFunctionInfo* candidate) { |
| 510 return *GetNextCandidateField(candidate); | 506 return *GetNextCandidateField(candidate); |
| 511 } | 507 } |
| 512 | 508 |
| 513 | |
| 514 static void SetNextCandidate(SharedFunctionInfo* candidate, | 509 static void SetNextCandidate(SharedFunctionInfo* candidate, |
| 515 SharedFunctionInfo* next_candidate) { | 510 SharedFunctionInfo* next_candidate) { |
| 516 *GetNextCandidateField(candidate) = next_candidate; | 511 *GetNextCandidateField(candidate) = next_candidate; |
| 517 } | 512 } |
| 518 | 513 |
| 519 static JSFunction* jsfunction_candidates_head_; | 514 Isolate* isolate_; |
| 515 JSFunction* jsfunction_candidates_head_; |
| 516 SharedFunctionInfo* shared_function_info_candidates_head_; |
| 520 | 517 |
| 521 static SharedFunctionInfo* shared_function_info_candidates_head_; | 518 DISALLOW_COPY_AND_ASSIGN(CodeFlusher); |
| 522 }; | 519 }; |
| 523 | 520 |
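A detail worth noting in CodeFlusher above: the candidate lists need no extra allocation because the "next" pointer is threaded through storage the candidate already owns (the JSFunction code-entry slot, or the padding in the code header for SharedFunctionInfo). A stand-alone sketch of that intrusive-list idea, using invented names (FakeFunction, Flusher) rather than V8 types:

```cpp
#include <iostream>

struct FakeFunction {
  const char* name;
  void* code_entry;  // normally points at compiled code; reused as "next" link
};

class Flusher {
 public:
  void AddCandidate(FakeFunction* f) {
    // Reuse the code_entry slot to link the candidate into the list.
    f->code_entry = head_;
    head_ = f;
  }
  void ProcessCandidates() {
    FakeFunction* candidate = head_;
    while (candidate != nullptr) {
      FakeFunction* next = static_cast<FakeFunction*>(candidate->code_entry);
      std::cout << "would flush or retain code for " << candidate->name << "\n";
      candidate->code_entry = nullptr;  // restore the reused slot
      candidate = next;
    }
    head_ = nullptr;
  }
 private:
  FakeFunction* head_ = nullptr;
};

int main() {
  FakeFunction a{"a", nullptr}, b{"b", nullptr};
  Flusher flusher;
  flusher.AddCandidate(&a);
  flusher.AddCandidate(&b);
  flusher.ProcessCandidates();  // prints b then a (LIFO order)
}
```

This mirrors why the real ProcessJSFunctionCandidates ends by writing a proper code object back into the slot it reused as a link.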
| 524 JSFunction* FlushCode::jsfunction_candidates_head_ = NULL; | |
| 525 | 521 |
| 526 SharedFunctionInfo* FlushCode::shared_function_info_candidates_head_ = NULL; | 522 MarkCompactCollector::~MarkCompactCollector() { |
| 523 if (code_flusher_ != NULL) { |
| 524 delete code_flusher_; |
| 525 code_flusher_ = NULL; |
| 526 } |
| 527 } |
| 528 |
| 527 | 529 |
| 528 static inline HeapObject* ShortCircuitConsString(Object** p) { | 530 static inline HeapObject* ShortCircuitConsString(Object** p) { |
| 529 // Optimization: If the heap object pointed to by p is a non-symbol | 531 // Optimization: If the heap object pointed to by p is a non-symbol |
| 530 // cons string whose right substring is Heap::empty_string, update | 532 // cons string whose right substring is HEAP->empty_string, update |
| 531 // it in place to its left substring. Return the updated value. | 533 // it in place to its left substring. Return the updated value. |
| 532 // | 534 // |
| 533 // Here we assume that if we change *p, we replace it with a heap object | 535 // Here we assume that if we change *p, we replace it with a heap object |
| 534 // (ie, the left substring of a cons string is always a heap object). | 536 // (ie, the left substring of a cons string is always a heap object). |
| 535 // | 537 // |
| 536 // The check performed is: | 538 // The check performed is: |
| 537 // object->IsConsString() && !object->IsSymbol() && | 539 // object->IsConsString() && !object->IsSymbol() && |
| 538 // (ConsString::cast(object)->second() == Heap::empty_string()) | 540 // (ConsString::cast(object)->second() == HEAP->empty_string()) |
| 539 // except the maps for the object and its possible substrings might be | 541 // except the maps for the object and its possible substrings might be |
| 540 // marked. | 542 // marked. |
| 541 HeapObject* object = HeapObject::cast(*p); | 543 HeapObject* object = HeapObject::cast(*p); |
| 542 InstanceType type = object->map()->instance_type(); | 544 Map* map = object->map(); |
| 545 InstanceType type = map->instance_type(); |
| 543 if ((type & kShortcutTypeMask) != kShortcutTypeTag) return object; | 546 if ((type & kShortcutTypeMask) != kShortcutTypeTag) return object; |
| 544 | 547 |
| 545 Object* second = reinterpret_cast<ConsString*>(object)->unchecked_second(); | 548 Object* second = reinterpret_cast<ConsString*>(object)->unchecked_second(); |
| 546 if (second != Heap::raw_unchecked_empty_string()) { | 549 Heap* heap = map->heap(); |
| 550 if (second != heap->raw_unchecked_empty_string()) { |
| 547 return object; | 551 return object; |
| 548 } | 552 } |
| 549 | 553 |
| 550 // Since we don't have the object's start, it is impossible to update the | 554 // Since we don't have the object's start, it is impossible to update the |
| 551 // page dirty marks. Therefore, we only replace the string with its left | 555 // page dirty marks. Therefore, we only replace the string with its left |
| 552 // substring when page dirty marks do not change. | 556 // substring when page dirty marks do not change. |
| 553 // TODO(gc): Seems like we could relax this restriction with store buffers. | 557 // TODO(gc): Seems like we could relax this restriction with store buffers. |
| 554 Object* first = reinterpret_cast<ConsString*>(object)->unchecked_first(); | 558 Object* first = reinterpret_cast<ConsString*>(object)->unchecked_first(); |
| 555 if (!Heap::InNewSpace(object) && Heap::InNewSpace(first)) return object; | 559 if (!heap->InNewSpace(object) && heap->InNewSpace(first)) return object; |
| 556 | 560 |
| 557 *p = first; | 561 *p = first; |
| 558 return HeapObject::cast(first); | 562 return HeapObject::cast(first); |
| 559 } | 563 } |
| 560 | 564 |
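To make the optimization in ShortCircuitConsString concrete: a cons string is a lazy concatenation node, and when its right half is the empty string the node carries no information beyond its left half, so the marker can overwrite the visited slot with the left half and avoid keeping the cons cell alive. A minimal stand-alone illustration with invented types (StrNode, ShortCircuit), not V8's string classes:

```cpp
#include <iostream>
#include <string>

struct StrNode {
  std::string flat;           // non-empty for leaf (flat) strings
  StrNode* first = nullptr;   // for cons strings: left part
  StrNode* second = nullptr;  // for cons strings: right part
  bool is_cons() const { return first != nullptr; }
};

// If *p is a cons string whose right part is the empty string, update *p in
// place to the left part and return it; otherwise return the node unchanged.
StrNode* ShortCircuit(StrNode** p) {
  StrNode* node = *p;
  if (node->is_cons() && !node->second->is_cons() && node->second->flat.empty()) {
    *p = node->first;
    return node->first;
  }
  return node;
}

int main() {
  StrNode hello{"hello"}, empty{""};
  StrNode cons{"", &hello, &empty};  // "hello" + ""
  StrNode* slot = &cons;             // a field that points at the cons string
  std::cout << ShortCircuit(&slot)->flat << "\n";  // prints "hello"
}
```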
| 561 | 565 |
| 562 class StaticMarkingVisitor : public StaticVisitorBase { | 566 class StaticMarkingVisitor : public StaticVisitorBase { |
| 563 public: | 567 public: |
| 564 static inline void IterateBody(Map* map, HeapObject* obj) { | 568 static inline void IterateBody(Map* map, HeapObject* obj) { |
| 565 table_.GetVisitor(map)(map, obj); | 569 table_.GetVisitor(map)(map, obj); |
| 566 } | 570 } |
| 567 | 571 |
| 568 static void EnableCodeFlushing(bool enabled) { | |
| 569 if (enabled) { | |
| 570 table_.Register(kVisitJSFunction, &VisitJSFunctionAndFlushCode); | |
| 571 table_.Register(kVisitSharedFunctionInfo, | |
| 572 &VisitSharedFunctionInfoAndFlushCode); | |
| 573 | |
| 574 } else { | |
| 575 table_.Register(kVisitJSFunction, &VisitJSFunction); | |
| 576 table_.Register(kVisitSharedFunctionInfo, | |
| 577 &VisitSharedFunctionInfoGeneric); | |
| 578 } | |
| 579 } | |
| 580 | |
| 581 static void Initialize() { | 572 static void Initialize() { |
| 582 table_.Register(kVisitShortcutCandidate, | 573 table_.Register(kVisitShortcutCandidate, |
| 583 &FixedBodyVisitor<StaticMarkingVisitor, | 574 &FixedBodyVisitor<StaticMarkingVisitor, |
| 584 ConsString::BodyDescriptor, | 575 ConsString::BodyDescriptor, |
| 585 void>::Visit); | 576 void>::Visit); |
| 586 | 577 |
| 587 table_.Register(kVisitConsString, | 578 table_.Register(kVisitConsString, |
| 588 &FixedBodyVisitor<StaticMarkingVisitor, | 579 &FixedBodyVisitor<StaticMarkingVisitor, |
| 589 ConsString::BodyDescriptor, | 580 ConsString::BodyDescriptor, |
| 590 void>::Visit); | 581 void>::Visit); |
| (...skipping 42 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 633 | 624 |
| 634 table_.RegisterSpecializations<JSObjectVisitor, | 625 table_.RegisterSpecializations<JSObjectVisitor, |
| 635 kVisitJSObject, | 626 kVisitJSObject, |
| 636 kVisitJSObjectGeneric>(); | 627 kVisitJSObjectGeneric>(); |
| 637 | 628 |
| 638 table_.RegisterSpecializations<StructObjectVisitor, | 629 table_.RegisterSpecializations<StructObjectVisitor, |
| 639 kVisitStruct, | 630 kVisitStruct, |
| 640 kVisitStructGeneric>(); | 631 kVisitStructGeneric>(); |
| 641 } | 632 } |
| 642 | 633 |
| 643 INLINE(static void VisitPointer(Object** p)) { | 634 INLINE(static void VisitPointer(Heap* heap, Object** p)) { |
| 644 MarkObjectByPointer(p); | 635 MarkObjectByPointer(heap, p); |
| 645 } | 636 } |
| 646 | 637 |
| 647 INLINE(static void VisitPointers(Object** start, Object** end)) { | 638 INLINE(static void VisitPointers(Heap* heap, Object** start, Object** end)) { |
| 648 // Mark all objects pointed to in [start, end). | 639 // Mark all objects pointed to in [start, end). |
| 649 const int kMinRangeForMarkingRecursion = 64; | 640 const int kMinRangeForMarkingRecursion = 64; |
| 650 if (end - start >= kMinRangeForMarkingRecursion) { | 641 if (end - start >= kMinRangeForMarkingRecursion) { |
| 651 if (VisitUnmarkedObjects(start, end)) return; | 642 if (VisitUnmarkedObjects(heap, start, end)) return; |
| 652 // We are close to a stack overflow, so just mark the objects. | 643 // We are close to a stack overflow, so just mark the objects. |
| 653 } | 644 } |
| 654 for (Object** p = start; p < end; p++) MarkObjectByPointer(p); | 645 for (Object** p = start; p < end; p++) MarkObjectByPointer(heap, p); |
| 655 } | 646 } |
| 656 | 647 |
| 657 static inline void VisitCodeTarget(RelocInfo* rinfo) { | 648 static inline void VisitCodeTarget(RelocInfo* rinfo) { |
| 658 ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode())); | 649 ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode())); |
| 659 Code* code = Code::GetCodeFromTargetAddress(rinfo->target_address()); | 650 Code* code = Code::GetCodeFromTargetAddress(rinfo->target_address()); |
| 660 if (FLAG_cleanup_ics_at_gc && code->is_inline_cache_stub()) { | 651 if (FLAG_cleanup_ics_at_gc && code->is_inline_cache_stub()) { |
| 661 IC::Clear(rinfo->pc()); | 652 IC::Clear(rinfo->pc()); |
| 662 // Please note targets for cleared inline caches do not have to be | 653 // Please note targets for cleared inline caches do not have to be |
| 663 // marked since they are contained in Heap::non_monomorphic_cache(). | 654 // marked since they are contained in HEAP->non_monomorphic_cache(). |
| 664 } else { | 655 } else { |
| 665 MarkBit code_mark = Marking::MarkBitFromOldSpace(code); | 656 MarkBit code_mark = Marking::MarkBitFromOldSpace(code); |
| 666 MarkCompactCollector::MarkObject(code, code_mark); | 657 HEAP->mark_compact_collector()->MarkObject(code, code_mark); |
| 667 } | 658 } |
| 668 } | 659 } |
| 669 | 660 |
| 670 static void VisitGlobalPropertyCell(RelocInfo* rinfo) { | 661 static void VisitGlobalPropertyCell(RelocInfo* rinfo) { |
| 671 ASSERT(rinfo->rmode() == RelocInfo::GLOBAL_PROPERTY_CELL); | 662 ASSERT(rinfo->rmode() == RelocInfo::GLOBAL_PROPERTY_CELL); |
| 672 Object* cell = rinfo->target_cell(); | 663 Object* cell = rinfo->target_cell(); |
| 673 Object* old_cell = cell; | 664 Object* old_cell = cell; |
| 674 VisitPointer(&cell); | 665 VisitPointer(HEAP, &cell); |
| 675 if (cell != old_cell) { | 666 if (cell != old_cell) { |
| 676 rinfo->set_target_cell(reinterpret_cast<JSGlobalPropertyCell*>(cell), | 667 rinfo->set_target_cell(reinterpret_cast<JSGlobalPropertyCell*>(cell), |
| 677 NULL); | 668 NULL); |
| 678 } | 669 } |
| 679 } | 670 } |
| 680 | 671 |
| 681 static inline void VisitDebugTarget(RelocInfo* rinfo) { | 672 static inline void VisitDebugTarget(RelocInfo* rinfo) { |
| 682 ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) && | 673 ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) && |
| 683 rinfo->IsPatchedReturnSequence()) || | 674 rinfo->IsPatchedReturnSequence()) || |
| 684 (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) && | 675 (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) && |
| 685 rinfo->IsPatchedDebugBreakSlotSequence())); | 676 rinfo->IsPatchedDebugBreakSlotSequence())); |
| 686 HeapObject* code = Code::GetCodeFromTargetAddress(rinfo->call_address()); | 677 HeapObject* code = Code::GetCodeFromTargetAddress(rinfo->call_address()); |
| 687 MarkBit code_mark = Marking::MarkBitFromOldSpace(code); | 678 MarkBit code_mark = Marking::MarkBitFromOldSpace(code); |
| 688 MarkCompactCollector::MarkObject(code, code_mark); | 679 HEAP->mark_compact_collector()->MarkObject(code, code_mark); |
| 689 } | 680 } |
| 690 | 681 |
| 691 // Mark object pointed to by p. | 682 // Mark object pointed to by p. |
| 692 INLINE(static void MarkObjectByPointer(Object** p)) { | 683 INLINE(static void MarkObjectByPointer(Heap* heap, Object** p)) { |
| 693 if (!(*p)->IsHeapObject()) return; | 684 if (!(*p)->IsHeapObject()) return; |
| 694 HeapObject* object = ShortCircuitConsString(p); | 685 HeapObject* object = ShortCircuitConsString(p); |
| 695 MarkBit mark = Marking::MarkBitFrom(object); | 686 MarkBit mark = heap->marking()->MarkBitFrom(object); |
| 696 MarkCompactCollector::MarkObject(object, mark); | 687 HEAP->mark_compact_collector()->MarkObject(object, mark); |
| 697 } | 688 } |
| 698 | 689 |
| 690 |
| 699 // Visit an unmarked object. | 691 // Visit an unmarked object. |
| 700 static inline void VisitUnmarkedObject(HeapObject* obj) { | 692 static inline void VisitUnmarkedObject(HeapObject* obj) { |
| 701 #ifdef DEBUG | 693 #ifdef DEBUG |
| 702 ASSERT(Heap::Contains(obj)); | 694 ASSERT(HEAP->Contains(obj)); |
| 703 ASSERT(!MarkCompactCollector::IsMarked(obj)); | 695 ASSERT(!HEAP->mark_compact_collector()->IsMarked(obj)); |
| 704 #endif | 696 #endif |
| 705 Map* map = obj->map(); | 697 Map* map = obj->map(); |
| 706 MarkBit mark = Marking::MarkBitFrom(obj); | 698 // TODO(gc) ISOLATES MERGE |
| 707 MarkCompactCollector::SetMark(obj, mark); | 699 MarkBit mark = HEAP->marking()->MarkBitFrom(obj); |
| 700 HEAP->mark_compact_collector()->SetMark(obj, mark); |
| 708 // Mark the map pointer and the body. | 701 // Mark the map pointer and the body. |
| 709 MarkBit map_mark = Marking::MarkBitFromOldSpace(map); | 702 MarkBit map_mark = Marking::MarkBitFromOldSpace(map); |
| 710 MarkCompactCollector::MarkObject(map, map_mark); | 703 HEAP->mark_compact_collector()->MarkObject(map, map_mark); |
| 711 IterateBody(map, obj); | 704 IterateBody(map, obj); |
| 712 } | 705 } |
| 713 | 706 |
| 714 // Visit all unmarked objects pointed to by [start, end). | 707 // Visit all unmarked objects pointed to by [start, end). |
| 715 // Returns false if the operation fails (lack of stack space). | 708 // Returns false if the operation fails (lack of stack space). |
| 716 static inline bool VisitUnmarkedObjects(Object** start, Object** end) { | 709 static inline bool VisitUnmarkedObjects(Heap* heap, |
| 710 Object** start, |
| 711 Object** end) { |
| 717 // Return false if we are close to the stack limit. | 712 // Return false if we are close to the stack limit. |
| 718 StackLimitCheck check; | 713 StackLimitCheck check(heap->isolate()); |
| 719 if (check.HasOverflowed()) return false; | 714 if (check.HasOverflowed()) return false; |
| 720 | 715 |
| 721 // Visit the unmarked objects. | 716 // Visit the unmarked objects. |
| 722 for (Object** p = start; p < end; p++) { | 717 for (Object** p = start; p < end; p++) { |
| 723 Object* o = *p; | 718 Object* o = *p; |
| 724 if (!o->IsHeapObject()) continue; | 719 if (!o->IsHeapObject()) continue; |
| 725 HeapObject* obj = HeapObject::cast(o); | 720 HeapObject* obj = HeapObject::cast(o); |
| 726 MarkBit mark = Marking::MarkBitFrom(obj); | 721 MarkBit mark = heap->marking()->MarkBitFrom(obj); |
| 727 if (mark.Get()) continue; | 722 if (mark.Get()) continue; |
| 728 VisitUnmarkedObject(obj); | 723 VisitUnmarkedObject(obj); |
| 729 } | 724 } |
| 730 return true; | 725 return true; |
| 731 } | 726 } |
| 732 | 727 |
| 733 static inline void VisitExternalReference(Address* p) { } | 728 static inline void VisitExternalReference(Address* p) { } |
| 734 static inline void VisitRuntimeEntry(RelocInfo* rinfo) { } | 729 static inline void VisitRuntimeEntry(RelocInfo* rinfo) { } |
| 735 | 730 |
| 736 private: | 731 private: |
| 737 class DataObjectVisitor { | 732 class DataObjectVisitor { |
| 738 public: | 733 public: |
| 739 template<int size> | 734 template<int size> |
| 740 static void VisitSpecialized(Map* map, HeapObject* object) { | 735 static void VisitSpecialized(Map* map, HeapObject* object) { |
| 741 } | 736 } |
| 742 | 737 |
| 743 static void Visit(Map* map, HeapObject* object) { | 738 static void Visit(Map* map, HeapObject* object) { |
| 744 } | 739 } |
| 745 }; | 740 }; |
| 746 | 741 |
| 747 typedef FlexibleBodyVisitor<StaticMarkingVisitor, | 742 typedef FlexibleBodyVisitor<StaticMarkingVisitor, |
| 748 JSObject::BodyDescriptor, | 743 JSObject::BodyDescriptor, |
| 749 void> JSObjectVisitor; | 744 void> JSObjectVisitor; |
| 750 | 745 |
| 751 typedef FlexibleBodyVisitor<StaticMarkingVisitor, | 746 typedef FlexibleBodyVisitor<StaticMarkingVisitor, |
| 752 StructBodyDescriptor, | 747 StructBodyDescriptor, |
| 753 void> StructObjectVisitor; | 748 void> StructObjectVisitor; |
| 754 | 749 |
| 755 static void VisitCode(Map* map, HeapObject* object) { | 750 static void VisitCode(Map* map, HeapObject* object) { |
| 756 reinterpret_cast<Code*>(object)->CodeIterateBody<StaticMarkingVisitor>(); | 751 reinterpret_cast<Code*>(object)->CodeIterateBody<StaticMarkingVisitor>( |
| 752 map->heap()); |
| 757 } | 753 } |
| 758 | 754 |
| 759 // Code flushing support. | 755 // Code flushing support. |
| 760 | 756 |
| 762 // How many collections a newly compiled code object will survive before being | 758 // How many collections a newly compiled code object will survive before being |
| 762 // flushed. | 758 // flushed. |
| 763 static const int kCodeAgeThreshold = 5; | 759 static const int kCodeAgeThreshold = 5; |
| 764 | 760 |
| 765 inline static bool HasSourceCode(SharedFunctionInfo* info) { | 761 inline static bool HasSourceCode(SharedFunctionInfo* info) { |
| 766 Object* undefined = Heap::raw_unchecked_undefined_value(); | 762 Object* undefined = HEAP->raw_unchecked_undefined_value(); |
| 767 return (info->script() != undefined) && | 763 return (info->script() != undefined) && |
| 768 (reinterpret_cast<Script*>(info->script())->source() != undefined); | 764 (reinterpret_cast<Script*>(info->script())->source() != undefined); |
| 769 } | 765 } |
| 770 | 766 |
| 771 | 767 |
| 772 inline static bool IsCompiled(JSFunction* function) { | 768 inline static bool IsCompiled(JSFunction* function) { |
| 773 return | 769 return function->unchecked_code() != |
| 774 function->unchecked_code() != Builtins::builtin(Builtins::LazyCompile); | 770 Isolate::Current()->builtins()->builtin(Builtins::LazyCompile); |
| 775 } | 771 } |
| 776 | 772 |
| 777 | |
| 778 inline static bool IsCompiled(SharedFunctionInfo* function) { | 773 inline static bool IsCompiled(SharedFunctionInfo* function) { |
| 779 return | 774 return function->unchecked_code() != |
| 780 function->unchecked_code() != Builtins::builtin(Builtins::LazyCompile); | 775 Isolate::Current()->builtins()->builtin(Builtins::LazyCompile); |
| 781 } | 776 } |
| 782 | 777 |
| 783 inline static bool IsFlushable(JSFunction* function) { | 778 inline static bool IsFlushable(JSFunction* function) { |
| 784 SharedFunctionInfo* shared_info = function->unchecked_shared(); | 779 SharedFunctionInfo* shared_info = function->unchecked_shared(); |
| 785 | 780 |
| 786 // Code is either on stack, in compilation cache or referenced | 781 // Code is either on stack, in compilation cache or referenced |
| 787 // by optimized version of function. | 782 // by optimized version of function. |
| 788 MarkBit code_mark = | 783 MarkBit code_mark = |
| 789 Marking::MarkBitFromOldSpace(function->unchecked_code()); | 784 Marking::MarkBitFromOldSpace(function->unchecked_code()); |
| 790 if (code_mark.Get()) { | 785 if (code_mark.Get()) { |
| (...skipping 45 matching lines...) |
| 836 // Age this shared function info. | 831 // Age this shared function info. |
| 837 if (shared_info->code_age() < kCodeAgeThreshold) { | 832 if (shared_info->code_age() < kCodeAgeThreshold) { |
| 838 shared_info->set_code_age(shared_info->code_age() + 1); | 833 shared_info->set_code_age(shared_info->code_age() + 1); |
| 839 return false; | 834 return false; |
| 840 } | 835 } |
| 841 | 836 |
| 842 return true; | 837 return true; |
| 843 } | 838 } |
| 844 | 839 |
| 845 | 840 |
| 846 static bool FlushCodeForFunction(JSFunction* function) { | 841 static bool FlushCodeForFunction(Heap* heap, JSFunction* function) { |
| 847 if (!IsFlushable(function)) return false; | 842 if (!IsFlushable(function)) return false; |
| 848 | 843 |
| 849 // This function's code looks flushable. But we have to postpone the | 844 // This function's code looks flushable. But we have to postpone the |
| 850 // decision until we see all functions that point to the same | 845 // decision until we see all functions that point to the same |
| 851 // SharedFunctionInfo because some of them might be optimized. | 846 // SharedFunctionInfo because some of them might be optimized. |
| 852 // That would make the nonoptimized version of the code nonflushable, | 847 // That would make the nonoptimized version of the code nonflushable, |
| 853 // because it is required for bailing out from optimized code. | 848 // because it is required for bailing out from optimized code. |
| 854 FlushCode::AddCandidate(function); | 849 heap->mark_compact_collector()->code_flusher()->AddCandidate(function); |
| 855 return true; | 850 return true; |
| 856 } | 851 } |
| 857 | 852 |
| 858 | 853 |
| 859 static inline Map* SafeMap(Object* obj) { | 854 static inline Map* SafeMap(Object* obj) { |
| 860 return HeapObject::cast(obj)->map(); | 855 return HeapObject::cast(obj)->map(); |
| 861 } | 856 } |
| 862 | 857 |
| 863 | 858 |
| 864 static inline bool IsJSBuiltinsObject(Object* obj) { | 859 static inline bool IsJSBuiltinsObject(Object* obj) { |
| 865 return obj->IsHeapObject() && | 860 return obj->IsHeapObject() && |
| 866 (SafeMap(obj)->instance_type() == JS_BUILTINS_OBJECT_TYPE); | 861 (SafeMap(obj)->instance_type() == JS_BUILTINS_OBJECT_TYPE); |
| 867 } | 862 } |
| 868 | 863 |
| 869 | 864 |
| 870 static inline bool IsValidNotBuiltinContext(Object* ctx) { | 865 static inline bool IsValidNotBuiltinContext(Object* ctx) { |
| 871 if (!ctx->IsHeapObject()) return false; | 866 if (!ctx->IsHeapObject()) return false; |
| 872 | 867 |
| 873 Map* map = SafeMap(ctx); | 868 Map* map = SafeMap(ctx); |
| 874 if (!(map == Heap::raw_unchecked_context_map() || | 869 if (!(map == HEAP->raw_unchecked_context_map() || |
| 875 map == Heap::raw_unchecked_catch_context_map() || | 870 map == HEAP->raw_unchecked_catch_context_map() || |
| 876 map == Heap::raw_unchecked_global_context_map())) { | 871 map == HEAP->raw_unchecked_global_context_map())) { |
| 877 return false; | 872 return false; |
| 878 } | 873 } |
| 879 | 874 |
| 880 Context* context = reinterpret_cast<Context*>(ctx); | 875 Context* context = reinterpret_cast<Context*>(ctx); |
| 881 | 876 |
| 882 if (IsJSBuiltinsObject(context->global())) { | 877 if (IsJSBuiltinsObject(context->global())) { |
| 883 return false; | 878 return false; |
| 884 } | 879 } |
| 885 | 880 |
| 886 return true; | 881 return true; |
| 887 } | 882 } |
| 888 | 883 |
| 889 | 884 |
| 890 static void VisitSharedFunctionInfoGeneric(Map* map, HeapObject* object) { | 885 static void VisitSharedFunctionInfoGeneric(Map* map, HeapObject* object) { |
| 891 SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(object); | 886 SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(object); |
| 892 | 887 |
| 893 if (shared->IsInobjectSlackTrackingInProgress()) shared->DetachInitialMap(); | 888 if (shared->IsInobjectSlackTrackingInProgress()) shared->DetachInitialMap(); |
| 894 | 889 |
| 895 FixedBodyVisitor<StaticMarkingVisitor, | 890 FixedBodyVisitor<StaticMarkingVisitor, |
| 896 SharedFunctionInfo::BodyDescriptor, | 891 SharedFunctionInfo::BodyDescriptor, |
| 897 void>::Visit(map, object); | 892 void>::Visit(map, object); |
| 898 } | 893 } |
| 899 | 894 |
| 900 | 895 |
| 901 static void VisitSharedFunctionInfoAndFlushCode(Map* map, | 896 static void VisitSharedFunctionInfoAndFlushCode(Map* map, |
| 902 HeapObject* object) { | 897 HeapObject* object) { |
| 898 MarkCompactCollector* collector = map->heap()->mark_compact_collector(); |
| 899 if (!collector->is_code_flushing_enabled()) { |
| 900 VisitSharedFunctionInfoGeneric(map, object); |
| 901 return; |
| 902 } |
| 903 VisitSharedFunctionInfoAndFlushCodeGeneric(map, object, false); | 903 VisitSharedFunctionInfoAndFlushCodeGeneric(map, object, false); |
| 904 } | 904 } |
| 905 | 905 |
| 906 | 906 |
| 907 static void VisitSharedFunctionInfoAndFlushCodeGeneric( | 907 static void VisitSharedFunctionInfoAndFlushCodeGeneric( |
| 908 Map* map, HeapObject* object, bool known_flush_code_candidate) { | 908 Map* map, HeapObject* object, bool known_flush_code_candidate) { |
| 909 Heap* heap = map->heap(); |
| 909 SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(object); | 910 SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(object); |
| 910 | 911 |
| 911 if (shared->IsInobjectSlackTrackingInProgress()) shared->DetachInitialMap(); | 912 if (shared->IsInobjectSlackTrackingInProgress()) shared->DetachInitialMap(); |
| 912 | 913 |
| 913 if (!known_flush_code_candidate) { | 914 if (!known_flush_code_candidate) { |
| 914 known_flush_code_candidate = IsFlushable(shared); | 915 known_flush_code_candidate = IsFlushable(shared); |
| 915 if (known_flush_code_candidate) FlushCode::AddCandidate(shared); | 916 if (known_flush_code_candidate) { |
| 917 heap->mark_compact_collector()->code_flusher()->AddCandidate(shared); |
| 918 } |
| 916 } | 919 } |
| 917 | 920 |
| 918 VisitSharedFunctionInfoFields(object, known_flush_code_candidate); | 921 VisitSharedFunctionInfoFields(heap, object, known_flush_code_candidate); |
| 919 } | 922 } |
| 920 | 923 |
| 921 | 924 |
| 922 static void VisitCodeEntry(Address entry_address) { | 925 static void VisitCodeEntry(Heap* heap, Address entry_address) { |
| 923 Object* code = Code::GetObjectFromEntryAddress(entry_address); | 926 Object* code = Code::GetObjectFromEntryAddress(entry_address); |
| 924 Object* old_code = code; | 927 Object* old_code = code; |
| 925 VisitPointer(&code); | 928 VisitPointer(heap, &code); |
| 926 if (code != old_code) { | 929 if (code != old_code) { |
| 927 Memory::Address_at(entry_address) = | 930 Memory::Address_at(entry_address) = |
| 928 reinterpret_cast<Code*>(code)->entry(); | 931 reinterpret_cast<Code*>(code)->entry(); |
| 929 } | 932 } |
| 930 } | 933 } |
| 931 | 934 |
| 932 | 935 |
| 933 static void VisitJSFunctionAndFlushCode(Map* map, HeapObject* object) { | 936 static void VisitJSFunctionAndFlushCode(Map* map, HeapObject* object) { |
| 937 Heap* heap = map->heap(); |
| 938 MarkCompactCollector* collector = heap->mark_compact_collector(); |
| 939 if (!collector->is_code_flushing_enabled()) { |
| 940 VisitJSFunction(map, object); |
| 941 return; |
| 942 } |
| 943 |
| 934 JSFunction* jsfunction = reinterpret_cast<JSFunction*>(object); | 944 JSFunction* jsfunction = reinterpret_cast<JSFunction*>(object); |
| 935 // The function must have a valid context and not be a builtin. | 945 // The function must have a valid context and not be a builtin. |
| 936 bool flush_code_candidate = false; | 946 bool flush_code_candidate = false; |
| 937 if (IsValidNotBuiltinContext(jsfunction->unchecked_context())) { | 947 if (IsValidNotBuiltinContext(jsfunction->unchecked_context())) { |
| 938 flush_code_candidate = FlushCodeForFunction(jsfunction); | 948 flush_code_candidate = FlushCodeForFunction(heap, jsfunction); |
| 939 } | 949 } |
| 940 | 950 |
| 941 if (!flush_code_candidate) { | 951 if (!flush_code_candidate) { |
| 942 Code* code = jsfunction->unchecked_shared()->unchecked_code(); | 952 Code* code = jsfunction->unchecked_shared()->unchecked_code(); |
| 943 MarkBit code_mark = Marking::MarkBitFromOldSpace(code); | 953 MarkBit code_mark = Marking::MarkBitFromOldSpace(code); |
| 944 MarkCompactCollector::MarkObject(code, code_mark); | 954 HEAP->mark_compact_collector()->MarkObject(code, code_mark); |
| 945 | 955 |
| 946 if (jsfunction->unchecked_code()->kind() == Code::OPTIMIZED_FUNCTION) { | 956 if (jsfunction->unchecked_code()->kind() == Code::OPTIMIZED_FUNCTION) { |
| 947 // For optimized functions we should retain both the non-optimized version | 957 // For optimized functions we should retain both the non-optimized version |
| 948 // of its code and the non-optimized versions of all inlined functions. | 958 // of its code and the non-optimized versions of all inlined functions. |
| 949 // This is required to support bailing out from inlined code. | 959 // This is required to support bailing out from inlined code. |
| 950 DeoptimizationInputData* data = | 960 DeoptimizationInputData* data = |
| 951 reinterpret_cast<DeoptimizationInputData*>( | 961 reinterpret_cast<DeoptimizationInputData*>( |
| 952 jsfunction->unchecked_code()->unchecked_deoptimization_data()); | 962 jsfunction->unchecked_code()->unchecked_deoptimization_data()); |
| 953 | 963 |
| 954 FixedArray* literals = data->UncheckedLiteralArray(); | 964 FixedArray* literals = data->UncheckedLiteralArray(); |
| 955 | 965 |
| 956 for (int i = 0, count = data->InlinedFunctionCount()->value(); | 966 for (int i = 0, count = data->InlinedFunctionCount()->value(); |
| 957 i < count; | 967 i < count; |
| 958 i++) { | 968 i++) { |
| 959 JSFunction* inlined = reinterpret_cast<JSFunction*>(literals->get(i)); | 969 JSFunction* inlined = reinterpret_cast<JSFunction*>(literals->get(i)); |
| 960 Code* inlined_code = inlined->unchecked_shared()->unchecked_code(); | 970 Code* inlined_code = inlined->unchecked_shared()->unchecked_code(); |
| 961 MarkBit inlined_code_mark = | 971 MarkBit inlined_code_mark = |
| 962 Marking::MarkBitFromOldSpace(inlined_code); | 972 Marking::MarkBitFromOldSpace(inlined_code); |
| 963 MarkCompactCollector::MarkObject(inlined_code, inlined_code_mark); | 973 HEAP->mark_compact_collector()->MarkObject( |
| 974 inlined_code, inlined_code_mark); |
| 964 } | 975 } |
| 965 } | 976 } |
| 966 } | 977 } |
| 967 | 978 |
| 968 VisitJSFunctionFields(map, | 979 VisitJSFunctionFields(map, |
| 969 reinterpret_cast<JSFunction*>(object), | 980 reinterpret_cast<JSFunction*>(object), |
| 970 flush_code_candidate); | 981 flush_code_candidate); |
| 971 } | 982 } |
| 972 | 983 |
| 973 | 984 |
| 974 static void VisitJSFunction(Map* map, HeapObject* object) { | 985 static void VisitJSFunction(Map* map, HeapObject* object) { |
| 975 VisitJSFunctionFields(map, | 986 VisitJSFunctionFields(map, |
| 976 reinterpret_cast<JSFunction*>(object), | 987 reinterpret_cast<JSFunction*>(object), |
| 977 false); | 988 false); |
| 978 } | 989 } |
| 979 | 990 |
| 980 | 991 |
| 981 #define SLOT_ADDR(obj, offset) \ | 992 #define SLOT_ADDR(obj, offset) \ |
| 982 reinterpret_cast<Object**>((obj)->address() + offset) | 993 reinterpret_cast<Object**>((obj)->address() + offset) |
| 983 | 994 |
| 984 | 995 |
| 985 static inline void VisitJSFunctionFields(Map* map, | 996 static inline void VisitJSFunctionFields(Map* map, |
| 986 JSFunction* object, | 997 JSFunction* object, |
| 987 bool flush_code_candidate) { | 998 bool flush_code_candidate) { |
| 988 VisitPointers(SLOT_ADDR(object, JSFunction::kPropertiesOffset), | 999 Heap* heap = map->heap(); |
| 1000 |
| 1001 VisitPointers(heap, |
| 1002 SLOT_ADDR(object, JSFunction::kPropertiesOffset), |
| 989 SLOT_ADDR(object, JSFunction::kCodeEntryOffset)); | 1003 SLOT_ADDR(object, JSFunction::kCodeEntryOffset)); |
| 990 | 1004 |
| 991 if (!flush_code_candidate) { | 1005 if (!flush_code_candidate) { |
| 992 VisitCodeEntry(object->address() + JSFunction::kCodeEntryOffset); | 1006 VisitCodeEntry(heap, object->address() + JSFunction::kCodeEntryOffset); |
| 993 } else { | 1007 } else { |
| 994 // Don't visit code object. | 1008 // Don't visit code object. |
| 995 | 1009 |
| 996 // Visit shared function info to avoid double checking of its | 1010 // Visit shared function info to avoid double checking of its |
| 997 // flushability. | 1011 // flushability. |
| 998 SharedFunctionInfo* shared_info = object->unchecked_shared(); | 1012 SharedFunctionInfo* shared_info = object->unchecked_shared(); |
| 999 MarkBit shared_info_mark = Marking::MarkBitFrom(shared_info); | 1013 MarkBit shared_info_mark = heap->marking()->MarkBitFrom(shared_info); |
| 1000 if (!shared_info_mark.Get()) { | 1014 if (!shared_info_mark.Get()) { |
| 1001 Map* shared_info_map = shared_info->map(); | 1015 Map* shared_info_map = shared_info->map(); |
| 1002 MarkBit shared_info_map_mark = | 1016 MarkBit shared_info_map_mark = |
| 1003 Marking::MarkBitFromOldSpace(shared_info_map); | 1017 Marking::MarkBitFromOldSpace(shared_info_map); |
| 1004 MarkCompactCollector::SetMark(shared_info, shared_info_mark); | 1018 HEAP->mark_compact_collector()->SetMark(shared_info, shared_info_mark); |
| 1005 MarkCompactCollector::MarkObject(shared_info_map, shared_info_map_mark); | 1019 HEAP->mark_compact_collector()->MarkObject(shared_info_map, |
| 1020 shared_info_map_mark); |
| 1006 VisitSharedFunctionInfoAndFlushCodeGeneric(shared_info_map, | 1021 VisitSharedFunctionInfoAndFlushCodeGeneric(shared_info_map, |
| 1007 shared_info, | 1022 shared_info, |
| 1008 true); | 1023 true); |
| 1009 } | 1024 } |
| 1010 } | 1025 } |
| 1011 | 1026 |
| 1012 VisitPointers(SLOT_ADDR(object, | 1027 VisitPointers(heap, |
| 1028 SLOT_ADDR(object, |
| 1013 JSFunction::kCodeEntryOffset + kPointerSize), | 1029 JSFunction::kCodeEntryOffset + kPointerSize), |
| 1014 SLOT_ADDR(object, JSFunction::kNonWeakFieldsEndOffset)); | 1030 SLOT_ADDR(object, JSFunction::kNonWeakFieldsEndOffset)); |
| 1015 | 1031 |
| 1016 // Don't visit the next function list field as it is a weak reference. | 1032 // Don't visit the next function list field as it is a weak reference. |
| 1017 } | 1033 } |
| 1018 | 1034 |
| 1019 | 1035 |
| 1020 static void VisitSharedFunctionInfoFields(HeapObject* object, | 1036 static void VisitSharedFunctionInfoFields(Heap* heap, |
| 1037 HeapObject* object, |
| 1021 bool flush_code_candidate) { | 1038 bool flush_code_candidate) { |
| 1022 VisitPointer(SLOT_ADDR(object, SharedFunctionInfo::kNameOffset)); | 1039 VisitPointer(heap, SLOT_ADDR(object, SharedFunctionInfo::kNameOffset)); |
| 1023 | 1040 |
| 1024 if (!flush_code_candidate) { | 1041 if (!flush_code_candidate) { |
| 1025 VisitPointer(SLOT_ADDR(object, SharedFunctionInfo::kCodeOffset)); | 1042 VisitPointer(heap, SLOT_ADDR(object, SharedFunctionInfo::kCodeOffset)); |
| 1026 } | 1043 } |
| 1027 | 1044 |
| 1028 VisitPointers(SLOT_ADDR(object, SharedFunctionInfo::kScopeInfoOffset), | 1045 VisitPointers(heap, |
| 1046 SLOT_ADDR(object, SharedFunctionInfo::kScopeInfoOffset), |
| 1029 SLOT_ADDR(object, SharedFunctionInfo::kSize)); | 1047 SLOT_ADDR(object, SharedFunctionInfo::kSize)); |
| 1030 } | 1048 } |
| 1031 | 1049 |
| 1032 #undef SLOT_ADDR | 1050 #undef SLOT_ADDR |
| 1033 | 1051 |
| 1034 typedef void (*Callback)(Map* map, HeapObject* object); | 1052 typedef void (*Callback)(Map* map, HeapObject* object); |
| 1035 | 1053 |
| 1036 static VisitorDispatchTable<Callback> table_; | 1054 static VisitorDispatchTable<Callback> table_; |
| 1037 }; | 1055 }; |
| 1038 | 1056 |
| 1039 | 1057 |
| 1040 VisitorDispatchTable<StaticMarkingVisitor::Callback> | 1058 VisitorDispatchTable<StaticMarkingVisitor::Callback> |
| 1041 StaticMarkingVisitor::table_; | 1059 StaticMarkingVisitor::table_; |
| 1042 | 1060 |
| 1043 | 1061 |
| 1044 class MarkingVisitor : public ObjectVisitor { | 1062 class MarkingVisitor : public ObjectVisitor { |
| 1045 public: | 1063 public: |
| 1064 explicit MarkingVisitor(Heap* heap) : heap_(heap) { } |
| 1065 |
| 1046 void VisitPointer(Object** p) { | 1066 void VisitPointer(Object** p) { |
| 1047 StaticMarkingVisitor::VisitPointer(p); | 1067 StaticMarkingVisitor::VisitPointer(heap_, p); |
| 1048 } | 1068 } |
| 1049 | 1069 |
| 1050 void VisitPointers(Object** start, Object** end) { | 1070 void VisitPointers(Object** start, Object** end) { |
| 1051 StaticMarkingVisitor::VisitPointers(start, end); | 1071 StaticMarkingVisitor::VisitPointers(heap_, start, end); |
| 1052 } | 1072 } |
| 1053 | 1073 |
| 1054 void VisitCodeTarget(RelocInfo* rinfo) { | 1074 void VisitCodeTarget(RelocInfo* rinfo) { |
| 1055 StaticMarkingVisitor::VisitCodeTarget(rinfo); | 1075 StaticMarkingVisitor::VisitCodeTarget(rinfo); |
| 1056 } | 1076 } |
| 1057 | 1077 |
| 1058 void VisitGlobalPropertyCell(RelocInfo* rinfo) { | 1078 void VisitGlobalPropertyCell(RelocInfo* rinfo) { |
| 1059 StaticMarkingVisitor::VisitGlobalPropertyCell(rinfo); | 1079 StaticMarkingVisitor::VisitGlobalPropertyCell(rinfo); |
| 1060 } | 1080 } |
| 1061 | 1081 |
| 1062 void VisitDebugTarget(RelocInfo* rinfo) { | 1082 void VisitDebugTarget(RelocInfo* rinfo) { |
| 1063 StaticMarkingVisitor::VisitDebugTarget(rinfo); | 1083 StaticMarkingVisitor::VisitDebugTarget(rinfo); |
| 1064 } | 1084 } |
| 1085 |
| 1086 private: |
| 1087 Heap* heap_; |
| 1065 }; | 1088 }; |
| 1066 | 1089 |
| 1067 | 1090 |
| 1068 class CodeMarkingVisitor : public ThreadVisitor { | 1091 class CodeMarkingVisitor : public ThreadVisitor { |
| 1069 public: | 1092 public: |
| 1093 explicit CodeMarkingVisitor(MarkCompactCollector* collector) |
| 1094 : collector_(collector) {} |
| 1095 |
| 1070 void VisitThread(ThreadLocalTop* top) { | 1096 void VisitThread(ThreadLocalTop* top) { |
| 1071 for (StackFrameIterator it(top); !it.done(); it.Advance()) { | 1097 for (StackFrameIterator it(top); !it.done(); it.Advance()) { |
| 1072 Code* code = it.frame()->unchecked_code(); | 1098 Code* code = it.frame()->unchecked_code(); |
| 1073 MarkBit code_bit = Marking::MarkBitFromOldSpace(code); | 1099 MarkBit code_bit = Marking::MarkBitFromOldSpace(code); |
| 1074 MarkCompactCollector::MarkObject(it.frame()->unchecked_code(), code_bit); | 1100 HEAP->mark_compact_collector()->MarkObject( |
| 1101 it.frame()->unchecked_code(), code_bit); |
| 1075 } | 1102 } |
| 1076 } | 1103 } |
| 1104 |
| 1105 private: |
| 1106 MarkCompactCollector* collector_; |
| 1077 }; | 1107 }; |
| 1078 | 1108 |
| 1079 | 1109 |
| 1080 class SharedFunctionInfoMarkingVisitor : public ObjectVisitor { | 1110 class SharedFunctionInfoMarkingVisitor : public ObjectVisitor { |
| 1081 public: | 1111 public: |
| 1112 explicit SharedFunctionInfoMarkingVisitor(MarkCompactCollector* collector) |
| 1113 : collector_(collector) {} |
| 1114 |
| 1082 void VisitPointers(Object** start, Object** end) { | 1115 void VisitPointers(Object** start, Object** end) { |
| 1083 for (Object** p = start; p < end; p++) VisitPointer(p); | 1116 for (Object** p = start; p < end; p++) VisitPointer(p); |
| 1084 } | 1117 } |
| 1085 | 1118 |
| 1086 void VisitPointer(Object** slot) { | 1119 void VisitPointer(Object** slot) { |
| 1087 Object* obj = *slot; | 1120 Object* obj = *slot; |
| 1088 if (obj->IsSharedFunctionInfo()) { | 1121 if (obj->IsSharedFunctionInfo()) { |
| 1089 SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(obj); | 1122 SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(obj); |
| 1090 MarkBit shared_mark = Marking::MarkBitFrom(shared); | 1123 // TODO(gc) ISOLATES MERGE |
| 1124 MarkBit shared_mark = HEAP->marking()->MarkBitFrom(shared); |
| 1091 MarkBit code_mark = | 1125 MarkBit code_mark = |
| 1092 Marking::MarkBitFromOldSpace(shared->unchecked_code()); | 1126 HEAP->marking()->MarkBitFromOldSpace(shared->unchecked_code()); |
| 1093 MarkCompactCollector::MarkObject(shared->unchecked_code(), code_mark); | 1127 HEAP->mark_compact_collector()->MarkObject(shared->unchecked_code(), |
| 1094 MarkCompactCollector::MarkObject(shared, shared_mark); | 1128 code_mark); |
| 1129 HEAP->mark_compact_collector()->MarkObject(shared, shared_mark); |
| 1095 } | 1130 } |
| 1096 } | 1131 } |
| 1132 |
| 1133 private: |
| 1134 MarkCompactCollector* collector_; |
| 1097 }; | 1135 }; |
| 1098 | 1136 |
| 1099 | 1137 |
| 1100 void MarkCompactCollector::PrepareForCodeFlushing() { | 1138 void MarkCompactCollector::PrepareForCodeFlushing() { |
| 1139 ASSERT(heap_ == Isolate::Current()->heap()); |
| 1140 |
| 1101 if (!FLAG_flush_code) { | 1141 if (!FLAG_flush_code) { |
| 1102 StaticMarkingVisitor::EnableCodeFlushing(false); | 1142 EnableCodeFlushing(false); |
| 1103 return; | 1143 return; |
| 1104 } | 1144 } |
| 1105 | 1145 |
| 1106 #ifdef ENABLE_DEBUGGER_SUPPORT | 1146 #ifdef ENABLE_DEBUGGER_SUPPORT |
| 1107 if (Debug::IsLoaded() || Debug::has_break_points()) { | 1147 if (heap_->isolate()->debug()->IsLoaded() || |
| 1108 StaticMarkingVisitor::EnableCodeFlushing(false); | 1148 heap_->isolate()->debug()->has_break_points()) { |
| 1149 EnableCodeFlushing(false); |
| 1109 return; | 1150 return; |
| 1110 } | 1151 } |
| 1111 #endif | 1152 #endif |
| 1112 StaticMarkingVisitor::EnableCodeFlushing(true); | 1153 EnableCodeFlushing(true); |
| 1113 | 1154 |
| 1114 // Ensure that the empty descriptor array is marked. MarkDescriptorArray | 1155 // Ensure that the empty descriptor array is marked. MarkDescriptorArray |
| 1115 // relies on it being marked before any other descriptor array. | 1156 // relies on it being marked before any other descriptor array. |
| 1116 HeapObject* descriptor_array = Heap::raw_unchecked_empty_descriptor_array(); | 1157 HeapObject* descriptor_array = HEAP->raw_unchecked_empty_descriptor_array(); |
| 1117 MarkBit descriptor_array_mark = Marking::MarkBitFrom(descriptor_array); | 1158 // TODO(gc) ISOLATES MERGE |
| 1159 MarkBit descriptor_array_mark = |
| 1160 HEAP->marking()->MarkBitFrom(descriptor_array); |
| 1118 MarkObject(descriptor_array, descriptor_array_mark); | 1161 MarkObject(descriptor_array, descriptor_array_mark); |
| 1119 | 1162 |
| 1120 // Make sure we are not referencing the code from the stack. | 1163 // Make sure we are not referencing the code from the stack. |
| 1164 ASSERT(this == heap_->mark_compact_collector()); |
| 1121 for (StackFrameIterator it; !it.done(); it.Advance()) { | 1165 for (StackFrameIterator it; !it.done(); it.Advance()) { |
| 1122 Code* code = it.frame()->unchecked_code(); | 1166 Code* code = it.frame()->unchecked_code(); |
| 1123 MarkBit code_mark = Marking::MarkBitFromOldSpace(code); | 1167 MarkBit code_mark = Marking::MarkBitFromOldSpace(code); |
| 1124 MarkObject(code, code_mark); | 1168 MarkObject(code, code_mark); |
| 1125 } | 1169 } |
| 1126 | 1170 |
| 1127 // Iterate the archived stacks in all threads to check if | 1171 // Iterate the archived stacks in all threads to check if |
| 1128 // the code is referenced. | 1172 // the code is referenced. |
| 1129 CodeMarkingVisitor code_marking_visitor; | 1173 CodeMarkingVisitor code_marking_visitor(this); |
| 1130 ThreadManager::IterateArchivedThreads(&code_marking_visitor); | 1174 heap_->isolate()->thread_manager()->IterateArchivedThreads( |
| 1175 &code_marking_visitor); |
| 1131 | 1176 |
| 1132 SharedFunctionInfoMarkingVisitor visitor; | 1177 SharedFunctionInfoMarkingVisitor visitor(this); |
| 1133 CompilationCache::IterateFunctions(&visitor); | 1178 heap_->isolate()->compilation_cache()->IterateFunctions(&visitor); |
| 1134 HandleScopeImplementer::Iterate(&visitor); | 1179 heap_->isolate()->handle_scope_implementer()->Iterate(&visitor); |
| 1135 | 1180 |
| 1136 ProcessMarkingStack(); | 1181 ProcessMarkingStack(); |
| 1137 } | 1182 } |
| 1138 | 1183 |
| 1139 | 1184 |
| 1140 // Visitor class for marking heap roots. | 1185 // Visitor class for marking heap roots. |
| 1141 class RootMarkingVisitor : public ObjectVisitor { | 1186 class RootMarkingVisitor : public ObjectVisitor { |
| 1142 public: | 1187 public: |
| 1188 explicit RootMarkingVisitor(Heap* heap) |
| 1189 : collector_(heap->mark_compact_collector()) { } |
| 1190 |
| 1143 void VisitPointer(Object** p) { | 1191 void VisitPointer(Object** p) { |
| 1144 MarkObjectByPointer(p); | 1192 MarkObjectByPointer(p); |
| 1145 } | 1193 } |
| 1146 | 1194 |
| 1147 void VisitPointers(Object** start, Object** end) { | 1195 void VisitPointers(Object** start, Object** end) { |
| 1148 for (Object** p = start; p < end; p++) MarkObjectByPointer(p); | 1196 for (Object** p = start; p < end; p++) MarkObjectByPointer(p); |
| 1149 } | 1197 } |
| 1150 | 1198 |
| 1151 private: | 1199 private: |
| 1152 void MarkObjectByPointer(Object** p) { | 1200 void MarkObjectByPointer(Object** p) { |
| 1153 if (!(*p)->IsHeapObject()) return; | 1201 if (!(*p)->IsHeapObject()) return; |
| 1154 | 1202 |
| 1155 // Replace flat cons strings in place. | 1203 // Replace flat cons strings in place. |
| 1156 HeapObject* object = ShortCircuitConsString(p); | 1204 HeapObject* object = ShortCircuitConsString(p); |
| 1157 MarkBit mark_bit = Marking::MarkBitFrom(object); | 1205 // TODO(gc) ISOLATES MERGE |
| 1206 MarkBit mark_bit = HEAP->marking()->MarkBitFrom(object); |
| 1158 if (mark_bit.Get()) return; | 1207 if (mark_bit.Get()) return; |
| 1159 | 1208 |
| 1160 Map* map = object->map(); | 1209 Map* map = object->map(); |
| 1161 // Mark the object. | 1210 // Mark the object. |
| 1162 MarkCompactCollector::SetMark(object, mark_bit); | 1211 HEAP->mark_compact_collector()->SetMark(object, mark_bit); |
| 1163 | 1212 |
| 1164 // Mark the map pointer and body, and push them on the marking stack. | 1213 // Mark the map pointer and body, and push them on the marking stack. |
| 1165 MarkBit map_mark = Marking::MarkBitFromOldSpace(map); | 1214 MarkBit map_mark = Marking::MarkBitFromOldSpace(map); |
| 1166 MarkCompactCollector::MarkObject(map, map_mark); | 1215 HEAP->mark_compact_collector()->MarkObject(map, map_mark); |
| 1167 StaticMarkingVisitor::IterateBody(map, object); | 1216 StaticMarkingVisitor::IterateBody(map, object); |
| 1168 | 1217 |
| 1169 // Mark all the objects reachable from the map and body. May leave | 1218 // Mark all the objects reachable from the map and body. May leave |
| 1170 // overflowed objects in the heap. | 1219 // overflowed objects in the heap. |
| 1171 MarkCompactCollector::EmptyMarkingStack(); | 1220 collector_->EmptyMarkingStack(); |
| 1172 } | 1221 } |
| 1222 |
| 1223 MarkCompactCollector* collector_; |
| 1173 }; | 1224 }; |
| 1174 | 1225 |
| 1175 | 1226 |
| 1176 // Helper class for pruning the symbol table. | 1227 // Helper class for pruning the symbol table. |
| 1177 class SymbolTableCleaner : public ObjectVisitor { | 1228 class SymbolTableCleaner : public ObjectVisitor { |
| 1178 public: | 1229 public: |
| 1179 SymbolTableCleaner() : pointers_removed_(0) { } | 1230 SymbolTableCleaner() : pointers_removed_(0) { } |
| 1180 | 1231 |
| 1181 virtual void VisitPointers(Object** start, Object** end) { | 1232 virtual void VisitPointers(Object** start, Object** end) { |
| 1182 // Visit all HeapObject pointers in [start, end). | 1233 // Visit all HeapObject pointers in [start, end). |
| 1183 for (Object** p = start; p < end; p++) { | 1234 for (Object** p = start; p < end; p++) { |
| 1184 Object* o = *p; | 1235 Object* o = *p; |
| 1236 // TODO(gc) ISOLATES MERGE |
| 1185 if (o->IsHeapObject() && | 1237 if (o->IsHeapObject() && |
| 1186 !Marking::MarkBitFrom(HeapObject::cast(o)).Get()) { | 1238 !HEAP->marking()->MarkBitFrom(HeapObject::cast(o)).Get()) { |
| 1187 // Check if the symbol being pruned is an external symbol. We need to | 1239 // Check if the symbol being pruned is an external symbol. We need to |
| 1188 // delete the associated external data as this symbol is going away. | 1240 // delete the associated external data as this symbol is going away. |
| 1189 | 1241 |
| 1190 // Since no objects have yet been moved we can safely access the map of | 1242 // Since no objects have yet been moved we can safely access the map of |
| 1191 // the object. | 1243 // the object. |
| 1192 if (o->IsExternalString()) { | 1244 if (o->IsExternalString()) { |
| 1193 Heap::FinalizeExternalString(String::cast(o)); | 1245 HEAP->FinalizeExternalString(String::cast(*p)); |
| 1194 } | 1246 } |
| 1195 // Set the entry to null_value (as deleted). | 1247 // Set the entry to null_value (as deleted). |
| 1196 *p = Heap::raw_unchecked_null_value(); | 1248 *p = HEAP->raw_unchecked_null_value(); |
| 1197 pointers_removed_++; | 1249 pointers_removed_++; |
| 1198 } | 1250 } |
| 1199 } | 1251 } |
| 1200 } | 1252 } |
| 1201 | 1253 |
| 1202 int PointersRemoved() { | 1254 int PointersRemoved() { |
| 1203 return pointers_removed_; | 1255 return pointers_removed_; |
| 1204 } | 1256 } |
| 1205 private: | 1257 private: |
| 1206 int pointers_removed_; | 1258 int pointers_removed_; |
| 1207 }; | 1259 }; |
| 1208 | 1260 |
| 1209 | 1261 |
| 1210 // Implementation of WeakObjectRetainer for mark compact GCs. All marked objects | 1262 // Implementation of WeakObjectRetainer for mark compact GCs. All marked objects |
| 1211 // are retained. | 1263 // are retained. |
| 1212 class MarkCompactWeakObjectRetainer : public WeakObjectRetainer { | 1264 class MarkCompactWeakObjectRetainer : public WeakObjectRetainer { |
| 1213 public: | 1265 public: |
| 1214 virtual Object* RetainAs(Object* object) { | 1266 virtual Object* RetainAs(Object* object) { |
| 1215 if (Marking::MarkBitFrom(HeapObject::cast(object)).Get()) { | 1267 // TODO(gc) ISOLATES MERGE |
| 1268 if (HEAP->marking()->MarkBitFrom(HeapObject::cast(object)).Get()) { |
| 1216 return object; | 1269 return object; |
| 1217 } else { | 1270 } else { |
| 1218 return NULL; | 1271 return NULL; |
| 1219 } | 1272 } |
| 1220 } | 1273 } |
| 1221 }; | 1274 }; |
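
RetainAs above is the entire survival policy for weak structures during mark-compact: keep an element only if its mark bit is set, otherwise report it as dead. To make the retainer pattern concrete, here is a minimal standalone sketch of how a weak-list pruning pass can use such an interface; the WeakEntry list and every name below are hypothetical illustrations, not V8's actual ProcessWeakReferences implementation.

    // Hypothetical stand-ins for heap objects and the retainer interface.
    struct Obj { bool marked; };

    struct WeakRetainer {
      // Returns the object if it should be kept, or nullptr if it is dead.
      virtual Obj* RetainAs(Obj* object) = 0;
      virtual ~WeakRetainer() {}
    };

    struct MarkedOnlyRetainer : WeakRetainer {
      Obj* RetainAs(Obj* object) override {
        return object->marked ? object : nullptr;
      }
    };

    // Prune a singly linked weak list in place, dropping dead entries.
    // (Ownership of the dropped entries is ignored in this sketch.)
    struct WeakEntry { Obj* payload; WeakEntry* next; };

    WeakEntry* PruneWeakList(WeakEntry* head, WeakRetainer* retainer) {
      WeakEntry** link = &head;
      while (*link != nullptr) {
        WeakEntry* entry = *link;
        if (retainer->RetainAs(entry->payload) != nullptr) {
          link = &entry->next;  // Keep: advance past this entry.
        } else {
          *link = entry->next;  // Drop: unlink the dead entry.
        }
      }
      return head;
    }
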
| 1222 | 1275 |
| 1223 | 1276 |
| 1224 void MarkCompactCollector::ProcessNewlyMarkedObject(HeapObject* object) { | 1277 void MarkCompactCollector::ProcessNewlyMarkedObject(HeapObject* object) { |
| 1225 ASSERT(IsMarked(object)); | 1278 ASSERT(IsMarked(object)); |
| 1226 ASSERT(Heap::Contains(object)); | 1279 ASSERT(HEAP->Contains(object)); |
| 1227 if (object->IsMap()) { | 1280 if (object->IsMap()) { |
| 1228 Map* map = Map::cast(object); | 1281 Map* map = Map::cast(object); |
| 1229 if (FLAG_cleanup_caches_in_maps_at_gc) { | 1282 if (FLAG_cleanup_caches_in_maps_at_gc) { |
| 1230 map->ClearCodeCache(); | 1283 map->ClearCodeCache(heap_); |
| 1231 } | 1284 } |
| 1232 if (FLAG_collect_maps && | 1285 if (FLAG_collect_maps && |
| 1233 map->instance_type() >= FIRST_JS_OBJECT_TYPE && | 1286 map->instance_type() >= FIRST_JS_OBJECT_TYPE && |
| 1234 map->instance_type() <= JS_FUNCTION_TYPE) { | 1287 map->instance_type() <= JS_FUNCTION_TYPE) { |
| 1235 MarkMapContents(map); | 1288 MarkMapContents(map); |
| 1236 } else { | 1289 } else { |
| 1237 marking_stack.Push(map); | 1290 marking_stack_.Push(map); |
| 1238 } | 1291 } |
| 1239 } else { | 1292 } else { |
| 1240 marking_stack.Push(object); | 1293 marking_stack_.Push(object); |
| 1241 } | 1294 } |
| 1242 } | 1295 } |
| 1243 | 1296 |
| 1244 | 1297 |
| 1245 void MarkCompactCollector::MarkMapContents(Map* map) { | 1298 void MarkCompactCollector::MarkMapContents(Map* map) { |
| 1246 MarkDescriptorArray(reinterpret_cast<DescriptorArray*>( | 1299 MarkDescriptorArray(reinterpret_cast<DescriptorArray*>( |
| 1247 *HeapObject::RawField(map, Map::kInstanceDescriptorsOffset))); | 1300 *HeapObject::RawField(map, Map::kInstanceDescriptorsOffset))); |
| 1248 | 1301 |
| 1249 // Mark the Object* fields of the Map. | 1302 // Mark the Object* fields of the Map. |
| 1250 // Since the descriptor array has been marked already, it is fine | 1303 // Since the descriptor array has been marked already, it is fine |
| 1251 // that one of these fields contains a pointer to it. | 1304 // that one of these fields contains a pointer to it. |
| 1252 Object** start_slot = HeapObject::RawField(map, | 1305 Object** start_slot = HeapObject::RawField(map, |
| 1253 Map::kPointerFieldsBeginOffset); | 1306 Map::kPointerFieldsBeginOffset); |
| 1254 | 1307 |
| 1255 Object** end_slot = HeapObject::RawField(map, Map::kPointerFieldsEndOffset); | 1308 Object** end_slot = HeapObject::RawField(map, Map::kPointerFieldsEndOffset); |
| 1256 | 1309 |
| 1257 StaticMarkingVisitor::VisitPointers(start_slot, end_slot); | 1310 StaticMarkingVisitor::VisitPointers(map->heap(), start_slot, end_slot); |
| 1258 } | 1311 } |
| 1259 | 1312 |
| 1260 | 1313 |
| 1261 void MarkCompactCollector::MarkDescriptorArray( | 1314 void MarkCompactCollector::MarkDescriptorArray( |
| 1262 DescriptorArray* descriptors) { | 1315 DescriptorArray* descriptors) { |
| 1263 MarkBit descriptors_mark = Marking::MarkBitFrom(descriptors); | 1316 // TODO(gc) ISOLATES MERGE |
| 1317 MarkBit descriptors_mark = HEAP->marking()->MarkBitFrom(descriptors); |
| 1264 if (descriptors_mark.Get()) return; | 1318 if (descriptors_mark.Get()) return; |
| 1265 // Empty descriptor array is marked as a root before any maps are marked. | 1319 // Empty descriptor array is marked as a root before any maps are marked. |
| 1266 ASSERT(descriptors != Heap::raw_unchecked_empty_descriptor_array()); | 1320 ASSERT(descriptors != HEAP->raw_unchecked_empty_descriptor_array()); |
| 1267 SetMark(descriptors, descriptors_mark); | 1321 SetMark(descriptors, descriptors_mark); |
| 1268 | 1322 |
| 1269 FixedArray* contents = reinterpret_cast<FixedArray*>( | 1323 FixedArray* contents = reinterpret_cast<FixedArray*>( |
| 1270 descriptors->get(DescriptorArray::kContentArrayIndex)); | 1324 descriptors->get(DescriptorArray::kContentArrayIndex)); |
| 1271 ASSERT(contents->IsHeapObject()); | 1325 ASSERT(contents->IsHeapObject()); |
| 1272 ASSERT(!IsMarked(contents)); | 1326 ASSERT(!IsMarked(contents)); |
| 1273 ASSERT(contents->IsFixedArray()); | 1327 ASSERT(contents->IsFixedArray()); |
| 1274 ASSERT(contents->length() >= 2); | 1328 ASSERT(contents->length() >= 2); |
| 1275 MarkBit contents_mark = Marking::MarkBitFrom(contents); | 1329 // TODO(gc) ISOLATES MERGE |
| 1330 MarkBit contents_mark = HEAP->marking()->MarkBitFrom(contents); |
| 1276 SetMark(contents, contents_mark); | 1331 SetMark(contents, contents_mark); |
| 1277 // Contents contains (value, details) pairs. If the details say that | 1332 // Contents contains (value, details) pairs. If the details say that |
| 1278 // the type of descriptor is MAP_TRANSITION, CONSTANT_TRANSITION, or | 1333 // the type of descriptor is MAP_TRANSITION, CONSTANT_TRANSITION, or |
| 1279 // NULL_DESCRIPTOR, we don't mark the value as live. Only for | 1334 // NULL_DESCRIPTOR, we don't mark the value as live. Only for |
| 1280 // MAP_TRANSITION and CONSTANT_TRANSITION is the value an Object* (a | 1335 // MAP_TRANSITION and CONSTANT_TRANSITION is the value an Object* (a |
| 1281 // Map*). | 1336 // Map*). |
| 1282 for (int i = 0; i < contents->length(); i += 2) { | 1337 for (int i = 0; i < contents->length(); i += 2) { |
| 1283 // If the pair (value, details) at index i, i+1 is not | 1338 // If the pair (value, details) at index i, i+1 is not |
| 1284 // a transition or null descriptor, mark the value. | 1339 // a transition or null descriptor, mark the value. |
| 1285 PropertyDetails details(Smi::cast(contents->get(i + 1))); | 1340 PropertyDetails details(Smi::cast(contents->get(i + 1))); |
| 1286 if (details.type() < FIRST_PHANTOM_PROPERTY_TYPE) { | 1341 if (details.type() < FIRST_PHANTOM_PROPERTY_TYPE) { |
| 1287 HeapObject* object = reinterpret_cast<HeapObject*>(contents->get(i)); | 1342 HeapObject* object = reinterpret_cast<HeapObject*>(contents->get(i)); |
| 1288 if (object->IsHeapObject()) { | 1343 if (object->IsHeapObject()) { |
| 1289 MarkBit mark = Marking::MarkBitFrom(HeapObject::cast(object)); | 1344 // TODO(gc) ISOLATES MERGE |
| 1345 MarkBit mark = HEAP->marking()->MarkBitFrom(HeapObject::cast(object)); |
| 1290 if (!mark.Get()) { | 1346 if (!mark.Get()) { |
| 1291 SetMark(HeapObject::cast(object), mark); | 1347 SetMark(HeapObject::cast(object), mark); |
| 1292 marking_stack.Push(object); | 1348 marking_stack_.Push(object); |
| 1293 } | 1349 } |
| 1294 } | 1350 } |
| 1295 } | 1351 } |
| 1296 } | 1352 } |
| 1297 // The DescriptorArray descriptors contains a pointer to its contents array, | 1353 // The DescriptorArray descriptors contains a pointer to its contents array, |
| 1298 // but the contents array is already marked. | 1354 // but the contents array is already marked. |
| 1299 marking_stack.Push(descriptors); | 1355 marking_stack_.Push(descriptors); |
| 1300 } | 1356 } |
| 1301 | 1357 |
| 1302 | 1358 |
| 1303 void MarkCompactCollector::CreateBackPointers() { | 1359 void MarkCompactCollector::CreateBackPointers() { |
| 1304 HeapObjectIterator iterator(Heap::map_space()); | 1360 HeapObjectIterator iterator(HEAP->map_space()); |
| 1305 for (HeapObject* next_object = iterator.Next(); | 1361 for (HeapObject* next_object = iterator.Next(); |
| 1306 next_object != NULL; next_object = iterator.Next()) { | 1362 next_object != NULL; next_object = iterator.Next()) { |
| 1307 if (next_object->IsMap()) { // Could also be FreeSpace object on free list. | 1363 if (next_object->IsMap()) { // Could also be FreeSpace object on free list. |
| 1308 Map* map = Map::cast(next_object); | 1364 Map* map = Map::cast(next_object); |
| 1309 if (map->instance_type() >= FIRST_JS_OBJECT_TYPE && | 1365 if (map->instance_type() >= FIRST_JS_OBJECT_TYPE && |
| 1310 map->instance_type() <= JS_FUNCTION_TYPE) { | 1366 map->instance_type() <= JS_FUNCTION_TYPE) { |
| 1311 map->CreateBackPointers(); | 1367 map->CreateBackPointers(); |
| 1312 } else { | 1368 } else { |
| 1313 ASSERT(map->instance_descriptors() == Heap::empty_descriptor_array()); | 1369 ASSERT(map->instance_descriptors() == HEAP->empty_descriptor_array()); |
| 1314 } | 1370 } |
| 1315 } | 1371 } |
| 1316 } | 1372 } |
| 1317 } | 1373 } |
| 1318 | 1374 |
| 1319 | 1375 |
| 1376 #if 0 |
| 1320 static int OverflowObjectSize(HeapObject* obj) { | 1377 static int OverflowObjectSize(HeapObject* obj) { |
| 1321 // Recover the normal map pointer, it might be marked as live and | 1378 // Recover the normal map pointer, it might be marked as live and |
| 1322 // overflowed. | 1379 // overflowed. |
| 1323 return obj->Size(); | 1380 return obj->Size(); |
| 1324 } | 1381 } |
| 1382 #endif |
| 1325 | 1383 |
| 1326 | 1384 |
| 1327 // Fill the marking stack with overflowed objects returned by the given | 1385 // Fill the marking stack with overflowed objects returned by the given |
| 1328 // iterator. Stop when the marking stack is filled or the end of the space | 1386 // iterator. Stop when the marking stack is filled or the end of the space |
| 1329 // is reached, whichever comes first. | 1387 // is reached, whichever comes first. |
| 1330 template<class T> | 1388 template<class T> |
| 1331 static void ScanOverflowedObjects(T* it) { | 1389 static void ScanOverflowedObjects(T* it) { |
| 1332 #if 0 | 1390 #if 0 |
| 1333 // The caller should ensure that the marking stack is initially not full, | 1391 // The caller should ensure that the marking stack is initially not full, |
| 1334 // so that we don't waste effort pointlessly scanning for objects. | 1392 // so that we don't waste effort pointlessly scanning for objects. |
| 1335 ASSERT(!marking_stack.is_full()); | 1393 ASSERT(!marking_stack.is_full()); |
| 1336 | 1394 |
| 1337 for (HeapObject* object = it->next(); object != NULL; object = it->next()) { | 1395 for (HeapObject* object = it->next(); object != NULL; object = it->next()) { |
| 1338 if (object->IsOverflowed()) { | 1396 if (object->IsOverflowed()) { |
| 1339 object->ClearOverflow(); | 1397 object->ClearOverflow(); |
| 1340 ASSERT(MarkCompactCollector::IsMarked(object)); | 1398 ASSERT(HEAP->mark_compact_collector()->IsMarked(object)); |
| 1341 ASSERT(Heap::Contains(object)); | 1399 ASSERT(HEAP->Contains(object)); |
| 1342 marking_stack.Push(object); | 1400 marking_stack.Push(object); |
| 1343 if (marking_stack.is_full()) return; | 1401 if (marking_stack.is_full()) return; |
| 1344 } | 1402 } |
| 1345 } | 1403 } |
| 1346 #endif | 1404 #endif |
| 1347 UNREACHABLE(); | 1405 UNREACHABLE(); |
| 1348 } | 1406 } |
| 1349 | 1407 |
| 1350 | 1408 |
| 1351 bool MarkCompactCollector::IsUnmarkedHeapObject(Object** p) { | 1409 bool MarkCompactCollector::IsUnmarkedHeapObject(Object** p) { |
| 1352 Object* o = *p; | 1410 Object* o = *p; |
| 1353 if (!o->IsHeapObject()) return false; | 1411 if (!o->IsHeapObject()) return false; |
| 1354 HeapObject* heap_object = HeapObject::cast(o); | 1412 HeapObject* heap_object = HeapObject::cast(o); |
| 1355 MarkBit mark = Marking::MarkBitFrom(heap_object); | 1413 // TODO(gc) ISOLATES MERGE |
| 1414 MarkBit mark = HEAP->marking()->MarkBitFrom(heap_object); |
| 1356 return !mark.Get(); | 1415 return !mark.Get(); |
| 1357 } | 1416 } |
| 1358 | 1417 |
| 1359 | 1418 |
| 1360 void MarkCompactCollector::MarkSymbolTable() { | 1419 void MarkCompactCollector::MarkSymbolTable() { |
| 1361 SymbolTable* symbol_table = Heap::raw_unchecked_symbol_table(); | 1420 SymbolTable* symbol_table = heap_->raw_unchecked_symbol_table(); |
| 1362 // Mark the symbol table itself. | 1421 // Mark the symbol table itself. |
| 1363 MarkBit symbol_table_mark = Marking::MarkBitFrom(symbol_table); | 1422 MarkBit symbol_table_mark = heap_->marking()->MarkBitFrom(symbol_table); |
| 1364 SetMark(symbol_table, symbol_table_mark); | 1423 SetMark(symbol_table, symbol_table_mark); |
| 1365 // Explicitly mark the prefix. | 1424 // Explicitly mark the prefix. |
| 1366 MarkingVisitor marker; | 1425 MarkingVisitor marker(heap_); |
| 1367 symbol_table->IteratePrefix(&marker); | 1426 symbol_table->IteratePrefix(&marker); |
| 1368 ProcessMarkingStack(); | 1427 ProcessMarkingStack(); |
| 1369 } | 1428 } |
| 1370 | 1429 |
| 1371 | 1430 |
| 1372 void MarkCompactCollector::MarkRoots(RootMarkingVisitor* visitor) { | 1431 void MarkCompactCollector::MarkRoots(RootMarkingVisitor* visitor) { |
| 1373 // Mark the heap roots including global variables, stack variables, | 1432 // Mark the heap roots including global variables, stack variables, |
| 1374 // etc., and all objects reachable from them. | 1433 // etc., and all objects reachable from them. |
| 1375 Heap::IterateStrongRoots(visitor, VISIT_ONLY_STRONG); | 1434 HEAP->IterateStrongRoots(visitor, VISIT_ONLY_STRONG); |
| 1376 | 1435 |
| 1377 // Handle the symbol table specially. | 1436 // Handle the symbol table specially. |
| 1378 MarkSymbolTable(); | 1437 MarkSymbolTable(); |
| 1379 | 1438 |
| 1380 // There may be overflowed objects in the heap. Visit them now. | 1439 // There may be overflowed objects in the heap. Visit them now. |
| 1381 while (marking_stack.overflowed()) { | 1440 while (marking_stack_.overflowed()) { |
| 1382 RefillMarkingStack(); | 1441 RefillMarkingStack(); |
| 1383 EmptyMarkingStack(); | 1442 EmptyMarkingStack(); |
| 1384 } | 1443 } |
| 1385 } | 1444 } |
| 1386 | 1445 |
| 1387 | 1446 |
| 1388 void MarkCompactCollector::MarkObjectGroups() { | 1447 void MarkCompactCollector::MarkObjectGroups() { |
| 1389 List<ObjectGroup*>* object_groups = GlobalHandles::ObjectGroups(); | 1448 List<ObjectGroup*>* object_groups = |
| 1449 heap_->isolate()->global_handles()->object_groups(); |
| 1390 | 1450 |
| 1391 for (int i = 0; i < object_groups->length(); i++) { | 1451 for (int i = 0; i < object_groups->length(); i++) { |
| 1392 ObjectGroup* entry = object_groups->at(i); | 1452 ObjectGroup* entry = object_groups->at(i); |
| 1393 if (entry == NULL) continue; | 1453 if (entry == NULL) continue; |
| 1394 | 1454 |
| 1395 List<Object**>& objects = entry->objects_; | 1455 List<Object**>& objects = entry->objects_; |
| 1396 bool group_marked = false; | 1456 bool group_marked = false; |
| 1397 for (int j = 0; j < objects.length(); j++) { | 1457 for (int j = 0; j < objects.length(); j++) { |
| 1398 Object* object = *objects[j]; | 1458 Object* object = *objects[j]; |
| 1399 if (object->IsHeapObject()) { | 1459 if (object->IsHeapObject()) { |
| 1400 HeapObject* heap_object = HeapObject::cast(object); | 1460 HeapObject* heap_object = HeapObject::cast(object); |
| 1401 MarkBit mark = Marking::MarkBitFrom(heap_object); | 1461 MarkBit mark = heap_->marking()->MarkBitFrom(heap_object); |
| 1402 if (mark.Get()) { | 1462 if (mark.Get()) { |
| 1403 group_marked = true; | 1463 group_marked = true; |
| 1404 break; | 1464 break; |
| 1405 } | 1465 } |
| 1406 } | 1466 } |
| 1407 } | 1467 } |
| 1408 | 1468 |
| 1409 if (!group_marked) continue; | 1469 if (!group_marked) continue; |
| 1410 | 1470 |
| 1411 // An object in the group is marked, so mark as grey all white heap | 1471 // An object in the group is marked, so mark as grey all white heap |
| 1412 // objects in the group. | 1472 // objects in the group. |
| 1413 for (int j = 0; j < objects.length(); ++j) { | 1473 for (int j = 0; j < objects.length(); ++j) { |
| 1414 Object* object = *objects[j]; | 1474 Object* object = *objects[j]; |
| 1415 if (object->IsHeapObject()) { | 1475 if (object->IsHeapObject()) { |
| 1416 HeapObject* heap_object = HeapObject::cast(object); | 1476 HeapObject* heap_object = HeapObject::cast(object); |
| 1417 MarkBit mark = Marking::MarkBitFrom(heap_object); | 1477 MarkBit mark = heap_->marking()->MarkBitFrom(heap_object); |
| 1418 MarkObject(heap_object, mark); | 1478 MarkObject(heap_object, mark); |
| 1419 } | 1479 } |
| 1420 } | 1480 } |
| 1421 | 1481 |
| 1422 // Once the entire group has been colored grey, set the object group | 1482 // Once the entire group has been colored grey, set the object group |
| 1423 // to NULL so it won't be processed again. | 1483 // to NULL so it won't be processed again. |
| 1424 delete entry; | 1484 delete entry; |
| 1425 object_groups->at(i) = NULL; | 1485 object_groups->at(i) = NULL; |
| 1426 } | 1486 } |
| 1427 } | 1487 } |
| 1428 | 1488 |
| 1429 | 1489 |
| 1430 void MarkCompactCollector::MarkImplicitRefGroups() { | 1490 void MarkCompactCollector::MarkImplicitRefGroups() { |
| 1431 List<ImplicitRefGroup*>* ref_groups = GlobalHandles::ImplicitRefGroups(); | 1491 List<ImplicitRefGroup*>* ref_groups = |
| 1492 heap_->isolate()->global_handles()->implicit_ref_groups(); |
| 1432 | 1493 |
| 1433 for (int i = 0; i < ref_groups->length(); i++) { | 1494 for (int i = 0; i < ref_groups->length(); i++) { |
| 1434 ImplicitRefGroup* entry = ref_groups->at(i); | 1495 ImplicitRefGroup* entry = ref_groups->at(i); |
| 1435 if (entry == NULL) continue; | 1496 if (entry == NULL) continue; |
| 1436 | 1497 |
| 1437 if (!IsMarked(entry->parent_)) continue; | 1498 if (!IsMarked(entry->parent_)) continue; |
| 1438 | 1499 |
| 1439 List<Object**>& children = entry->children_; | 1500 List<Object**>& children = entry->children_; |
| 1440 // A parent object is marked, so mark as gray all child white heap | 1501 // A parent object is marked, so mark as gray all child white heap |
| 1441 // objects. | 1502 // objects. |
| 1442 for (int j = 0; j < children.length(); ++j) { | 1503 for (int j = 0; j < children.length(); ++j) { |
| 1443 if ((*children[j])->IsHeapObject()) { | 1504 if ((*children[j])->IsHeapObject()) { |
| 1444 HeapObject* child = HeapObject::cast(*children[j]); | 1505 HeapObject* child = HeapObject::cast(*children[j]); |
| 1445 MarkBit mark = Marking::MarkBitFrom(child); | 1506 MarkBit mark = heap_->marking()->MarkBitFrom(child); |
| 1446 MarkObject(child, mark); | 1507 MarkObject(child, mark); |
| 1447 } | 1508 } |
| 1448 } | 1509 } |
| 1449 | 1510 |
| 1450 // Once the entire group has been colored gray, set the group | 1511 // Once the entire group has been colored gray, set the group |
| 1451 // to NULL so it won't be processed again. | 1512 // to NULL so it won't be processed again. |
| 1452 delete entry; | 1513 delete entry; |
| 1453 ref_groups->at(i) = NULL; | 1514 ref_groups->at(i) = NULL; |
| 1454 } | 1515 } |
| 1455 } | 1516 } |
| 1456 | 1517 |
| 1457 | 1518 |
| 1458 // Mark all objects reachable from the objects on the marking stack. | 1519 // Mark all objects reachable from the objects on the marking stack. |
| 1459 // Before: the marking stack contains zero or more heap object pointers. | 1520 // Before: the marking stack contains zero or more heap object pointers. |
| 1460 // After: the marking stack is empty, and all objects reachable from the | 1521 // After: the marking stack is empty, and all objects reachable from the |
| 1461 // marking stack have been marked, or are overflowed in the heap. | 1522 // marking stack have been marked, or are overflowed in the heap. |
| 1462 void MarkCompactCollector::EmptyMarkingStack() { | 1523 void MarkCompactCollector::EmptyMarkingStack() { |
| 1463 while (!marking_stack.is_empty()) { | 1524 while (!marking_stack_.is_empty()) { |
| 1464 HeapObject* object = marking_stack.Pop(); | 1525 HeapObject* object = marking_stack_.Pop(); |
| 1465 ASSERT(object->IsHeapObject()); | 1526 ASSERT(object->IsHeapObject()); |
| 1466 ASSERT(Heap::Contains(object)); | 1527 ASSERT(heap_->Contains(object)); |
| 1467 ASSERT(IsMarked(object)); | 1528 ASSERT(IsMarked(object)); |
| 1468 ASSERT(!object->IsOverflowed()); | 1529 ASSERT(!object->IsOverflowed()); |
| 1469 | 1530 |
| 1470 Map* map = object->map(); | 1531 Map* map = object->map(); |
| 1471 MarkBit map_mark = Marking::MarkBitFromOldSpace(map); | 1532 MarkBit map_mark = Marking::MarkBitFromOldSpace(map); |
| 1472 MarkObject(map, map_mark); | 1533 MarkObject(map, map_mark); |
| 1473 | 1534 |
| 1474 StaticMarkingVisitor::IterateBody(map, object); | 1535 StaticMarkingVisitor::IterateBody(map, object); |
| 1475 } | 1536 } |
| 1476 } | 1537 } |
| 1477 | 1538 |
| 1478 | 1539 |
| 1479 // Sweep the heap for overflowed objects, clear their overflow bits, and | 1540 // Sweep the heap for overflowed objects, clear their overflow bits, and |
| 1480 // push them on the marking stack. Stop early if the marking stack fills | 1541 // push them on the marking stack. Stop early if the marking stack fills |
| 1481 // before sweeping completes. If sweeping completes, there are no remaining | 1542 // before sweeping completes. If sweeping completes, there are no remaining |
| 1482 // overflowed objects in the heap so the overflow flag on the marking stack | 1543 // overflowed objects in the heap so the overflow flag on the marking stack |
| 1483 // is cleared. | 1544 // is cleared. |
| 1484 void MarkCompactCollector::RefillMarkingStack() { | 1545 void MarkCompactCollector::RefillMarkingStack() { |
| 1485 ASSERT(marking_stack.overflowed()); | 1546 UNREACHABLE(); |
| 1486 | 1547 |
| 1487 SemiSpaceIterator new_it(Heap::new_space(), &OverflowObjectSize); | 1548 #if 0 |
| 1488 ScanOverflowedObjects(&new_it); | 1549 ASSERT(marking_stack_.overflowed()); |
| 1489 if (marking_stack.is_full()) return; | |
| 1490 | 1550 |
| 1491 HeapObjectIterator old_pointer_it(Heap::old_pointer_space(), | 1551 SemiSpaceIterator new_it(HEAP->new_space(), &OverflowObjectSize); |
| 1552 OverflowedObjectsScanner::ScanOverflowedObjects(this, &new_it); |
| 1553 if (marking_stack_.is_full()) return; |
| 1554 |
| 1555 HeapObjectIterator old_pointer_it(HEAP->old_pointer_space(), |
| 1492 &OverflowObjectSize); | 1556 &OverflowObjectSize); |
| 1493 ScanOverflowedObjects(&old_pointer_it); | 1557 OverflowedObjectsScanner::ScanOverflowedObjects(this, &old_pointer_it); |
| 1494 if (marking_stack.is_full()) return; | 1558 if (marking_stack_.is_full()) return; |
| 1495 | 1559 |
| 1496 HeapObjectIterator old_data_it(Heap::old_data_space(), &OverflowObjectSize); | 1560 HeapObjectIterator old_data_it(HEAP->old_data_space(), &OverflowObjectSize); |
| 1497 ScanOverflowedObjects(&old_data_it); | 1561 OverflowedObjectsScanner::ScanOverflowedObjects(this, &old_data_it); |
| 1498 if (marking_stack.is_full()) return; | 1562 if (marking_stack_.is_full()) return; |
| 1499 | 1563 |
| 1500 HeapObjectIterator code_it(Heap::code_space(), &OverflowObjectSize); | 1564 HeapObjectIterator code_it(HEAP->code_space(), &OverflowObjectSize); |
| 1501 ScanOverflowedObjects(&code_it); | 1565 OverflowedObjectsScanner::ScanOverflowedObjects(this, &code_it); |
| 1502 if (marking_stack.is_full()) return; | 1566 if (marking_stack_.is_full()) return; |
| 1503 | 1567 |
| 1504 HeapObjectIterator map_it(Heap::map_space(), &OverflowObjectSize); | 1568 HeapObjectIterator map_it(HEAP->map_space(), &OverflowObjectSize); |
| 1505 ScanOverflowedObjects(&map_it); | 1569 OverflowedObjectsScanner::ScanOverflowedObjects(this, &map_it); |
| 1506 if (marking_stack.is_full()) return; | 1570 if (marking_stack_.is_full()) return; |
| 1507 | 1571 |
| 1508 HeapObjectIterator cell_it(Heap::cell_space(), &OverflowObjectSize); | 1572 HeapObjectIterator cell_it(HEAP->cell_space(), &OverflowObjectSize); |
| 1509 ScanOverflowedObjects(&cell_it); | 1573 OverflowedObjectsScanner::ScanOverflowedObjects(this, &cell_it); |
| 1510 if (marking_stack.is_full()) return; | 1574 if (marking_stack_.is_full()) return; |
| 1511 | 1575 |
| 1512 LargeObjectIterator lo_it(Heap::lo_space(), &OverflowObjectSize); | 1576 LargeObjectIterator lo_it(HEAP->lo_space(), &OverflowObjectSize); |
| 1513 ScanOverflowedObjects(&lo_it); | 1577 OverflowedObjectsScanner::ScanOverflowedObjects(this, &lo_it); |
| 1514 if (marking_stack.is_full()) return; | 1578 if (marking_stack_.is_full()) return; |
| 1515 | 1579 |
| 1516 marking_stack.clear_overflowed(); | 1580 marking_stack_.clear_overflowed(); |
| 1581 #endif |
| 1517 } | 1582 } |
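
RefillMarkingStack is currently stubbed out (UNREACHABLE, with the old body kept under #if 0), but the comment above the function still documents the intended protocol: when the marking stack fills up, objects are tagged as overflowed in the heap, and once the stack has drained the heap is swept to push them back. A minimal standalone sketch of that protocol, using hypothetical types rather than V8's marking stack API:

    #include <cstddef>
    #include <vector>

    struct Obj { bool marked = false; bool overflowed = false; };

    // A bounded marking stack that records overflow instead of growing.
    class BoundedMarkingStack {
     public:
      explicit BoundedMarkingStack(size_t capacity) : capacity_(capacity) {}

      void Push(Obj* obj) {
        if (stack_.size() < capacity_) {
          stack_.push_back(obj);
        } else {
          obj->overflowed = true;  // Remember the object in the heap instead.
          overflowed_ = true;
        }
      }

      Obj* Pop() { Obj* top = stack_.back(); stack_.pop_back(); return top; }
      bool is_empty() const { return stack_.empty(); }
      bool is_full() const { return stack_.size() >= capacity_; }
      bool overflowed() const { return overflowed_; }
      void clear_overflowed() { overflowed_ = false; }

     private:
      size_t capacity_;
      bool overflowed_ = false;
      std::vector<Obj*> stack_;
    };

    // Refill: scan the heap for overflowed objects and push them again,
    // stopping early if the stack fills up before the scan completes.
    void RefillFromHeap(BoundedMarkingStack* stack, std::vector<Obj*>& heap) {
      for (Obj* obj : heap) {
        if (obj->overflowed) {
          obj->overflowed = false;
          stack->Push(obj);
          if (stack->is_full()) return;  // Retry after draining the stack.
        }
      }
      stack->clear_overflowed();  // Whole heap scanned: nothing left behind.
    }
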
| 1518 | 1583 |
| 1519 | 1584 |
| 1520 // Mark all objects reachable (transitively) from objects on the marking | 1585 // Mark all objects reachable (transitively) from objects on the marking |
| 1521 // stack. Before: the marking stack contains zero or more heap object | 1586 // stack. Before: the marking stack contains zero or more heap object |
| 1522 // pointers. After: the marking stack is empty and there are no overflowed | 1587 // pointers. After: the marking stack is empty and there are no overflowed |
| 1523 // objects in the heap. | 1588 // objects in the heap. |
| 1524 void MarkCompactCollector::ProcessMarkingStack() { | 1589 void MarkCompactCollector::ProcessMarkingStack() { |
| 1525 EmptyMarkingStack(); | 1590 EmptyMarkingStack(); |
| 1526 while (marking_stack.overflowed()) { | 1591 while (marking_stack_.overflowed()) { |
| 1527 RefillMarkingStack(); | 1592 RefillMarkingStack(); |
| 1528 EmptyMarkingStack(); | 1593 EmptyMarkingStack(); |
| 1529 } | 1594 } |
| 1530 } | 1595 } |
| 1531 | 1596 |
| 1532 | 1597 |
| 1533 void MarkCompactCollector::ProcessExternalMarking() { | 1598 void MarkCompactCollector::ProcessExternalMarking() { |
| 1534 bool work_to_do = true; | 1599 bool work_to_do = true; |
| 1535 ASSERT(marking_stack.is_empty()); | 1600 ASSERT(marking_stack_.is_empty()); |
| 1536 while (work_to_do) { | 1601 while (work_to_do) { |
| 1537 MarkObjectGroups(); | 1602 MarkObjectGroups(); |
| 1538 MarkImplicitRefGroups(); | 1603 MarkImplicitRefGroups(); |
| 1539 work_to_do = !marking_stack.is_empty(); | 1604 work_to_do = !marking_stack_.is_empty(); |
| 1540 ProcessMarkingStack(); | 1605 ProcessMarkingStack(); |
| 1541 } | 1606 } |
| 1542 } | 1607 } |
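
ProcessExternalMarking above is a fixed-point loop: marking one object group can turn members of another group grey, so the group passes and the marking stack are drained repeatedly until a full round adds no new work. The standalone sketch below illustrates only that loop shape with hypothetical types; it is not the V8 API.

    #include <vector>

    struct Obj { bool marked = false; };
    struct Group { std::vector<Obj*> members; };

    // Mark every member of a group once any member is already marked.
    // Returns true if this pass marked at least one new object.
    static bool MarkGroupsOnce(std::vector<Group>& groups) {
      bool progress = false;
      for (Group& group : groups) {
        bool any_marked = false;
        for (Obj* member : group.members) any_marked |= member->marked;
        if (!any_marked) continue;
        for (Obj* member : group.members) {
          if (!member->marked) { member->marked = true; progress = true; }
        }
      }
      return progress;
    }

    // Repeat until a whole pass makes no progress (the fixed point).
    static void ProcessGroupsToFixedPoint(std::vector<Group>& groups) {
      while (MarkGroupsOnce(groups)) {
        // In the collector, the marking stack would be drained here so that
        // objects reachable from newly marked members are marked as well.
      }
    }
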
| 1543 | 1608 |
| 1544 | 1609 |
| 1545 void MarkCompactCollector::MarkLiveObjects() { | 1610 void MarkCompactCollector::MarkLiveObjects() { |
| 1546 GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_MARK); | 1611 GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_MARK); |
| 1547 // The recursive GC marker detects when it is nearing stack overflow, | 1612 // The recursive GC marker detects when it is nearing stack overflow, |
| 1548 // and switches to a different marking system. JS interrupts interfere | 1613 // and switches to a different marking system. JS interrupts interfere |
| 1549 // with the C stack limit check. | 1614 // with the C stack limit check. |
| 1550 PostponeInterruptsScope postpone; | 1615 PostponeInterruptsScope postpone(heap_->isolate()); |
| 1551 | 1616 |
| 1552 #ifdef DEBUG | 1617 #ifdef DEBUG |
| 1553 ASSERT(state_ == PREPARE_GC); | 1618 ASSERT(state_ == PREPARE_GC); |
| 1554 state_ = MARK_LIVE_OBJECTS; | 1619 state_ = MARK_LIVE_OBJECTS; |
| 1555 #endif | 1620 #endif |
| 1556 // The to space contains live objects, the from space is used as a marking | 1621 // The to space contains live objects, the from space is used as a marking |
| 1557 // stack. | 1622 // stack. |
| 1558 marking_stack.Initialize(Heap::new_space()->FromSpaceLow(), | 1623 marking_stack_.Initialize(heap_->new_space()->FromSpaceLow(), |
| 1559 Heap::new_space()->FromSpaceHigh()); | 1624 heap_->new_space()->FromSpaceHigh()); |
| 1560 | 1625 |
| 1561 ASSERT(!marking_stack.overflowed()); | 1626 ASSERT(!marking_stack_.overflowed()); |
| 1562 | 1627 |
| 1563 PrepareForCodeFlushing(); | 1628 PrepareForCodeFlushing(); |
| 1564 | 1629 |
| 1565 RootMarkingVisitor root_visitor; | 1630 RootMarkingVisitor root_visitor(heap_); |
| 1566 MarkRoots(&root_visitor); | 1631 MarkRoots(&root_visitor); |
| 1567 | 1632 |
| 1568 // The objects reachable from the roots are marked, yet unreachable | 1633 // The objects reachable from the roots are marked, yet unreachable |
| 1569 // objects are unmarked. Mark objects reachable due to host | 1634 // objects are unmarked. Mark objects reachable due to host |
| 1570 // application specific logic. | 1635 // application specific logic. |
| 1571 ProcessExternalMarking(); | 1636 ProcessExternalMarking(); |
| 1572 | 1637 |
| 1573 // The objects reachable from the roots or object groups are marked, | 1638 // The objects reachable from the roots or object groups are marked, |
| 1574 // yet unreachable objects are unmarked. Mark objects reachable | 1639 // yet unreachable objects are unmarked. Mark objects reachable |
| 1575 // only from weak global handles. | 1640 // only from weak global handles. |
| 1576 // | 1641 // |
| 1577 // First we identify nonlive weak handles and mark them as pending | 1642 // First we identify nonlive weak handles and mark them as pending |
| 1578 // destruction. | 1643 // destruction. |
| 1579 GlobalHandles::IdentifyWeakHandles(&IsUnmarkedHeapObject); | 1644 heap_->isolate()->global_handles()->IdentifyWeakHandles( |
| 1645 &IsUnmarkedHeapObject); |
| 1580 // Then we mark the objects and process the transitive closure. | 1646 // Then we mark the objects and process the transitive closure. |
| 1581 GlobalHandles::IterateWeakRoots(&root_visitor); | 1647 heap_->isolate()->global_handles()->IterateWeakRoots(&root_visitor); |
| 1582 while (marking_stack.overflowed()) { | 1648 while (marking_stack_.overflowed()) { |
| 1583 RefillMarkingStack(); | 1649 RefillMarkingStack(); |
| 1584 EmptyMarkingStack(); | 1650 EmptyMarkingStack(); |
| 1585 } | 1651 } |
| 1586 | 1652 |
| 1587 // Repeat host application specific marking to mark unmarked objects | 1653 // Repeat host application specific marking to mark unmarked objects |
| 1588 // reachable from the weak roots. | 1654 // reachable from the weak roots. |
| 1589 ProcessExternalMarking(); | 1655 ProcessExternalMarking(); |
| 1590 | 1656 |
| 1591 AfterMarking(); | 1657 AfterMarking(); |
| 1592 } | 1658 } |
| 1593 | 1659 |
| 1594 | 1660 |
| 1595 void MarkCompactCollector::AfterMarking() { | 1661 void MarkCompactCollector::AfterMarking() { |
| 1596 // Prune the symbol table removing all symbols only pointed to by the | 1662 // Prune the symbol table removing all symbols only pointed to by the |
| 1597 // symbol table. Cannot use symbol_table() here because the symbol | 1663 // symbol table. Cannot use symbol_table() here because the symbol |
| 1598 // table is marked. | 1664 // table is marked. |
| 1599 SymbolTable* symbol_table = Heap::raw_unchecked_symbol_table(); | 1665 SymbolTable* symbol_table = heap_->raw_unchecked_symbol_table(); |
| 1600 SymbolTableCleaner v; | 1666 SymbolTableCleaner v; |
| 1601 symbol_table->IterateElements(&v); | 1667 symbol_table->IterateElements(&v); |
| 1602 symbol_table->ElementsRemoved(v.PointersRemoved()); | 1668 symbol_table->ElementsRemoved(v.PointersRemoved()); |
| 1603 ExternalStringTable::Iterate(&v); | 1669 heap_->external_string_table_.Iterate(&v); |
| 1604 ExternalStringTable::CleanUp(); | 1670 heap_->external_string_table_.CleanUp(); |
| 1605 | 1671 |
| 1606 // Process the weak references. | 1672 // Process the weak references. |
| 1607 MarkCompactWeakObjectRetainer mark_compact_object_retainer; | 1673 MarkCompactWeakObjectRetainer mark_compact_object_retainer; |
| 1608 Heap::ProcessWeakReferences(&mark_compact_object_retainer); | 1674 heap_->ProcessWeakReferences(&mark_compact_object_retainer); |
| 1609 | 1675 |
| 1610 // Remove object groups after marking phase. | 1676 // Remove object groups after marking phase. |
| 1611 GlobalHandles::RemoveObjectGroups(); | 1677 heap_->isolate()->global_handles()->RemoveObjectGroups(); |
| 1612 GlobalHandles::RemoveImplicitRefGroups(); | 1678 heap_->isolate()->global_handles()->RemoveImplicitRefGroups(); |
| 1613 | 1679 |
| 1614 // Flush code from collected candidates. | 1680 // Flush code from collected candidates. |
| 1615 if (FLAG_flush_code) { | 1681 if (is_code_flushing_enabled()) { |
| 1616 FlushCode::ProcessCandidates(); | 1682 code_flusher_->ProcessCandidates(); |
| 1617 } | 1683 } |
| 1618 | 1684 |
| 1619 // Clean up dead objects from the runtime profiler. | 1685 // Clean up dead objects from the runtime profiler. |
| 1620 RuntimeProfiler::RemoveDeadSamples(); | 1686 heap_->isolate()->runtime_profiler()->RemoveDeadSamples(); |
| 1621 } | 1687 } |
| 1622 | 1688 |
| 1623 | 1689 |
| 1624 #ifdef DEBUG | 1690 #ifdef DEBUG |
| 1625 void MarkCompactCollector::UpdateLiveObjectCount(HeapObject* obj) { | 1691 void MarkCompactCollector::UpdateLiveObjectCount(HeapObject* obj) { |
| 1626 live_bytes_ += obj->Size(); | 1692 live_bytes_ += obj->Size(); |
| 1627 if (Heap::new_space()->Contains(obj)) { | 1693 if (HEAP->new_space()->Contains(obj)) { |
| 1628 live_young_objects_size_ += obj->Size(); | 1694 live_young_objects_size_ += obj->Size(); |
| 1629 } else if (Heap::map_space()->Contains(obj)) { | 1695 } else if (HEAP->map_space()->Contains(obj)) { |
| 1630 ASSERT(obj->IsMap()); | 1696 ASSERT(obj->IsMap()); |
| 1631 live_map_objects_size_ += obj->Size(); | 1697 live_map_objects_size_ += obj->Size(); |
| 1632 } else if (Heap::cell_space()->Contains(obj)) { | 1698 } else if (HEAP->cell_space()->Contains(obj)) { |
| 1633 ASSERT(obj->IsJSGlobalPropertyCell()); | 1699 ASSERT(obj->IsJSGlobalPropertyCell()); |
| 1634 live_cell_objects_size_ += obj->Size(); | 1700 live_cell_objects_size_ += obj->Size(); |
| 1635 } else if (Heap::old_pointer_space()->Contains(obj)) { | 1701 } else if (HEAP->old_pointer_space()->Contains(obj)) { |
| 1636 live_old_pointer_objects_size_ += obj->Size(); | 1702 live_old_pointer_objects_size_ += obj->Size(); |
| 1637 } else if (Heap::old_data_space()->Contains(obj)) { | 1703 } else if (HEAP->old_data_space()->Contains(obj)) { |
| 1638 live_old_data_objects_size_ += obj->Size(); | 1704 live_old_data_objects_size_ += obj->Size(); |
| 1639 } else if (Heap::code_space()->Contains(obj)) { | 1705 } else if (HEAP->code_space()->Contains(obj)) { |
| 1640 live_code_objects_size_ += obj->Size(); | 1706 live_code_objects_size_ += obj->Size(); |
| 1641 } else if (Heap::lo_space()->Contains(obj)) { | 1707 } else if (HEAP->lo_space()->Contains(obj)) { |
| 1642 live_lo_objects_size_ += obj->Size(); | 1708 live_lo_objects_size_ += obj->Size(); |
| 1643 } else { | 1709 } else { |
| 1644 UNREACHABLE(); | 1710 UNREACHABLE(); |
| 1645 } | 1711 } |
| 1646 } | 1712 } |
| 1647 #endif // DEBUG | 1713 #endif // DEBUG |
| 1648 | 1714 |
| 1649 | 1715 |
| 1650 // Safe to use during marking phase only. | 1716 // Safe to use during marking phase only. |
| 1651 bool MarkCompactCollector::SafeIsMap(HeapObject* object) { | 1717 bool MarkCompactCollector::SafeIsMap(HeapObject* object) { |
| 1652 return object->map()->instance_type() == MAP_TYPE; | 1718 return object->map()->instance_type() == MAP_TYPE; |
| 1653 } | 1719 } |
| 1654 | 1720 |
| 1655 | 1721 |
| 1656 void MarkCompactCollector::ClearNonLiveTransitions() { | 1722 void MarkCompactCollector::ClearNonLiveTransitions() { |
| 1657 HeapObjectIterator map_iterator(Heap::map_space()); | 1723 HeapObjectIterator map_iterator(HEAP->map_space()); |
| 1658 // Iterate over the map space, setting map transitions that go from | 1724 // Iterate over the map space, setting map transitions that go from |
| 1659 // a marked map to an unmarked map to null transitions. At the same time, | 1725 // a marked map to an unmarked map to null transitions. At the same time, |
| 1660 // set all the prototype fields of maps back to their original value, | 1726 // set all the prototype fields of maps back to their original value, |
| 1661 // dropping the back pointers temporarily stored in the prototype field. | 1727 // dropping the back pointers temporarily stored in the prototype field. |
| 1662 // Setting the prototype field requires following the linked list of | 1728 // Setting the prototype field requires following the linked list of |
| 1663 // back pointers, reversing them all at once. This allows us to find | 1729 // back pointers, reversing them all at once. This allows us to find |
| 1664 // those maps with map transitions that need to be nulled, and only | 1730 // those maps with map transitions that need to be nulled, and only |
| 1665 // scan the descriptor arrays of those maps, not all maps. | 1731 // scan the descriptor arrays of those maps, not all maps. |
| 1666 // All of these actions are carried out only on maps of JSObjects | 1732 // All of these actions are carried out only on maps of JSObjects |
| 1667 // and related subtypes. | 1733 // and related subtypes. |
| (...skipping 32 matching lines...) |
| 1700 while (SafeIsMap(current)) { | 1766 while (SafeIsMap(current)) { |
| 1701 next = current->prototype(); | 1767 next = current->prototype(); |
| 1702 // There should never be a dead map above a live map. | 1768 // There should never be a dead map above a live map. |
| 1703 MarkBit current_mark = Marking::MarkBitFromOldSpace(current); | 1769 MarkBit current_mark = Marking::MarkBitFromOldSpace(current); |
| 1704 ASSERT(on_dead_path || current_mark.Get()); | 1770 ASSERT(on_dead_path || current_mark.Get()); |
| 1705 | 1771 |
| 1706 // A live map above a dead map indicates a dead transition. | 1772 // A live map above a dead map indicates a dead transition. |
| 1707 // This test will always be false on the first iteration. | 1773 // This test will always be false on the first iteration. |
| 1708 if (on_dead_path && current_mark.Get()) { | 1774 if (on_dead_path && current_mark.Get()) { |
| 1709 on_dead_path = false; | 1775 on_dead_path = false; |
| 1710 current->ClearNonLiveTransitions(real_prototype); | 1776 current->ClearNonLiveTransitions(heap_, real_prototype); |
| 1711 } | 1777 } |
| 1712 *HeapObject::RawField(current, Map::kPrototypeOffset) = | 1778 *HeapObject::RawField(current, Map::kPrototypeOffset) = |
| 1713 real_prototype; | 1779 real_prototype; |
| 1714 current = reinterpret_cast<Map*>(next); | 1780 current = reinterpret_cast<Map*>(next); |
| 1715 } | 1781 } |
| 1716 } | 1782 } |
| 1717 } | 1783 } |
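A minimal, self-contained sketch of the back-pointer walk described in the comment above. ToyMap, ProcessBackPointerChain and their fields are illustrative stand-ins, not the real v8::internal::Map API: during this phase each map's prototype slot temporarily holds the back pointer to its parent, so walking the chain from a leaf finds the first live map above a dead one (a dead transition), clears it, and restores the real prototype along the way.

// Toy model of the back-pointer walk in ClearNonLiveTransitions.
// ToyMap and its fields are hypothetical stand-ins, not V8 types.
#include <cassert>
#include <cstdio>

struct ToyMap {
  ToyMap* prototype;         // During GC this slot holds the parent map (back pointer).
  bool marked;               // Mark bit: true if the map survived marking.
  bool transitions_cleared;  // Stand-in for "non-live transitions were cleared".
};

// Walk from a leaf map towards the root, restoring prototypes and clearing
// transitions on the first live map found above a dead one.
static void ProcessBackPointerChain(ToyMap* leaf, ToyMap* real_prototype) {
  bool on_dead_path = !leaf->marked;
  ToyMap* current = leaf;
  while (current != nullptr) {
    ToyMap* next = current->prototype;      // Back pointer stored here.
    // A live map above a dead map indicates a dead transition.
    if (on_dead_path && current->marked) {
      on_dead_path = false;
      current->transitions_cleared = true;
    }
    current->prototype = real_prototype;    // Restore the original prototype.
    current = next;
  }
}

int main() {
  ToyMap root = {nullptr, true, false};       // Live parent map.
  ToyMap dead_child = {&root, false, false};  // Unmarked: a dead map below it.
  ToyMap proto = {nullptr, true, false};      // Stand-in for the real prototype.
  ProcessBackPointerChain(&dead_child, &proto);
  assert(root.transitions_cleared);           // Live map above a dead one.
  assert(dead_child.prototype == &proto);     // Prototype restored.
  std::printf("dead transition cleared\n");
  return 0;
}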
| 1718 | 1784 |
| 1719 | 1785 |
| 1720 // We scavenge new space simultaneously with sweeping. This is done in two | 1786 // We scavenge new space simultaneously with sweeping. This is done in two |
| 1721 // passes. | 1787 // passes. |
| 1722 // | 1788 // |
| 1723 // The first pass migrates all live objects from one semispace to the other | 1789 // The first pass migrates all live objects from one semispace to the other |
| 1724 // or promotes them to old space. The forwarding address is written directly | 1790 // or promotes them to old space. The forwarding address is written directly |
| 1725 // into the first word of the object without any encoding. If the object is | 1791 // into the first word of the object without any encoding. If the object is |
| 1726 // dead we write NULL as the forwarding address. | 1792 // dead we write NULL as the forwarding address. |
| 1727 // | 1793 // |
| 1728 // The second pass updates pointers to new space in all spaces. It is possible | 1794 // The second pass updates pointers to new space in all spaces. It is possible |
| 1729 // to encounter pointers to dead new space objects during traversal of pointers | 1795 // to encounter pointers to dead new space objects during traversal of pointers |
| 1730 // to new space. We should clear them to avoid encountering them during the | 1796 // to new space. We should clear them to avoid encountering them during the |
| 1731 // next pointer iteration. This is an issue if the store buffer overflows and | 1797 // next pointer iteration. This is an issue if the store buffer overflows and |
| 1732 // we have to scan the entire old space, including dead objects, looking for | 1798 // we have to scan the entire old space, including dead objects, looking for |
| 1733 // pointers to new space. | 1799 // pointers to new space. |
| 1734 static void MigrateObject(Address dst, | 1800 static void MigrateObject(Heap* heap, |
| 1801 Address dst, |
| 1735 Address src, | 1802 Address src, |
| 1736 int size, | 1803 int size, |
| 1737 bool to_old_space) { | 1804 bool to_old_space) { |
| 1738 if (to_old_space) { | 1805 if (to_old_space) { |
| 1739 Heap::CopyBlockToOldSpaceAndUpdateWriteBarrier(dst, src, size); | 1806 HEAP->CopyBlockToOldSpaceAndUpdateWriteBarrier(dst, src, size); |
| 1740 } else { | 1807 } else { |
| 1741 Heap::CopyBlock(dst, src, size); | 1808 heap->CopyBlock(dst, src, size); |
| 1742 } | 1809 } |
| 1743 Memory::Address_at(src) = dst; | 1810 Memory::Address_at(src) = dst; |
| 1744 } | 1811 } |
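As a rough illustration of the forwarding protocol MigrateObject relies on (the destination address is written, unencoded, into the first word of the old copy, clobbering its map word), here is a stand-alone sketch using plain byte buffers; ToyMigrateObject, Forwarded and the sizes are invented for the example, not V8 APIs.

// Sketch of the unencoded forwarding-address protocol used by the new-space
// sweep; buffers and names are illustrative, not real heap objects.
#include <cstdint>
#include <cstdio>
#include <cstring>

using Address = uint8_t*;

// Pass one: copy the object, then record the destination in the first word
// of the source. This clobbers the old copy's map word, which is fine
// because only the pointer-updating pass will look at it again.
static void ToyMigrateObject(Address dst, Address src, int size) {
  std::memcpy(dst, src, static_cast<std::size_t>(size));
  std::memcpy(src, &dst, sizeof(dst));  // Forwarding address, no encoding.
}

// Pass two: read the forwarding address back; nullptr would mean "dead".
static Address Forwarded(Address old_addr) {
  Address fwd;
  std::memcpy(&fwd, old_addr, sizeof(fwd));
  return fwd;
}

int main() {
  uint8_t from_space[32] = {1, 2, 3, 4};
  uint8_t to_space[32] = {};
  ToyMigrateObject(to_space, from_space, 16);
  std::printf("forwarded to %p (to_space at %p)\n",
              static_cast<void*>(Forwarded(from_space)),
              static_cast<void*>(to_space));
  return 0;
}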
| 1745 | 1812 |
| 1746 | 1813 |
| 1747 class StaticPointersToNewGenUpdatingVisitor : public | 1814 class StaticPointersToNewGenUpdatingVisitor : public |
| 1748 StaticNewSpaceVisitor<StaticPointersToNewGenUpdatingVisitor> { | 1815 StaticNewSpaceVisitor<StaticPointersToNewGenUpdatingVisitor> { |
| 1749 public: | 1816 public: |
| 1750 static inline void VisitPointer(Object** p) { | 1817 static inline void VisitPointer(Heap* heap, Object** p) { |
| 1751 if (!(*p)->IsHeapObject()) return; | 1818 if (!(*p)->IsHeapObject()) return; |
| 1752 | 1819 |
| 1753 HeapObject* obj = HeapObject::cast(*p); | 1820 HeapObject* obj = HeapObject::cast(*p); |
| 1754 Address old_addr = obj->address(); | 1821 Address old_addr = obj->address(); |
| 1755 | 1822 |
| 1756 if (Heap::new_space()->Contains(obj)) { | 1823 if (heap->new_space()->Contains(obj)) { |
| 1757 ASSERT(Heap::InFromSpace(*p)); | 1824 ASSERT(heap->InFromSpace(*p)); |
| 1758 *p = HeapObject::FromAddress(Memory::Address_at(old_addr)); | 1825 *p = HeapObject::FromAddress(Memory::Address_at(old_addr)); |
| 1826 ASSERT(!heap->InFromSpace(*p)); |
| 1759 } | 1827 } |
| 1760 } | 1828 } |
| 1761 }; | 1829 }; |
| 1762 | 1830 |
| 1763 | 1831 |
| 1764 // Visitor for updating pointers from live objects in old spaces to new space. | 1832 // Visitor for updating pointers from live objects in old spaces to new space. |
| 1765 // It does not expect to encounter pointers to dead objects. | 1833 // It does not expect to encounter pointers to dead objects. |
| 1766 class PointersToNewGenUpdatingVisitor: public ObjectVisitor { | 1834 class PointersToNewGenUpdatingVisitor: public ObjectVisitor { |
| 1767 public: | 1835 public: |
| 1836 explicit PointersToNewGenUpdatingVisitor(Heap* heap) : heap_(heap) { } |
| 1837 |
| 1768 void VisitPointer(Object** p) { | 1838 void VisitPointer(Object** p) { |
| 1769 StaticPointersToNewGenUpdatingVisitor::VisitPointer(p); | 1839 StaticPointersToNewGenUpdatingVisitor::VisitPointer(heap_, p); |
| 1770 } | 1840 } |
| 1771 | 1841 |
| 1772 void VisitPointers(Object** start, Object** end) { | 1842 void VisitPointers(Object** start, Object** end) { |
| 1773 for (Object** p = start; p < end; p++) { | 1843 for (Object** p = start; p < end; p++) { |
| 1774 StaticPointersToNewGenUpdatingVisitor::VisitPointer(p); | 1844 StaticPointersToNewGenUpdatingVisitor::VisitPointer(heap_, p); |
| 1775 } | 1845 } |
| 1776 } | 1846 } |
| 1777 | 1847 |
| 1778 void VisitCodeTarget(RelocInfo* rinfo) { | 1848 void VisitCodeTarget(RelocInfo* rinfo) { |
| 1779 ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode())); | 1849 ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode())); |
| 1780 Object* target = Code::GetCodeFromTargetAddress(rinfo->target_address()); | 1850 Object* target = Code::GetCodeFromTargetAddress(rinfo->target_address()); |
| 1781 VisitPointer(&target); | 1851 VisitPointer(&target); |
| 1782 rinfo->set_target_address(Code::cast(target)->instruction_start(), NULL); | 1852 rinfo->set_target_address(Code::cast(target)->instruction_start(), NULL); |
| 1783 } | 1853 } |
| 1784 | 1854 |
| 1785 void VisitDebugTarget(RelocInfo* rinfo) { | 1855 void VisitDebugTarget(RelocInfo* rinfo) { |
| 1786 ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) && | 1856 ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) && |
| 1787 rinfo->IsPatchedReturnSequence()) || | 1857 rinfo->IsPatchedReturnSequence()) || |
| 1788 (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) && | 1858 (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) && |
| 1789 rinfo->IsPatchedDebugBreakSlotSequence())); | 1859 rinfo->IsPatchedDebugBreakSlotSequence())); |
| 1790 Object* target = Code::GetCodeFromTargetAddress(rinfo->call_address()); | 1860 Object* target = Code::GetCodeFromTargetAddress(rinfo->call_address()); |
| 1791 VisitPointer(&target); | 1861 VisitPointer(&target); |
| 1792 rinfo->set_call_address(Code::cast(target)->instruction_start()); | 1862 rinfo->set_call_address(Code::cast(target)->instruction_start()); |
| 1793 } | 1863 } |
| 1864 private: |
| 1865 Heap* heap_; |
| 1794 }; | 1866 }; |
| 1795 | 1867 |
| 1796 | 1868 |
| 1797 static void UpdatePointerToNewGen(HeapObject** p, HeapObject* object) { | 1869 static void UpdatePointerToNewGen(HeapObject** p, HeapObject* object) { |
| 1798 ASSERT(Heap::InFromSpace(object)); | 1870 ASSERT(HEAP->InFromSpace(object)); |
| 1799 ASSERT(*p == object); | 1871 ASSERT(*p == object); |
| 1800 | 1872 |
| 1801 Address old_addr = object->address(); | 1873 Address old_addr = object->address(); |
| 1802 | 1874 |
| 1803 Address new_addr = Memory::Address_at(old_addr); | 1875 Address new_addr = Memory::Address_at(old_addr); |
| 1804 | 1876 |
| 1805 // The new space sweep will overwrite the map word of dead objects | 1877 // The new space sweep will overwrite the map word of dead objects |
| 1806 // with NULL. In this case we do not need to transfer this entry to | 1878 // with NULL. In this case we do not need to transfer this entry to |
| 1807 // the store buffer which we are rebuilding. | 1879 // the store buffer which we are rebuilding. |
| 1808 if (new_addr != NULL) { | 1880 if (new_addr != NULL) { |
| 1809 *p = HeapObject::FromAddress(new_addr); | 1881 *p = HeapObject::FromAddress(new_addr); |
| 1810 if (Heap::InNewSpace(new_addr)) { | 1882 if (HEAP->InNewSpace(new_addr)) { |
| 1811 StoreBuffer::EnterDirectlyIntoStoreBuffer(reinterpret_cast<Address>(p)); | 1883 HEAP->store_buffer()-> |
| 1884 EnterDirectlyIntoStoreBuffer(reinterpret_cast<Address>(p)); |
| 1812 } | 1885 } |
| 1813 } else { | 1886 } else { |
| 1814 // We have to zap this pointer, because the store buffer may overflow later, | 1887 // We have to zap this pointer, because the store buffer may overflow later, |
| 1815 // and then we have to scan the entire heap and we don't want to find | 1888 // and then we have to scan the entire heap and we don't want to find |
| 1816 // spurious new space pointers in the old space. | 1890 // spurious new space pointers in the old space. |
| 1817 *p = HeapObject::FromAddress(NULL); // Fake heap object not in new space. | 1890 *p = HeapObject::FromAddress(NULL); // Fake heap object not in new space. |
| 1818 } | 1891 } |
| 1819 } | 1892 } |
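The update-or-zap decision above can be pictured with the same toy forwarding scheme. UpdateSlot and kZapSentinel below are illustrative names only; the real code additionally re-records surviving slots in the store buffer when the target is still in new space.

// Toy version of the decision made in UpdatePointerToNewGen: follow the
// forwarding address if the object survived, otherwise zap the slot so a
// later store-buffer overflow scan cannot mistake it for a new-space pointer.
#include <cstdint>
#include <cstdio>

using Address = uint8_t*;

static uint8_t kZapSentinel;  // Stand-in for a fake object not in new space.

// 'forwarding' is what the first word of the old object now holds:
// the new address for survivors, nullptr for dead objects.
static void UpdateSlot(Address* slot, Address forwarding) {
  if (forwarding != nullptr) {
    *slot = forwarding;     // Point at the moved object.
  } else {
    *slot = &kZapSentinel;  // Zap the dead pointer.
  }
}

int main() {
  uint8_t moved_object = 0;
  Address slot = nullptr;
  UpdateSlot(&slot, &moved_object);
  std::printf("live slot -> %p\n", static_cast<void*>(slot));
  UpdateSlot(&slot, nullptr);
  std::printf("dead slot -> %p (zapped)\n", static_cast<void*>(slot));
  return 0;
}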
| 1820 | 1893 |
| 1821 | 1894 |
| 1822 static String* UpdateNewSpaceReferenceInExternalStringTableEntry(Object **p) { | 1895 static String* UpdateNewSpaceReferenceInExternalStringTableEntry(Heap* heap, |
| 1896 Object** p) { |
| 1823 Address old_addr = HeapObject::cast(*p)->address(); | 1897 Address old_addr = HeapObject::cast(*p)->address(); |
| 1824 Address new_addr = Memory::Address_at(old_addr); | 1898 Address new_addr = Memory::Address_at(old_addr); |
| 1825 return String::cast(HeapObject::FromAddress(new_addr)); | 1899 return String::cast(HeapObject::FromAddress(new_addr)); |
| 1826 } | 1900 } |
| 1827 | 1901 |
| 1828 | 1902 |
| 1829 static bool TryPromoteObject(HeapObject* object, int object_size) { | 1903 static bool TryPromoteObject(Heap* heap, HeapObject* object, int object_size) { |
| 1830 Object* result; | 1904 Object* result; |
| 1831 | 1905 |
| 1832 if (object_size > Heap::MaxObjectSizeInPagedSpace()) { | 1906 if (object_size > heap->MaxObjectSizeInPagedSpace()) { |
| 1833 MaybeObject* maybe_result = | 1907 MaybeObject* maybe_result = |
| 1834 Heap::lo_space()->AllocateRawFixedArray(object_size); | 1908 heap->lo_space()->AllocateRawFixedArray(object_size); |
| 1835 if (maybe_result->ToObject(&result)) { | 1909 if (maybe_result->ToObject(&result)) { |
| 1836 HeapObject* target = HeapObject::cast(result); | 1910 HeapObject* target = HeapObject::cast(result); |
| 1837 MigrateObject(target->address(), object->address(), object_size, true); | 1911 MigrateObject(heap, target->address(), object->address(), object_size, |
| 1838 MarkCompactCollector::tracer()-> | 1912 true); |
| 1913 heap->mark_compact_collector()->tracer()-> |
| 1839 increment_promoted_objects_size(object_size); | 1914 increment_promoted_objects_size(object_size); |
| 1840 return true; | 1915 return true; |
| 1841 } | 1916 } |
| 1842 } else { | 1917 } else { |
| 1843 OldSpace* target_space = Heap::TargetSpace(object); | 1918 OldSpace* target_space = heap->TargetSpace(object); |
| 1844 | 1919 |
| 1845 ASSERT(target_space == Heap::old_pointer_space() || | 1920 ASSERT(target_space == heap->old_pointer_space() || |
| 1846 target_space == Heap::old_data_space()); | 1921 target_space == heap->old_data_space()); |
| 1847 MaybeObject* maybe_result = target_space->AllocateRaw(object_size); | 1922 MaybeObject* maybe_result = target_space->AllocateRaw(object_size); |
| 1848 if (maybe_result->ToObject(&result)) { | 1923 if (maybe_result->ToObject(&result)) { |
| 1849 HeapObject* target = HeapObject::cast(result); | 1924 HeapObject* target = HeapObject::cast(result); |
| 1850 MigrateObject(target->address(), | 1925 MigrateObject(heap, |
| 1926 target->address(), |
| 1851 object->address(), | 1927 object->address(), |
| 1852 object_size, | 1928 object_size, |
| 1853 target_space == Heap::old_pointer_space()); | 1929 target_space == heap->old_pointer_space()); |
| 1854 MarkCompactCollector::tracer()-> | 1930 heap->mark_compact_collector()->tracer()-> |
| 1855 increment_promoted_objects_size(object_size); | 1931 increment_promoted_objects_size(object_size); |
| 1856 return true; | 1932 return true; |
| 1857 } | 1933 } |
| 1858 } | 1934 } |
| 1859 | 1935 |
| 1860 return false; | 1936 return false; |
| 1861 } | 1937 } |
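TryPromoteObject's policy boils down to a small decision: oversized objects go to the large-object space, everything else to the old pointer or old data space. The helper below restates that choice; kMaxObjectSizeInPagedSpace and the enum are made-up stand-ins for the real Heap limits and spaces.

// Illustrative restatement of the promotion decision in TryPromoteObject.
#include <cstdio>

enum class PromotionTarget { kLargeObject, kOldPointer, kOldData };

// Made-up stand-in for Heap::MaxObjectSizeInPagedSpace().
constexpr int kMaxObjectSizeInPagedSpace = 8 * 1024;

static PromotionTarget ChoosePromotionTarget(int object_size, bool has_pointers) {
  if (object_size > kMaxObjectSizeInPagedSpace) return PromotionTarget::kLargeObject;
  return has_pointers ? PromotionTarget::kOldPointer : PromotionTarget::kOldData;
}

int main() {
  std::printf("%d %d %d\n",
              static_cast<int>(ChoosePromotionTarget(16 * 1024, true)),  // large object
              static_cast<int>(ChoosePromotionTarget(64, true)),         // old pointer
              static_cast<int>(ChoosePromotionTarget(64, false)));       // old data
  return 0;
}

In the real code each branch can still fail if the target space cannot allocate, in which case SweepNewSpace simply copies the object into the other semispace instead.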
| 1862 | 1938 |
| 1863 | 1939 |
| 1864 void MarkCompactCollector::SweepNewSpace(NewSpace* space) { | 1940 void MarkCompactCollector::SweepNewSpace(NewSpace* space) { |
| 1865 Heap::CheckNewSpaceExpansionCriteria(); | 1941 heap_->CheckNewSpaceExpansionCriteria(); |
| 1866 | 1942 |
| 1867 Address from_bottom = space->bottom(); | 1943 Address from_bottom = space->bottom(); |
| 1868 Address from_top = space->top(); | 1944 Address from_top = space->top(); |
| 1869 | 1945 |
| 1870 // Flip the semispaces. After flipping, to space is empty, from space has | 1946 // Flip the semispaces. After flipping, to space is empty, from space has |
| 1871 // live objects. | 1947 // live objects. |
| 1872 space->Flip(); | 1948 space->Flip(); |
| 1873 space->ResetAllocationInfo(); | 1949 space->ResetAllocationInfo(); |
| 1874 | 1950 |
| 1875 int size = 0; | 1951 int size = 0; |
| 1876 int survivors_size = 0; | 1952 int survivors_size = 0; |
| 1877 | 1953 |
| 1878 // First pass: traverse all objects in inactive semispace, remove marks, | 1954 // First pass: traverse all objects in inactive semispace, remove marks, |
| 1879 // migrate live objects and write forwarding addresses. This stage puts | 1955 // migrate live objects and write forwarding addresses. This stage puts |
| 1880 // new entries in the store buffer and may cause some pages to be marked | 1956 // new entries in the store buffer and may cause some pages to be marked |
| 1881 // scan-on-scavenge. | 1957 // scan-on-scavenge. |
| 1882 for (Address current = from_bottom; current < from_top; current += size) { | 1958 for (Address current = from_bottom; current < from_top; current += size) { |
| 1883 HeapObject* object = HeapObject::FromAddress(current); | 1959 HeapObject* object = HeapObject::FromAddress(current); |
| 1884 | 1960 |
| 1885 | 1961 |
| 1886 MarkBit mark_bit = Marking::MarkBitFromNewSpace(object); | 1962 MarkBit mark_bit = heap_->marking()->MarkBitFromNewSpace(object); |
| 1887 if (mark_bit.Get()) { | 1963 if (mark_bit.Get()) { |
| 1888 mark_bit.Clear(); | 1964 mark_bit.Clear(); |
| 1889 MarkCompactCollector::tracer()->decrement_marked_count(); | 1965 heap_->mark_compact_collector()->tracer()->decrement_marked_count(); |
| 1890 | 1966 |
| 1891 size = object->Size(); | 1967 size = object->Size(); |
| 1892 survivors_size += size; | 1968 survivors_size += size; |
| 1893 | 1969 |
| 1894 // Aggressively promote young survivors to the old space. | 1970 // Aggressively promote young survivors to the old space. |
| 1895 if (TryPromoteObject(object, size)) { | 1971 if (TryPromoteObject(heap_, object, size)) { |
| 1896 continue; | 1972 continue; |
| 1897 } | 1973 } |
| 1898 | 1974 |
| 1899 // Promotion failed. Just migrate object to another semispace. | 1975 // Promotion failed. Just migrate object to another semispace. |
| 1900 // Allocation cannot fail at this point: semispaces are of equal size. | 1976 // Allocation cannot fail at this point: semispaces are of equal size. |
| 1901 Object* target = space->AllocateRaw(size)->ToObjectUnchecked(); | 1977 Object* target = space->AllocateRaw(size)->ToObjectUnchecked(); |
| 1902 | 1978 |
| 1903 MigrateObject(HeapObject::cast(target)->address(), | 1979 MigrateObject(heap_, |
| 1980 HeapObject::cast(target)->address(), |
| 1904 current, | 1981 current, |
| 1905 size, | 1982 size, |
| 1906 false); | 1983 false); |
| 1907 } else { | 1984 } else { |
| 1908 // Process the dead object before we write a NULL into its header. | 1985 // Process the dead object before we write a NULL into its header. |
| 1909 LiveObjectList::ProcessNonLive(object); | 1986 LiveObjectList::ProcessNonLive(object); |
| 1910 | 1987 |
| 1911 size = object->Size(); | 1988 size = object->Size(); |
| 1912 // Mark dead objects in the new space with null in their map field. | 1989 // Mark dead objects in the new space with null in their map field. |
| 1913 Memory::Address_at(current) = NULL; | 1990 Memory::Address_at(current) = NULL; |
| 1914 } | 1991 } |
| 1915 } | 1992 } |
| 1916 | 1993 |
| 1917 // Second pass: find pointers to new space and update them. | 1994 // Second pass: find pointers to new space and update them. |
| 1918 PointersToNewGenUpdatingVisitor updating_visitor; | 1995 PointersToNewGenUpdatingVisitor updating_visitor(heap_); |
| 1919 | 1996 |
| 1920 // Update pointers in to space. | 1997 // Update pointers in to space. |
| 1921 Address current = space->bottom(); | 1998 Address current = space->bottom(); |
| 1922 while (current < space->top()) { | 1999 while (current < space->top()) { |
| 1923 HeapObject* object = HeapObject::FromAddress(current); | 2000 HeapObject* object = HeapObject::FromAddress(current); |
| 1924 current += | 2001 current += |
| 1925 StaticPointersToNewGenUpdatingVisitor::IterateBody(object->map(), | 2002 StaticPointersToNewGenUpdatingVisitor::IterateBody(object->map(), |
| 1926 object); | 2003 object); |
| 1927 } | 2004 } |
| 1928 | 2005 |
| 1929 // Update roots. | 2006 // Update roots. |
| 1930 Heap::IterateRoots(&updating_visitor, VISIT_ALL_IN_SCAVENGE); | 2007 heap_->IterateRoots(&updating_visitor, VISIT_ALL_IN_SCAVENGE); |
| 1931 LiveObjectList::IterateElements(&updating_visitor); | 2008 LiveObjectList::IterateElements(&updating_visitor); |
| 1932 | 2009 |
| 1933 { | 2010 { |
| 1934 StoreBufferRebuildScope scope(&Heap::ScavengeStoreBufferCallback); | 2011 StoreBufferRebuildScope scope(heap_, |
| 1935 StoreBuffer::IteratePointersToNewSpace(&UpdatePointerToNewGen); | 2012 heap_->store_buffer(), |
| 2013 &Heap::ScavengeStoreBufferCallback); |
| 2014 heap_->store_buffer()->IteratePointersToNewSpace(&UpdatePointerToNewGen); |
| 1936 } | 2015 } |
| 1937 | 2016 |
| 1938 // Update pointers from cells. | 2017 // Update pointers from cells. |
| 1939 HeapObjectIterator cell_iterator(Heap::cell_space()); | 2018 HeapObjectIterator cell_iterator(heap_->cell_space()); |
| 1940 for (HeapObject* cell = cell_iterator.Next(); | 2019 for (HeapObject* cell = cell_iterator.Next(); |
| 1941 cell != NULL; | 2020 cell != NULL; |
| 1942 cell = cell_iterator.Next()) { | 2021 cell = cell_iterator.Next()) { |
| 1943 if (cell->IsJSGlobalPropertyCell()) { | 2022 if (cell->IsJSGlobalPropertyCell()) { |
| 1944 Address value_address = | 2023 Address value_address = |
| 1945 reinterpret_cast<Address>(cell) + | 2024 reinterpret_cast<Address>(cell) + |
| 1946 (JSGlobalPropertyCell::kValueOffset - kHeapObjectTag); | 2025 (JSGlobalPropertyCell::kValueOffset - kHeapObjectTag); |
| 1947 updating_visitor.VisitPointer(reinterpret_cast<Object**>(value_address)); | 2026 updating_visitor.VisitPointer(reinterpret_cast<Object**>(value_address)); |
| 1948 } | 2027 } |
| 1949 } | 2028 } |
| 1950 | 2029 |
| 1951 // Update pointer from the global contexts list. | 2030 // Update pointer from the global contexts list. |
| 1952 updating_visitor.VisitPointer(Heap::global_contexts_list_address()); | 2031 updating_visitor.VisitPointer(heap_->global_contexts_list_address()); |
| 1953 | 2032 |
| 1954 // Update pointers from external string table. | 2033 // Update pointers from external string table. |
| 1955 Heap::UpdateNewSpaceReferencesInExternalStringTable( | 2034 heap_->UpdateNewSpaceReferencesInExternalStringTable( |
| 1956 &UpdateNewSpaceReferenceInExternalStringTableEntry); | 2035 &UpdateNewSpaceReferenceInExternalStringTableEntry); |
| 1957 | 2036 |
| 1958 // All pointers were updated. Update auxiliary allocation info. | 2037 // All pointers were updated. Update auxiliary allocation info. |
| 1959 Heap::IncrementYoungSurvivorsCounter(survivors_size); | 2038 heap_->IncrementYoungSurvivorsCounter(survivors_size); |
| 1960 space->set_age_mark(space->top()); | 2039 space->set_age_mark(space->top()); |
| 1961 | 2040 |
| 1962 // Update JSFunction pointers from the runtime profiler. | 2041 // Update JSFunction pointers from the runtime profiler. |
| 1963 RuntimeProfiler::UpdateSamplesAfterScavenge(); | 2042 heap_->isolate()->runtime_profiler()->UpdateSamplesAfterScavenge(); |
| 1964 } | 2043 } |
| 1965 | 2044 |
| 1966 | 2045 |
| 1967 INLINE(static uint32_t SweepFree(PagedSpace* space, | 2046 INLINE(static uint32_t SweepFree(PagedSpace* space, |
| 1968 Page* p, | 2047 Page* p, |
| 1969 uint32_t free_start, | 2048 uint32_t free_start, |
| 1970 uint32_t region_end, | 2049 uint32_t region_end, |
| 1971 uint32_t* cells)); | 2050 uint32_t* cells)); |
| 1972 | 2051 |
| 1973 | 2052 |
| (...skipping 495 matching lines...) |
| 2469 #endif | 2548 #endif |
| 2470 | 2549 |
| 2471 ASSERT(!IsCompacting()); | 2550 ASSERT(!IsCompacting()); |
| 2472 SweeperType how_to_sweep = CONSERVATIVE; | 2551 SweeperType how_to_sweep = CONSERVATIVE; |
| 2473 if (sweep_precisely_) how_to_sweep = PRECISE; | 2552 if (sweep_precisely_) how_to_sweep = PRECISE; |
| 2474 // Noncompacting collections simply sweep the spaces to clear the mark | 2553 // Noncompacting collections simply sweep the spaces to clear the mark |
| 2475 // bits and free the nonlive blocks (for old and map spaces). We sweep | 2554 // bits and free the nonlive blocks (for old and map spaces). We sweep |
| 2476 // the map space last because freeing non-live maps overwrites them and | 2555 // the map space last because freeing non-live maps overwrites them and |
| 2477 // the other spaces rely on possibly non-live maps to get the sizes for | 2556 // the other spaces rely on possibly non-live maps to get the sizes for |
| 2478 // non-live objects. | 2557 // non-live objects. |
| 2479 SweepSpace(Heap::old_pointer_space(), how_to_sweep); | 2558 SweepSpace(HEAP->old_pointer_space(), how_to_sweep); |
| 2480 SweepSpace(Heap::old_data_space(), how_to_sweep); | 2559 SweepSpace(HEAP->old_data_space(), how_to_sweep); |
| 2481 SweepSpace(Heap::code_space(), PRECISE); | 2560 SweepSpace(HEAP->code_space(), PRECISE); |
| 2482 // TODO(gc): implement specialized sweeper for cell space. | 2561 // TODO(gc): implement specialized sweeper for cell space. |
| 2483 SweepSpace(Heap::cell_space(), PRECISE); | 2562 SweepSpace(HEAP->cell_space(), PRECISE); |
| 2484 { GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_SWEEP_NEWSPACE); | 2563 { GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_SWEEP_NEWSPACE); |
| 2485 SweepNewSpace(Heap::new_space()); | 2564 SweepNewSpace(heap_->new_space()); |
| 2486 } | 2565 } |
| 2487 // TODO(gc): ClearNonLiveTransitions depends on precise sweeping of | 2566 // TODO(gc): ClearNonLiveTransitions depends on precise sweeping of |
| 2488 // map space to detect whether an unmarked map became dead in this | 2567 // map space to detect whether an unmarked map became dead in this |
| 2489 // collection or in one of the previous ones. | 2568 // collection or in one of the previous ones. |
| 2490 // TODO(gc): Implement specialized sweeper for map space. | 2569 // TODO(gc): Implement specialized sweeper for map space. |
| 2491 SweepSpace(Heap::map_space(), PRECISE); | 2570 SweepSpace(HEAP->map_space(), PRECISE); |
| 2492 | 2571 |
| 2493 ASSERT(live_map_objects_size_ <= Heap::map_space()->Size()); | 2572 ASSERT(live_map_objects_size_ <= HEAP->map_space()->Size()); |
| 2494 | 2573 |
| 2495 // Deallocate unmarked objects and clear marked bits for marked objects. | 2574 // Deallocate unmarked objects and clear marked bits for marked objects. |
| 2496 Heap::lo_space()->FreeUnmarkedObjects(); | 2575 heap_->lo_space()->FreeUnmarkedObjects(); |
| 2497 } | 2576 } |
| 2498 | 2577 |
| 2499 | 2578 |
| 2500 // Iterate the live objects in a range of addresses (e.g., a page or a | 2579 // Iterate the live objects in a range of addresses (e.g., a page or a |
| 2501 // semispace). The live regions of the range have been linked into a list. | 2580 // semispace). The live regions of the range have been linked into a list. |
| 2502 // The first live region is [first_live_start, first_live_end), and the last | 2581 // The first live region is [first_live_start, first_live_end), and the last |
| 2503 // address in the range is top. The callback function is used to get the | 2582 // address in the range is top. The callback function is used to get the |
| 2504 // size of each live object. | 2583 // size of each live object. |
| 2505 int MarkCompactCollector::IterateLiveObjectsInRange( | 2584 int MarkCompactCollector::IterateLiveObjectsInRange( |
| 2506 Address start, | 2585 Address start, |
| 2507 Address end, | 2586 Address end, |
| 2508 HeapObjectCallback size_func) { | 2587 LiveObjectCallback size_func) { |
| 2509 int live_objects_size = 0; | 2588 int live_objects_size = 0; |
| 2510 Address current = start; | 2589 Address current = start; |
| 2511 while (current < end) { | 2590 while (current < end) { |
| 2512 uint32_t encoded_map = Memory::uint32_at(current); | 2591 uint32_t encoded_map = Memory::uint32_at(current); |
| 2513 if (encoded_map == kSingleFreeEncoding) { | 2592 if (encoded_map == kSingleFreeEncoding) { |
| 2514 current += kPointerSize; | 2593 current += kPointerSize; |
| 2515 } else if (encoded_map == kMultiFreeEncoding) { | 2594 } else if (encoded_map == kMultiFreeEncoding) { |
| 2516 current += Memory::int_at(current + kIntSize); | 2595 current += Memory::int_at(current + kIntSize); |
| 2517 } else { | 2596 } else { |
| 2518 int size = size_func(HeapObject::FromAddress(current)); | 2597 int size = (this->*size_func)(HeapObject::FromAddress(current)); |
| 2519 current += size; | 2598 current += size; |
| 2520 live_objects_size += size; | 2599 live_objects_size += size; |
| 2521 } | 2600 } |
| 2522 } | 2601 } |
| 2523 return live_objects_size; | 2602 return live_objects_size; |
| 2524 } | 2603 } |
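The loop above distinguishes live objects from free blocks using two encodings written directly into the heap words it walks. The fragment below mimics the same control flow over a flat array; the constants and the "size in the first word" convention are invented for the example and are not the real kSingleFreeEncoding / kMultiFreeEncoding values.

// Toy walk over a region mixing live objects and free blocks, mirroring the
// skip logic in IterateLiveObjectsInRange. Encodings here are illustrative.
#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <vector>

constexpr uint32_t kSingleFree = 0xDEAD0001;  // A single free word.
constexpr uint32_t kMultiFree  = 0xDEAD0002;  // Free block; next word holds its size.

// Each element models one heap word; a "live object" stores its size in
// words in its first word (a gross simplification of map-based sizing).
static int SumLiveSizes(const std::vector<uint32_t>& words) {
  int live = 0;
  std::size_t current = 0;
  while (current < words.size()) {
    uint32_t header = words[current];
    if (header == kSingleFree) {
      current += 1;                      // Skip one free word.
    } else if (header == kMultiFree) {
      current += words[current + 1];     // Skip a multi-word free block.
    } else {
      live += static_cast<int>(header);  // "Object size" from the header.
      current += header;
    }
  }
  return live;
}

int main() {
  // live(2 words), single free, multi free of 3 words, live(4 words)
  std::vector<uint32_t> region = {2, 0, kSingleFree, kMultiFree, 3, 0, 4, 0, 0, 0};
  std::printf("live words: %d\n", SumLiveSizes(region));  // Prints 6.
  return 0;
}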
| 2525 | 2604 |
| 2526 | 2605 |
| 2527 int MarkCompactCollector::IterateLiveObjects(NewSpace* space, | 2606 int MarkCompactCollector::IterateLiveObjects( |
| 2528 HeapObjectCallback size_f) { | 2607 NewSpace* space, LiveObjectCallback size_f) { |
| 2529 ASSERT(MARK_LIVE_OBJECTS < state_ && state_ <= RELOCATE_OBJECTS); | 2608 ASSERT(MARK_LIVE_OBJECTS < state_ && state_ <= RELOCATE_OBJECTS); |
| 2530 return IterateLiveObjectsInRange(space->bottom(), space->top(), size_f); | 2609 return IterateLiveObjectsInRange(space->bottom(), space->top(), size_f); |
| 2531 } | 2610 } |
| 2532 | 2611 |
| 2533 | 2612 |
| 2534 int MarkCompactCollector::IterateLiveObjects(PagedSpace* space, | 2613 int MarkCompactCollector::IterateLiveObjects( |
| 2535 HeapObjectCallback size_f) { | 2614 PagedSpace* space, LiveObjectCallback size_f) { |
| 2536 ASSERT(MARK_LIVE_OBJECTS < state_ && state_ <= RELOCATE_OBJECTS); | 2615 ASSERT(MARK_LIVE_OBJECTS < state_ && state_ <= RELOCATE_OBJECTS); |
| 2537 // TODO(gc): Do a mark-sweep first with precise sweeping. | 2616 // TODO(gc): Do a mark-sweep first with precise sweeping. |
| 2538 int total = 0; | 2617 int total = 0; |
| 2539 PageIterator it(space); | 2618 PageIterator it(space); |
| 2540 while (it.has_next()) { | 2619 while (it.has_next()) { |
| 2541 Page* p = it.next(); | 2620 Page* p = it.next(); |
| 2542 total += IterateLiveObjectsInRange(p->ObjectAreaStart(), | 2621 total += IterateLiveObjectsInRange(p->ObjectAreaStart(), |
| 2543 p->ObjectAreaEnd(), | 2622 p->ObjectAreaEnd(), |
| 2544 size_f); | 2623 size_f); |
| 2545 } | 2624 } |
| 2546 return total; | 2625 return total; |
| 2547 } | 2626 } |
| 2548 | 2627 |
| 2549 | 2628 |
| 2550 // TODO(gc) ReportDeleteIfNeeded is not called currently. | 2629 // TODO(gc) ReportDeleteIfNeeded is not called currently. |
| 2551 // Our profiling tools do not expect intersections between | 2630 // Our profiling tools do not expect intersections between |
| 2552 // code objects. We should either reenable it or change our tools. | 2631 // code objects. We should either reenable it or change our tools. |
| 2632 void MarkCompactCollector::EnableCodeFlushing(bool enable) { |
| 2633 if (enable) { |
| 2634 if (code_flusher_ != NULL) return; |
| 2635 code_flusher_ = new CodeFlusher(heap_->isolate()); |
| 2636 } else { |
| 2637 if (code_flusher_ == NULL) return; |
| 2638 delete code_flusher_; |
| 2639 code_flusher_ = NULL; |
| 2640 } |
| 2641 } |
| 2642 |
| 2643 |
| 2553 void MarkCompactCollector::ReportDeleteIfNeeded(HeapObject* obj) { | 2644 void MarkCompactCollector::ReportDeleteIfNeeded(HeapObject* obj) { |
| 2554 #ifdef ENABLE_GDB_JIT_INTERFACE | 2645 #ifdef ENABLE_GDB_JIT_INTERFACE |
| 2555 if (obj->IsCode()) { | 2646 if (obj->IsCode()) { |
| 2556 GDBJITInterface::RemoveCode(reinterpret_cast<Code*>(obj)); | 2647 GDBJITInterface::RemoveCode(reinterpret_cast<Code*>(obj)); |
| 2557 } | 2648 } |
| 2558 #endif | 2649 #endif |
| 2559 #ifdef ENABLE_LOGGING_AND_PROFILING | 2650 #ifdef ENABLE_LOGGING_AND_PROFILING |
| 2560 if (obj->IsCode()) { | 2651 if (obj->IsCode()) { |
| 2561 PROFILE(CodeDeleteEvent(obj->address())); | 2652 PROFILE(ISOLATE, CodeDeleteEvent(obj->address())); |
| 2562 } | 2653 } |
| 2563 #endif | 2654 #endif |
| 2564 } | 2655 } |
| 2565 | 2656 |
| 2566 | 2657 |
| 2567 void MarkCompactCollector::Initialize() { | 2658 void MarkCompactCollector::Initialize() { |
| 2568 StaticPointersToNewGenUpdatingVisitor::Initialize(); | 2659 StaticPointersToNewGenUpdatingVisitor::Initialize(); |
| 2569 StaticMarkingVisitor::Initialize(); | 2660 StaticMarkingVisitor::Initialize(); |
| 2570 } | 2661 } |
| 2571 | 2662 |
| 2572 | 2663 |
| 2573 } } // namespace v8::internal | 2664 } } // namespace v8::internal |