| OLD | NEW | 
|---|---|
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. | 
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without | 
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are | 
| 4 // met: | 4 // met: | 
| 5 // | 5 // | 
| 6 //     * Redistributions of source code must retain the above copyright | 6 //     * Redistributions of source code must retain the above copyright | 
| 7 //       notice, this list of conditions and the following disclaimer. | 7 //       notice, this list of conditions and the following disclaimer. | 
| 8 //     * Redistributions in binary form must reproduce the above | 8 //     * Redistributions in binary form must reproduce the above | 
| 9 //       copyright notice, this list of conditions and the following | 9 //       copyright notice, this list of conditions and the following | 
| 10 //       disclaimer in the documentation and/or other materials provided | 10 //       disclaimer in the documentation and/or other materials provided | 
| (...skipping 39 matching lines...) |
| 50 const char* Marking::kImpossibleBitPattern = "01"; | 50 const char* Marking::kImpossibleBitPattern = "01"; | 
| 51 | 51 | 
| 52 | 52 | 
| 53 // ------------------------------------------------------------------------- | 53 // ------------------------------------------------------------------------- | 
| 54 // MarkCompactCollector | 54 // MarkCompactCollector | 
| 55 | 55 | 
| 56 MarkCompactCollector::MarkCompactCollector() :  // NOLINT | 56 MarkCompactCollector::MarkCompactCollector() :  // NOLINT | 
| 57 #ifdef DEBUG | 57 #ifdef DEBUG | 
| 58       state_(IDLE), | 58       state_(IDLE), | 
| 59 #endif | 59 #endif | 
|  | 60       sweep_precisely_(false), | 
|  | 61       compacting_(false), | 
| 60       tracer_(NULL), | 62       tracer_(NULL), | 
| 61 #ifdef DEBUG | 63 #ifdef DEBUG | 
| 62       live_young_objects_size_(0), | 64       live_young_objects_size_(0), | 
| 63       live_old_pointer_objects_size_(0), | 65       live_old_pointer_objects_size_(0), | 
| 64       live_old_data_objects_size_(0), | 66       live_old_data_objects_size_(0), | 
| 65       live_code_objects_size_(0), | 67       live_code_objects_size_(0), | 
| 66       live_map_objects_size_(0), | 68       live_map_objects_size_(0), | 
| 67       live_cell_objects_size_(0), | 69       live_cell_objects_size_(0), | 
| 68       live_lo_objects_size_(0), | 70       live_lo_objects_size_(0), | 
| 69       live_bytes_(0), | 71       live_bytes_(0), | 
| (...skipping 77 matching lines...) |
| 147 } | 149 } | 
| 148 | 150 | 
| 149 | 151 | 
| 150 class VerifyEvacuationVisitor: public ObjectVisitor { | 152 class VerifyEvacuationVisitor: public ObjectVisitor { | 
| 151  public: | 153  public: | 
| 152   void VisitPointers(Object** start, Object** end) { | 154   void VisitPointers(Object** start, Object** end) { | 
| 153     for (Object** current = start; current < end; current++) { | 155     for (Object** current = start; current < end; current++) { | 
| 154       if ((*current)->IsHeapObject()) { | 156       if ((*current)->IsHeapObject()) { | 
| 155         HeapObject* object = HeapObject::cast(*current); | 157         HeapObject* object = HeapObject::cast(*current); | 
| 156         if (MarkCompactCollector::IsOnEvacuationCandidate(object)) { | 158         if (MarkCompactCollector::IsOnEvacuationCandidate(object)) { | 
| 157           HEAP->TracePathToObject(source_); |  | 
| 158           CHECK(false); | 159           CHECK(false); | 
| 159         } | 160         } | 
| 160       } | 161       } | 
| 161     } | 162     } | 
| 162   } | 163   } | 
| 163 | 164 | 
| 164   HeapObject* source_; | 165   HeapObject* source_; | 
| 165 }; | 166 }; | 
| 166 | 167 | 
| 167 | 168 | 
| (...skipping 50 matching lines...) |
| 218 } | 219 } | 
| 219 #endif | 220 #endif | 
| 220 | 221 | 
| 221 | 222 | 
| 222 void MarkCompactCollector::AddEvacuationCandidate(Page* p) { | 223 void MarkCompactCollector::AddEvacuationCandidate(Page* p) { | 
| 223   p->MarkEvacuationCandidate(); | 224   p->MarkEvacuationCandidate(); | 
| 224   evacuation_candidates_.Add(p); | 225   evacuation_candidates_.Add(p); | 
| 225 } | 226 } | 
| 226 | 227 | 
| 227 | 228 | 
|  | 229 bool MarkCompactCollector::StartCompaction() { | 
|  | 230   // Don't start compaction if we are in the middle of an incremental | 
|  | 231   // marking cycle: no slots were recorded for evacuation candidates. | 
|  | 232   if (!compacting_ && !heap_->incremental_marking()->IsMarking()) { | 
|  | 233     ASSERT(evacuation_candidates_.length() == 0); | 
|  | 234 | 
|  | 235     // TODO(gc) Shrink the slots buffer on low memory notifications. | 
|  | 236     slots_buffer_.Clear(); | 
|  | 237 | 
|  | 238     CollectEvacuationCandidates(heap()->old_pointer_space()); | 
|  | 239     CollectEvacuationCandidates(heap()->old_data_space()); | 
|  | 240 | 
|  | 241     heap()->old_pointer_space()->EvictEvacuationCandidatesFromFreeLists(); | 
|  | 242     heap()->old_data_space()->EvictEvacuationCandidatesFromFreeLists(); | 
|  | 243 | 
|  | 244     compacting_ = evacuation_candidates_.length() > 0; | 
|  | 245   } | 
|  | 246 | 
|  | 247   return compacting_; | 
|  | 248 } | 
|  | 249 | 
|  | 250 | 
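The new `StartCompaction()` entry point only picks evacuation candidates while no incremental marking cycle is underway, because a marker that has already started will not have recorded slots into pages chosen now. A standalone sketch of that guard (types and the candidate-picking stub are invented for illustration; only the control flow mirrors the patch):

```cpp
// Sketch of the StartCompaction() guard; not V8's actual types.
enum class MarkingPhase { kStopped, kMarking };

struct CollectorSketch {
  MarkingPhase marking = MarkingPhase::kStopped;
  bool compacting = false;

  bool StartCompaction() {
    // Once marking has started, already-visited objects carry no
    // recorded slots for pages we would pick now, so stay put.
    if (!compacting && marking == MarkingPhase::kStopped) {
      compacting = PickCandidates();
    }
    return compacting;
  }

  bool PickCandidates() { return false; }  // stub: no fragmented pages
};
```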
| 228 void MarkCompactCollector::CollectGarbage() { | 251 void MarkCompactCollector::CollectGarbage() { | 
| 229   // Make sure that Prepare() has been called. The individual steps below will | 252   // Make sure that Prepare() has been called. The individual steps below will | 
| 230   // update the state as they proceed. | 253   // update the state as they proceed. | 
| 231   ASSERT(state_ == PREPARE_GC); | 254   ASSERT(state_ == PREPARE_GC); | 
| 232 | 255 | 
| 233   MarkLiveObjects(); | 256   MarkLiveObjects(); | 
| 234   ASSERT(heap_->incremental_marking()->IsStopped()); | 257   ASSERT(heap_->incremental_marking()->IsStopped()); | 
| 235 | 258 | 
| 236   if (FLAG_collect_maps) ClearNonLiveTransitions(); | 259   if (FLAG_collect_maps) ClearNonLiveTransitions(); | 
| 237 | 260 | 
| (...skipping 125 matching lines...) |

| 363     Page* p = it.next(); | 386     Page* p = it.next(); | 
| 364     if (space->IsFragmented(p)) { | 387     if (space->IsFragmented(p)) { | 
| 365       AddEvacuationCandidate(p); | 388       AddEvacuationCandidate(p); | 
| 366     } else { | 389     } else { | 
| 367       p->ClearEvacuationCandidate(); | 390       p->ClearEvacuationCandidate(); | 
| 368     } | 391     } | 
| 369   } | 392   } | 
| 370 } | 393 } | 
| 371 | 394 | 
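`IsFragmented()` is defined elsewhere; the threshold below is invented for illustration. The shape of the test is: a page becomes an evacuation candidate when too much of its object area sits on free lists.

```cpp
// Hypothetical fragmentation test; the 50% threshold is illustrative,
// not V8's actual policy.
#include <cstddef>

struct PageInfo {
  size_t object_area_size;  // usable bytes on the page
  size_t free_bytes;        // bytes currently on free lists
};

bool IsFragmentedSketch(const PageInfo& p) {
  return 2 * p.free_bytes >= p.object_area_size;
}
```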
| 372 | 395 | 
| 373 static void ClearEvacuationCandidates(PagedSpace* space) { |  | 
| 374   ASSERT(space->identity() == OLD_POINTER_SPACE || |  | 
| 375          space->identity() == OLD_DATA_SPACE); |  | 
| 376 |  | 
| 377   PageIterator it(space); |  | 
| 378   while (it.has_next()) { |  | 
| 379     Page* p = it.next(); |  | 
| 380     p->ClearEvacuationCandidate(); |  | 
| 381   } |  | 
| 382 } |  | 
| 383 |  | 
| 384 |  | 
| 385 void MarkCompactCollector::Prepare(GCTracer* tracer) { | 396 void MarkCompactCollector::Prepare(GCTracer* tracer) { | 
| 386   // TODO(gc) re-enable code flushing. | 397   // TODO(gc) re-enable code flushing. | 
| 387   FLAG_flush_code = false; | 398   FLAG_flush_code = false; | 
| 388   FLAG_always_compact = false; | 399   FLAG_always_compact = false; | 
| 389 | 400 | 
| 390   // Disable collection of maps if incremental marking is enabled. | 401   // Disable collection of maps if incremental marking is enabled. | 
| 391   // TODO(gc) improve maps collection algorithm to work with incremental | 402   // TODO(gc) improve maps collection algorithm to work with incremental | 
| 392   // marking. | 403   // marking. | 
| 393   if (FLAG_incremental_marking) FLAG_collect_maps = false; | 404   if (FLAG_incremental_marking) FLAG_collect_maps = false; | 
| 394 | 405 | 
| 395   // Rather than passing the tracer around we stash it in a static member | 406   // Rather than passing the tracer around we stash it in a static member | 
| 396   // variable. | 407   // variable. | 
| 397   tracer_ = tracer; | 408   tracer_ = tracer; | 
| 398 | 409 | 
| 399 #ifdef DEBUG | 410 #ifdef DEBUG | 
| 400   ASSERT(state_ == IDLE); | 411   ASSERT(state_ == IDLE); | 
| 401   state_ = PREPARE_GC; | 412   state_ = PREPARE_GC; | 
| 402 #endif | 413 #endif | 
| 403   ASSERT(!FLAG_always_compact || !FLAG_never_compact); | 414   ASSERT(!FLAG_always_compact || !FLAG_never_compact); | 
| 404 | 415 | 
| 405   if (FLAG_collect_maps) CreateBackPointers(); | 416   if (FLAG_collect_maps) CreateBackPointers(); | 
| 406 #ifdef ENABLE_GDB_JIT_INTERFACE | 417 #ifdef ENABLE_GDB_JIT_INTERFACE | 
| 407   if (FLAG_gdbjit) { | 418   if (FLAG_gdbjit) { | 
| 408     // If GDBJIT interface is active disable compaction. | 419     // If GDBJIT interface is active disable compaction. | 
| 409     compacting_collection_ = false; | 420     compacting_collection_ = false; | 
| 410   } | 421   } | 
| 411 #endif | 422 #endif | 
| 412 | 423 | 
| 413   if (!FLAG_never_compact) { | 424   if (!FLAG_never_compact) StartCompaction(); | 
| 414     slots_buffer_.Clear(); |  | 
| 415     evacuation_candidates_.Rewind(0); |  | 
| 416 |  | 
| 417     if (!heap()->incremental_marking()->IsMarking()) { |  | 
| 418       CollectEvacuationCandidates(heap()->old_pointer_space()); |  | 
| 419       CollectEvacuationCandidates(heap()->old_data_space()); |  | 
| 420     } else { |  | 
| 421       ClearEvacuationCandidates(heap()->old_pointer_space()); |  | 
| 422       ClearEvacuationCandidates(heap()->old_data_space()); |  | 
| 423     } |  | 
| 424   } |  | 
| 425 | 425 | 
| 426   PagedSpaces spaces; | 426   PagedSpaces spaces; | 
| 427   for (PagedSpace* space = spaces.next(); | 427   for (PagedSpace* space = spaces.next(); | 
| 428        space != NULL; | 428        space != NULL; | 
| 429        space = spaces.next()) { | 429        space = spaces.next()) { | 
| 430     space->PrepareForMarkCompact(); | 430     space->PrepareForMarkCompact(); | 
| 431   } | 431   } | 
| 432 | 432 | 
| 433   if (!heap()->incremental_marking()->IsMarking()) { | 433   if (!heap()->incremental_marking()->IsMarking()) { | 
| 434     ClearMarkbits(heap_); | 434     ClearMarkbits(heap_); | 
| (...skipping 1820 matching lines...) |
| 2255   } | 2255   } | 
| 2256 | 2256 | 
| 2257   heap_->IncrementYoungSurvivorsCounter(survivors_size); | 2257   heap_->IncrementYoungSurvivorsCounter(survivors_size); | 
| 2258   new_space->set_age_mark(new_space->top()); | 2258   new_space->set_age_mark(new_space->top()); | 
| 2259 } | 2259 } | 
| 2260 | 2260 | 
| 2261 | 2261 | 
| 2262 void MarkCompactCollector::EvacuateLiveObjectsFromPage(Page* p) { | 2262 void MarkCompactCollector::EvacuateLiveObjectsFromPage(Page* p) { | 
| 2263   AlwaysAllocateScope always_allocate; | 2263   AlwaysAllocateScope always_allocate; | 
| 2264 | 2264 | 
|  | 2265   ASSERT(p->IsEvacuationCandidate() && !p->WasEvacuated()); | 
|  | 2266 | 
| 2265   PagedSpace* space = static_cast<PagedSpace*>(p->owner()); | 2267   PagedSpace* space = static_cast<PagedSpace*>(p->owner()); | 
| 2266 | 2268 | 
| 2267   MarkBit::CellType* cells = p->markbits()->cells(); | 2269   MarkBit::CellType* cells = p->markbits()->cells(); | 
| 2268 | 2270 | 
| 2269   int last_cell_index = | 2271   int last_cell_index = | 
| 2270       Bitmap::IndexToCell( | 2272       Bitmap::IndexToCell( | 
| 2271           Bitmap::CellAlignIndex( | 2273           Bitmap::CellAlignIndex( | 
| 2272               p->AddressToMarkbitIndex(p->ObjectAreaEnd()))); | 2274               p->AddressToMarkbitIndex(p->ObjectAreaEnd()))); | 
| 2273 | 2275 | 
| 2274   int cell_index = Page::kFirstUsedCell; | 2276   int cell_index = Page::kFirstUsedCell; | 
| (...skipping 32 matching lines...) |
| 2307 | 2309 | 
| 2308 void MarkCompactCollector::EvacuatePages() { | 2310 void MarkCompactCollector::EvacuatePages() { | 
| 2309   int npages = evacuation_candidates_.length(); | 2311   int npages = evacuation_candidates_.length(); | 
| 2310   for (int i = 0; i < npages; i++) { | 2312   for (int i = 0; i < npages; i++) { | 
| 2311     Page* p = evacuation_candidates_[i]; | 2313     Page* p = evacuation_candidates_[i]; | 
| 2312     EvacuateLiveObjectsFromPage(p); | 2314     EvacuateLiveObjectsFromPage(p); | 
| 2313   } | 2315   } | 
| 2314 } | 2316 } | 
| 2315 | 2317 | 
| 2316 | 2318 | 
|  | 2319 class EvacuationWeakObjectRetainer : public WeakObjectRetainer { | 
|  | 2320  public: | 
|  | 2321   virtual Object* RetainAs(Object* object) { | 
|  | 2322     if (object->IsHeapObject()) { | 
|  | 2323       HeapObject* heap_object = HeapObject::cast(object); | 
|  | 2324       MapWord map_word = heap_object->map_word(); | 
|  | 2325       if (map_word.IsForwardingAddress()) { | 
|  | 2326         return map_word.ToForwardingAddress(); | 
|  | 2327       } | 
|  | 2328     } | 
|  | 2329     return object; | 
|  | 2330   } | 
|  | 2331 }; | 
|  | 2332 | 
|  | 2333 | 
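`RetainAs()` relies on the usual evacuation idiom: the old copy's map word is overwritten with a forwarding pointer to the new copy, so anyone still holding the old pointer can recover the new location. A simplified standalone sketch (a tagged `uintptr_t` header stands in for V8's `MapWord`; the tag scheme is invented):

```cpp
#include <cstdint>

// Simplified stand-in for map-word forwarding.
struct ObjSketch {
  uintptr_t header;  // map pointer, or forwarding address | kFwdTag
  static const uintptr_t kFwdTag = 1;

  bool IsForwarded() const { return (header & kFwdTag) != 0; }
  ObjSketch* ForwardingAddress() const {
    return reinterpret_cast<ObjSketch*>(header & ~kFwdTag);
  }
  void ForwardTo(ObjSketch* new_copy) {
    header = reinterpret_cast<uintptr_t>(new_copy) | kFwdTag;
  }
};

// Mirrors EvacuationWeakObjectRetainer::RetainAs: follow the forwarding
// pointer if the object moved, otherwise keep the object as-is.
ObjSketch* RetainAsSketch(ObjSketch* o) {
  return o->IsForwarded() ? o->ForwardingAddress() : o;
}
```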
| 2317 void MarkCompactCollector::EvacuateNewSpaceAndCandidates() { | 2334 void MarkCompactCollector::EvacuateNewSpaceAndCandidates() { | 
| 2318   EvacuateNewSpace(); | 2335   EvacuateNewSpace(); | 
| 2319   EvacuatePages(); | 2336   EvacuatePages(); | 
| 2320 | 2337 | 
| 2321   // Second pass: find pointers to new space and update them. | 2338   // Second pass: find pointers to new space and update them. | 
| 2322   PointersUpdatingVisitor updating_visitor(heap()); | 2339   PointersUpdatingVisitor updating_visitor(heap()); | 
| 2323 | 2340 | 
| 2324   // Update pointers in to space. | 2341   // Update pointers in to space. | 
| 2325   SemiSpaceIterator to_it(heap()->new_space()->bottom(), | 2342   SemiSpaceIterator to_it(heap()->new_space()->bottom(), | 
| 2326                           heap()->new_space()->top()); | 2343                           heap()->new_space()->top()); | 
| 2327   for (HeapObject* object = to_it.Next(); | 2344   for (HeapObject* object = to_it.Next(); | 
| 2328        object != NULL; | 2345        object != NULL; | 
| 2329        object = to_it.Next()) { | 2346        object = to_it.Next()) { | 
| 2330     Map* map = object->map(); | 2347     Map* map = object->map(); | 
| 2331     object->IterateBody(map->instance_type(), | 2348     object->IterateBody(map->instance_type(), | 
| 2332                         object->SizeFromMap(map), | 2349                         object->SizeFromMap(map), | 
| 2333                         &updating_visitor); | 2350                         &updating_visitor); | 
| 2334   } | 2351   } | 
| 2335 | 2352 | 
| 2336   // Update roots. | 2353   // Update roots. | 
| 2337   heap_->IterateRoots(&updating_visitor, VISIT_ALL_IN_SWEEP_NEWSPACE); | 2354   heap_->IterateRoots(&updating_visitor, VISIT_ALL_IN_SWEEP_NEWSPACE); | 
| 2338   LiveObjectList::IterateElements(&updating_visitor); | 2355   LiveObjectList::IterateElements(&updating_visitor); | 
| 2339 | 2356 | 
| 2340   { | 2357   { | 
| 2341     StoreBufferRebuildScope scope(heap_, | 2358     StoreBufferRebuildScope scope(heap_, | 
| 2342                                   heap_->store_buffer(), | 2359                                   heap_->store_buffer(), | 
| 2343                                   &Heap::ScavengeStoreBufferCallback); | 2360                                   &Heap::ScavengeStoreBufferCallback); | 
| 2344     heap_->store_buffer()->IteratePointersToNewSpace(&UpdatePointer); | 2361     heap_->store_buffer()->IteratePointersToNewSpace(&UpdatePointer); | 
| 2345   } | 2362   } | 
| 2346   slots_buffer_.Iterate(&updating_visitor); | 2363   slots_buffer_.Update(); | 
| 2347 | 2364 | 
| 2348   // Update pointers from cells. | 2365   // Update pointers from cells. | 
| 2349   HeapObjectIterator cell_iterator(heap_->cell_space()); | 2366   HeapObjectIterator cell_iterator(heap_->cell_space()); | 
| 2350   for (HeapObject* cell = cell_iterator.Next(); | 2367   for (HeapObject* cell = cell_iterator.Next(); | 
| 2351        cell != NULL; | 2368        cell != NULL; | 
| 2352        cell = cell_iterator.Next()) { | 2369        cell = cell_iterator.Next()) { | 
| 2353     if (cell->IsJSGlobalPropertyCell()) { | 2370     if (cell->IsJSGlobalPropertyCell()) { | 
| 2354       Address value_address = | 2371       Address value_address = | 
| 2355           reinterpret_cast<Address>(cell) + | 2372           reinterpret_cast<Address>(cell) + | 
| 2356           (JSGlobalPropertyCell::kValueOffset - kHeapObjectTag); | 2373           (JSGlobalPropertyCell::kValueOffset - kHeapObjectTag); | 
| 2357       updating_visitor.VisitPointer(reinterpret_cast<Object**>(value_address)); | 2374       updating_visitor.VisitPointer(reinterpret_cast<Object**>(value_address)); | 
| 2358     } | 2375     } | 
| 2359   } | 2376   } | 
| 2360 | 2377 | 
| 2361   // Update pointer from the global contexts list. | 2378   // Update pointer from the global contexts list. | 
| 2362   updating_visitor.VisitPointer(heap_->global_contexts_list_address()); | 2379   updating_visitor.VisitPointer(heap_->global_contexts_list_address()); | 
| 2363 | 2380 | 
| 2364   heap_->symbol_table()->Iterate(&updating_visitor); | 2381   heap_->symbol_table()->Iterate(&updating_visitor); | 
| 2365 | 2382 | 
| 2366   // Update pointers from external string table. | 2383   // Update pointers from external string table. | 
| 2367   heap_->UpdateReferencesInExternalStringTable( | 2384   heap_->UpdateReferencesInExternalStringTable( | 
| 2368       &UpdateReferenceInExternalStringTableEntry); | 2385       &UpdateReferenceInExternalStringTableEntry); | 
| 2369 | 2386 | 
| 2370   // Update JSFunction pointers from the runtime profiler. | 2387   // Update JSFunction pointers from the runtime profiler. | 
| 2371   heap_->isolate()->runtime_profiler()->UpdateSamplesAfterScavenge(); | 2388   heap_->isolate()->runtime_profiler()->UpdateSamplesAfterScavenge(); | 
| 2372 | 2389 | 
|  | 2390   EvacuationWeakObjectRetainer evacuation_object_retainer; | 
|  | 2391   heap()->ProcessWeakReferences(&evacuation_object_retainer); | 
|  | 2392 | 
| 2373 #ifdef DEBUG | 2393 #ifdef DEBUG | 
| 2374   if (FLAG_verify_heap) { | 2394   if (FLAG_verify_heap) { | 
| 2375     VerifyEvacuation(heap_); | 2395     VerifyEvacuation(heap_); | 
| 2376   } | 2396   } | 
| 2377 #endif | 2397 #endif | 
| 2378 | 2398 | 
| 2379   int npages = evacuation_candidates_.length(); | 2399   int npages = evacuation_candidates_.length(); | 
|  | 2400   ASSERT(compacting_ == (npages > 0)); | 
| 2380   for (int i = 0; i < npages; i++) { | 2401   for (int i = 0; i < npages; i++) { | 
| 2381     Page* p = evacuation_candidates_[i]; | 2402     Page* p = evacuation_candidates_[i]; | 
|  | 2403     ASSERT(p->IsEvacuationCandidate() && !p->WasEvacuated()); | 
| 2382     PagedSpace* space = static_cast<PagedSpace*>(p->owner()); | 2404     PagedSpace* space = static_cast<PagedSpace*>(p->owner()); | 
| 2383     space->Free(p->ObjectAreaStart(), Page::kObjectAreaSize); | 2405     space->Free(p->ObjectAreaStart(), Page::kObjectAreaSize); | 
| 2384     p->set_scan_on_scavenge(false); | 2406     p->set_scan_on_scavenge(false); | 
| 2385 | 2407     p->ClearEvacuationCandidate(); | 
| 2386     // We are not clearing evacuation candidate flag here | 2408     p->SetFlag(MemoryChunk::EVACUATED); | 
| 2387     // because it is required to notify lazy sweeper to skip | 2409     p->ClearFlag(MemoryChunk::WAS_SWEPT_CONSERVATIVELY); | 
| 2388     // these pages. |  | 
| 2389   } | 2410   } | 
|  | 2411   evacuation_candidates_.Rewind(0); | 
|  | 2412   compacting_ = false; | 
| 2390 } | 2413 } | 
| 2391 | 2414 | 
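The rewritten tail of `EvacuateNewSpaceAndCandidates()` now hands each page off explicitly: the candidate flag is cleared, `EVACUATED` is set, and any stale conservative-sweep mark is dropped, which is what lets the sweepers assert `!p->IsEvacuationCandidate() && !p->WasEvacuated()` instead of silently skipping. A plain bit-flag sketch of that hand-off (flag values invented, not `MemoryChunk`'s):

```cpp
#include <cstdint>

// Hypothetical page flags standing in for MemoryChunk's.
enum PageFlagSketch : uint32_t {
  EVACUATION_CANDIDATE     = 1u << 0,
  EVACUATED                = 1u << 1,
  WAS_SWEPT_CONSERVATIVELY = 1u << 2,
};

// State hand-off performed per page once its objects have moved.
inline uint32_t FinishEvacuation(uint32_t flags) {
  flags &= ~EVACUATION_CANDIDATE;
  flags |= EVACUATED;
  flags &= ~WAS_SWEPT_CONSERVATIVELY;
  return flags;
}
```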
| 2392 | 2415 | 
| 2393 INLINE(static uint32_t SweepFree(PagedSpace* space, | 2416 INLINE(static uint32_t SweepFree(PagedSpace* space, | 
| 2394                                  Page* p, | 2417                                  Page* p, | 
| 2395                                  uint32_t free_start, | 2418                                  uint32_t free_start, | 
| 2396                                  uint32_t region_end, | 2419                                  uint32_t region_end, | 
| 2397                                  uint32_t* cells)); | 2420                                  uint32_t* cells)); | 
| 2398 | 2421 | 
| 2399 | 2422 | 
| (...skipping 307 matching lines...) |
| 2707 | 2730 | 
| 2708 | 2731 | 
| 2709 // Sweeps a space conservatively.  After this has been done the larger free | 2732 // Sweeps a space conservatively.  After this has been done the larger free | 
| 2710 // spaces have been put on the free list and the smaller ones have been | 2733 // spaces have been put on the free list and the smaller ones have been | 
| 2711 // ignored and left untouched.  A free space is always either ignored or put | 2734 // ignored and left untouched.  A free space is always either ignored or put | 
| 2712 // on the free list, never split up into two parts.  This is important | 2735 // on the free list, never split up into two parts.  This is important | 
| 2713 // because it means that any FreeSpace maps left actually describe a region of | 2736 // because it means that any FreeSpace maps left actually describe a region of | 
| 2714 // memory that can be ignored when scanning.  Dead objects other than free | 2737 // memory that can be ignored when scanning.  Dead objects other than free | 
| 2715 // spaces will not contain the free space map. | 2738 // spaces will not contain the free space map. | 
| 2716 int MarkCompactCollector::SweepConservatively(PagedSpace* space, Page* p) { | 2739 int MarkCompactCollector::SweepConservatively(PagedSpace* space, Page* p) { | 
| 2717   // We might start advancing sweeper before evacuation happened. | 2740   ASSERT(!p->IsEvacuationCandidate() && !p->WasEvacuated()); | 
| 2718   if (p->IsEvacuationCandidate()) return 0; |  | 
| 2719 | 2741 | 
| 2720   int freed_bytes = 0; | 2742   int freed_bytes = 0; | 
| 2721 | 2743 | 
| 2722   MarkBit::CellType* cells = p->markbits()->cells(); | 2744   MarkBit::CellType* cells = p->markbits()->cells(); | 
| 2723 | 2745 | 
| 2724   p->SetFlag(MemoryChunk::WAS_SWEPT_CONSERVATIVELY); | 2746   p->SetFlag(MemoryChunk::WAS_SWEPT_CONSERVATIVELY); | 
| 2725 | 2747 | 
| 2726   // This is the start of the 32 word block that we are currently looking at. | 2748   // This is the start of the 32 word block that we are currently looking at. | 
| 2727   Address block_address = p->ObjectAreaStart(); | 2749   Address block_address = p->ObjectAreaStart(); | 
| 2728 | 2750 | 
| (...skipping 67 matching lines...) |
| 2796   return freed_bytes; | 2818   return freed_bytes; | 
| 2797 } | 2819 } | 
| 2798 | 2820 | 
| 2799 | 2821 | 
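The comment above states the conservative sweeper's invariant: a dead gap is either ignored or freed whole, never split, so any `FreeSpace` map found later bounds a region with no live objects inside. A toy sketch of the per-cell scan it builds on (one mark bit per word, 32 words per cell; `__builtin_ctz` is a GCC/Clang builtin):

```cpp
#include <cstdint>

// Toy version of the cell scan: each uint32_t covers 32 words, one mark
// bit per word. The dead run before the first set bit is released in one
// piece (or ignored if too small), never split into two free regions.
int FreeWordsBeforeFirstLiveObject(uint32_t cell) {
  if (cell == 0) return 32;      // the whole 32-word block is dead
  return __builtin_ctz(cell);    // words before the first live object
}
```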
| 2800 // Sweep a space precisely.  After this has been done the space can | 2822 // Sweep a space precisely.  After this has been done the space can | 
| 2801 // be iterated precisely, hitting only the live objects.  Code space | 2823 // be iterated precisely, hitting only the live objects.  Code space | 
| 2802 // is always swept precisely because we want to be able to iterate | 2824 // is always swept precisely because we want to be able to iterate | 
| 2803 // over it.  Map space is swept precisely, because it is not compacted. | 2825 // over it.  Map space is swept precisely, because it is not compacted. | 
| 2804 static void SweepPrecisely(PagedSpace* space, | 2826 static void SweepPrecisely(PagedSpace* space, | 
| 2805                            Page* p) { | 2827                            Page* p) { | 
|  | 2828   ASSERT(!p->IsEvacuationCandidate() && !p->WasEvacuated()); | 
| 2806   MarkBit::CellType* cells = p->markbits()->cells(); | 2829   MarkBit::CellType* cells = p->markbits()->cells(); | 
| 2807 | 2830 | 
| 2808   p->ClearFlag(MemoryChunk::WAS_SWEPT_CONSERVATIVELY); | 2831   p->ClearFlag(MemoryChunk::WAS_SWEPT_CONSERVATIVELY); | 
| 2809 | 2832 | 
| 2810   int last_cell_index = | 2833   int last_cell_index = | 
| 2811       Bitmap::IndexToCell( | 2834       Bitmap::IndexToCell( | 
| 2812           Bitmap::CellAlignIndex( | 2835           Bitmap::CellAlignIndex( | 
| 2813               p->AddressToMarkbitIndex(p->ObjectAreaEnd()))); | 2836               p->AddressToMarkbitIndex(p->ObjectAreaEnd()))); | 
| 2814 | 2837 | 
| 2815   int cell_index = Page::kFirstUsedCell; | 2838   int cell_index = Page::kFirstUsedCell; | 
| (...skipping 235 matching lines...) |
| 3051     if (buffer_idx_ == buffers_.length()) { | 3074     if (buffer_idx_ == buffers_.length()) { | 
| 3052       buffers_.Add(new ObjectSlot[kBufferSize]); | 3075       buffers_.Add(new ObjectSlot[kBufferSize]); | 
| 3053     } | 3076     } | 
| 3054     buffer_ = buffers_[buffer_idx_]; | 3077     buffer_ = buffers_[buffer_idx_]; | 
| 3055   } | 3078   } | 
| 3056 | 3079 | 
| 3057   buffer_[idx_++] = slot; | 3080   buffer_[idx_++] = slot; | 
| 3058 } | 3081 } | 
| 3059 | 3082 | 
| 3060 | 3083 | 
| 3061 void SlotsBuffer::Iterate(ObjectVisitor* visitor) { | 3084 static inline void UpdateSlot(Object** slot) { | 
|  | 3085   Object* obj = *slot; | 
|  | 3086   if (!obj->IsHeapObject()) return; | 
|  | 3087 | 
|  | 3088   HeapObject* heap_obj = HeapObject::cast(obj); | 
|  | 3089 | 
|  | 3090   MapWord map_word = heap_obj->map_word(); | 
|  | 3091   if (map_word.IsForwardingAddress()) { | 
|  | 3092     ASSERT(MarkCompactCollector::IsOnEvacuationCandidate(*slot)); | 
|  | 3093     *slot = map_word.ToForwardingAddress(); | 
|  | 3094     ASSERT(!MarkCompactCollector::IsOnEvacuationCandidate(*slot)); | 
|  | 3095   } | 
|  | 3096 } | 
|  | 3097 | 
|  | 3098 | 
|  | 3099 void SlotsBuffer::Update() { | 
| 3062   if (buffer_idx_ < 0) return; | 3100   if (buffer_idx_ < 0) return; | 
| 3063 | 3101 | 
| 3064   for (int buffer_index = 0; buffer_index < buffer_idx_; ++buffer_index) { | 3102   for (int buffer_index = 0; buffer_index < buffer_idx_; ++buffer_index) { | 
| 3065     ObjectSlot* buffer = buffers_[buffer_index]; | 3103     ObjectSlot* buffer = buffers_[buffer_index]; | 
| 3066     for (int slot_idx = 0; slot_idx < kBufferSize; ++slot_idx) { | 3104     for (int slot_idx = 0; slot_idx < kBufferSize; ++slot_idx) { | 
| 3067       visitor->VisitPointer(buffer[slot_idx]); | 3105       UpdateSlot(buffer[slot_idx]); | 
| 3068     } | 3106     } | 
| 3069   } | 3107   } | 
| 3070 | 3108 | 
| 3071   ObjectSlot* last_buffer = buffers_[buffer_idx_]; | 3109   ObjectSlot* last_buffer = buffers_[buffer_idx_]; | 
| 3072   for (int slot_idx = 0; slot_idx < idx_; ++slot_idx) { | 3110   for (int slot_idx = 0; slot_idx < idx_; ++slot_idx) { | 
| 3073     visitor->VisitPointer(last_buffer[slot_idx]); | 3111     UpdateSlot(last_buffer[slot_idx]); | 
| 3074   } | 3112   } | 
| 3075 } | 3113 } | 
| 3076 | 3114 | 
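`SlotsBuffer` now rewrites its recorded slots directly via `UpdateSlot()` instead of routing through an `ObjectVisitor`. The record-then-update protocol, reduced to a standalone sketch (`std::vector` in place of the chunked buffers, a map in place of map-word forwarding):

```cpp
#include <unordered_map>
#include <vector>

// Stand-ins; this sketches the protocol only, not V8's SlotsBuffer.
struct HeapObjSketch { int payload; };
using SlotSketch = HeapObjSketch**;

std::vector<SlotSketch> recorded;                          // RecordSlot()
std::unordered_map<HeapObjSketch*, HeapObjSketch*> moved;  // old -> new copy

void RecordSlotSketch(SlotSketch slot) { recorded.push_back(slot); }

// Mirrors SlotsBuffer::Update(): rewrite every recorded slot whose
// target was evacuated; leave slots to unmoved objects alone.
void UpdateSketch() {
  for (SlotSketch slot : recorded) {
    auto it = moved.find(*slot);
    if (it != moved.end()) *slot = it->second;
  }
}
```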
| 3077 | 3115 | 
| 3078 void SlotsBuffer::Report() { | 3116 void SlotsBuffer::Report() { | 
| 3079 } | 3117 } | 
| 3080 | 3118 | 
| 3081 | 3119 | 
| 3082 } }  // namespace v8::internal | 3120 } }  // namespace v8::internal | 