Chromium Code Reviews

Unified Diff: src/mark-compact.cc

Issue 7326012: Split the single slots buffer into per-page slots buffers. (Closed) Base URL: https://v8.googlecode.com/svn/branches/experimental/gc
Patch Set: Created 9 years, 5 months ago
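
What this CL does: the collector previously kept one growable slots buffer (slots_buffer_) recording every pointer into an evacuation candidate. The patch replaces it with chains of fixed-size, malloc'ed buffers managed by a SlotsBufferAllocator: one chain per evacuation candidate page (p->slots_buffer()) plus a collector-wide chain (migration_slots_buffer_) for slots recorded while migrating objects. A minimal sketch of the chained-buffer shape follows, using only names visible in this patch (Initialize, next, AsArray, idx_, kSizeWords); the capacity constant, the field layout, and the IsFull/Add helpers are assumptions, not the declaration from src/mark-compact.h.

// Sketch only -- approximates the SlotsBuffer this diff implies.
class SlotsBuffer {
 public:
  typedef Object** ObjectSlot;

  void Initialize(SlotsBuffer* next_buffer) {
    idx_ = 0;
    next_ = next_buffer;  // Chain in front of the previous (full) buffer.
  }

  SlotsBuffer* next() { return next_; }
  ObjectSlot* AsArray() { return slots_; }

  bool IsFull() { return idx_ == kNumberOfElements; }   // Assumed helper.
  void Add(ObjectSlot slot) { slots_[idx_++] = slot; }  // Assumed helper.

  static const int kNumberOfElements = 1021;  // Assumed capacity.
  static const int kSizeWords = 1024;         // Assumed: header + slot array.

 private:
  int idx_;            // Number of slots recorded in this buffer.
  SlotsBuffer* next_;  // Older buffer in the chain, or NULL.
  ObjectSlot slots_[kNumberOfElements];
};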
 // Copyright 2011 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 42 matching lines...)
 // -------------------------------------------------------------------------
 // MarkCompactCollector

 MarkCompactCollector::MarkCompactCollector() :  // NOLINT
 #ifdef DEBUG
       state_(IDLE),
 #endif
       sweep_precisely_(false),
       compacting_(false),
       tracer_(NULL),
+      migration_slots_buffer_(NULL),
 #ifdef DEBUG
       live_young_objects_size_(0),
       live_old_pointer_objects_size_(0),
       live_old_data_objects_size_(0),
       live_code_objects_size_(0),
       live_map_objects_size_(0),
       live_cell_objects_size_(0),
       live_lo_objects_size_(0),
       live_bytes_(0),
 #endif
(...skipping 152 matching lines...)
   evacuation_candidates_.Add(p);
 }


 bool MarkCompactCollector::StartCompaction() {
   // Don't start compaction if we are in the middle of an incremental
   // marking cycle: we did not collect any slots.
   if (!compacting_ && !heap_->incremental_marking()->IsMarking()) {
     ASSERT(evacuation_candidates_.length() == 0);

-    // TODO(gc) Shrink slots buffer when we receive low memory notification.
-    slots_buffer_.Clear();
-
     CollectEvacuationCandidates(heap()->old_pointer_space());
     CollectEvacuationCandidates(heap()->old_data_space());

     heap()->old_pointer_space()->EvictEvacuationCandidatesFromFreeLists();
     heap()->old_data_space()->EvictEvacuationCandidatesFromFreeLists();

     compacting_ = evacuation_candidates_.length() > 0;
   }

   return compacting_;
(...skipping 129 matching lines...)
   new_mark_bit.Set();
   return true;
 }


 void MarkCompactCollector::CollectEvacuationCandidates(PagedSpace* space) {
   ASSERT(space->identity() == OLD_POINTER_SPACE ||
          space->identity() == OLD_DATA_SPACE);

   PageIterator it(space);
+  if (it.has_next()) it.next();  // Never compact the first page.
   while (it.has_next()) {
     Page* p = it.next();
     if (space->IsFragmented(p)) {
       AddEvacuationCandidate(p);
     } else {
       p->ClearEvacuationCandidate();
     }
   }
 }

(...skipping 1659 matching lines...)

     for (int remaining = size / kPointerSize; remaining > 0; remaining--) {
       Object* value = Memory::Object_at(src_slot);

       Memory::Object_at(dst_slot) = value;

       if (heap_->InNewSpace(value)) {
         heap_->store_buffer()->Mark(dst_slot);
       } else if (value->IsHeapObject() &&
                  MarkCompactCollector::IsOnEvacuationCandidate(value)) {
-        slots_buffer_.Add(reinterpret_cast<Object**>(dst_slot));
+        SlotsBuffer::AddTo(&slots_buffer_allocator_,
+                           &migration_slots_buffer_,
+                           reinterpret_cast<Object**>(dst_slot),
+                           SlotsBuffer::IGNORE_OVERFLOW);
       }

       src_slot += kPointerSize;
       dst_slot += kPointerSize;
     }
   } else {
     heap_->CopyBlock(dst, src, size);
   }
   Memory::Address_at(src) = dst;
 }
(...skipping 184 matching lines...)
   }

   heap_->IncrementYoungSurvivorsCounter(survivors_size);
   new_space->set_age_mark(new_space->top());
 }


 void MarkCompactCollector::EvacuateLiveObjectsFromPage(Page* p) {
   AlwaysAllocateScope always_allocate;

-  ASSERT(p->IsEvacuationCandidate() && !p->WasEvacuated());
+  ASSERT(p->IsEvacuationCandidate() && !p->WasSwept());

   PagedSpace* space = static_cast<PagedSpace*>(p->owner());

   MarkBit::CellType* cells = p->markbits()->cells();

   int last_cell_index =
       Bitmap::IndexToCell(
           Bitmap::CellAlignIndex(
               p->AddressToMarkbitIndex(p->ObjectAreaEnd())));

(...skipping 28 matching lines...)
       ASSERT(object->map_word().IsForwardingAddress());
     }
   }
 }


 void MarkCompactCollector::EvacuatePages() {
   int npages = evacuation_candidates_.length();
   for (int i = 0; i < npages; i++) {
     Page* p = evacuation_candidates_[i];
-    EvacuateLiveObjectsFromPage(p);
+    ASSERT(p->IsEvacuationCandidate() ||
+           p->IsFlagSet(Page::RESCAN_ON_EVACUATION));
+    if (p->IsEvacuationCandidate()) {
+      EvacuateLiveObjectsFromPage(p);
+    }
   }
 }


 class EvacuationWeakObjectRetainer : public WeakObjectRetainer {
  public:
   virtual Object* RetainAs(Object* object) {
     if (object->IsHeapObject()) {
       HeapObject* heap_object = HeapObject::cast(object);
       MapWord map_word = heap_object->map_word();
       if (map_word.IsForwardingAddress()) {
         return map_word.ToForwardingAddress();
       }
     }
     return object;
   }
 };


+static inline void UpdateSlot(Object** slot) {
+  Object* obj = *slot;
+  if (!obj->IsHeapObject()) return;
+
+  HeapObject* heap_obj = HeapObject::cast(obj);
+
+  MapWord map_word = heap_obj->map_word();
+  if (map_word.IsForwardingAddress()) {
+    ASSERT(MarkCompactCollector::IsOnEvacuationCandidate(*slot));
+    *slot = map_word.ToForwardingAddress();
+    ASSERT(!MarkCompactCollector::IsOnEvacuationCandidate(*slot));
+  }
+}
+
+
+static inline void UpdateSlotsInRange(Object** start, Object** end) {
+  for (Object** slot = start;
+       slot < end;
+       slot++) {
+    Object* obj = *slot;
+    if (obj->IsHeapObject() &&
+        MarkCompactCollector::IsOnEvacuationCandidate(obj)) {
+      MapWord map_word = HeapObject::cast(obj)->map_word();
+      if (map_word.IsForwardingAddress()) {
+        *slot = map_word.ToForwardingAddress();
+        ASSERT(!MarkCompactCollector::IsOnEvacuationCandidate(*slot));
+      }
+    }
+  }
+}
+
+
+static void UpdateSlotsOnPage(Page* p, ObjectVisitor* visitor) {
+  // TODO(gc) this is basically a clone of SweepPrecisely.
+  PagedSpace* space = static_cast<PagedSpace*>(p->owner());
+  MarkBit::CellType* cells = p->markbits()->cells();
+
+  p->ClearFlag(MemoryChunk::WAS_SWEPT_CONSERVATIVELY);
+  p->ClearFlag(MemoryChunk::RESCAN_ON_EVACUATION);
+  p->MarkSwept();
+
+  int last_cell_index =
+      Bitmap::IndexToCell(
+          Bitmap::CellAlignIndex(
+              p->AddressToMarkbitIndex(p->ObjectAreaEnd())));
+
+  int cell_index = Page::kFirstUsedCell;
+  Address free_start = p->ObjectAreaStart();
+  ASSERT(reinterpret_cast<intptr_t>(free_start) % (32 * kPointerSize) == 0);
+  Address object_address = p->ObjectAreaStart();
+  int offsets[16];
+
+  for (cell_index = Page::kFirstUsedCell;
+       cell_index < last_cell_index;
+       cell_index++, object_address += 32 * kPointerSize) {
+    ASSERT((unsigned)cell_index ==
+           Bitmap::IndexToCell(
+               Bitmap::CellAlignIndex(
+                   p->AddressToMarkbitIndex(object_address))));
+    int live_objects = MarkWordToObjectStarts(cells[cell_index], offsets);
+    int live_index = 0;
+    for ( ; live_objects != 0; live_objects--) {
+      Address free_end = object_address + offsets[live_index++] * kPointerSize;
+      if (free_end != free_start) {
+        space->Free(free_start, free_end - free_start);
+      }
+      HeapObject* live_object = HeapObject::FromAddress(free_end);
+      ASSERT(Marking::IsBlack(Marking::MarkBitFrom(live_object)));
+      int size = live_object->Size();
+      UpdateSlotsInRange(HeapObject::RawField(live_object, kPointerSize),
+                         HeapObject::RawField(live_object, size));
+      free_start = free_end + size;
+    }
+  }
+  if (free_start != p->ObjectAreaEnd()) {
+    space->Free(free_start, p->ObjectAreaEnd() - free_start);
+  }
+}
+
+
 void MarkCompactCollector::EvacuateNewSpaceAndCandidates() {
   EvacuateNewSpace();
   EvacuatePages();

   // Second pass: find pointers to new space and update them.
   PointersUpdatingVisitor updating_visitor(heap());

   // Update pointers in to space.
   SemiSpaceIterator to_it(heap()->new_space()->bottom(),
                           heap()->new_space()->top());
   for (HeapObject* object = to_it.Next();
        object != NULL;
        object = to_it.Next()) {
     Map* map = object->map();
     object->IterateBody(map->instance_type(),
                         object->SizeFromMap(map),
                         &updating_visitor);
   }

   // Update roots.
   heap_->IterateRoots(&updating_visitor, VISIT_ALL_IN_SWEEP_NEWSPACE);
   LiveObjectList::IterateElements(&updating_visitor);

   {
     StoreBufferRebuildScope scope(heap_,
                                   heap_->store_buffer(),
                                   &Heap::ScavengeStoreBufferCallback);
     heap_->store_buffer()->IteratePointersToNewSpace(&UpdatePointer);
   }
-  slots_buffer_.Update();
+
+  SlotsBuffer::UpdateSlotsRecordedIn(migration_slots_buffer_);
+  if (FLAG_trace_fragmentation) {
+    PrintF("  migration slots buffer: %d\n",
+           SlotsBuffer::SizeOfChain(migration_slots_buffer_));
+  }
+
+  int npages = evacuation_candidates_.length();
+  for (int i = 0; i < npages; i++) {
+    Page* p = evacuation_candidates_[i];
+    ASSERT(p->IsEvacuationCandidate() ||
+           p->IsFlagSet(Page::RESCAN_ON_EVACUATION));
+
+    if (p->IsEvacuationCandidate()) {
+      SlotsBuffer::UpdateSlotsRecordedIn(p->slots_buffer());
+      if (FLAG_trace_fragmentation) {
+        PrintF("  page %p slots buffer: %d\n",
+               reinterpret_cast<void*>(p),
+               SlotsBuffer::SizeOfChain(p->slots_buffer()));
+      }
+    } else {
+      UpdateSlotsOnPage(p, &updating_visitor);
+    }
+  }

   // Update pointers from cells.
   HeapObjectIterator cell_iterator(heap_->cell_space());
   for (HeapObject* cell = cell_iterator.Next();
        cell != NULL;
        cell = cell_iterator.Next()) {
     if (cell->IsJSGlobalPropertyCell()) {
       Address value_address =
           reinterpret_cast<Address>(cell) +
           (JSGlobalPropertyCell::kValueOffset - kHeapObjectTag);
(...skipping 15 matching lines...)

   EvacuationWeakObjectRetainer evacuation_object_retainer;
   heap()->ProcessWeakReferences(&evacuation_object_retainer);

 #ifdef DEBUG
   if (FLAG_verify_heap) {
     VerifyEvacuation(heap_);
   }
 #endif

-  int npages = evacuation_candidates_.length();
-  ASSERT(compacting_ == (npages > 0));
+  slots_buffer_allocator_.DeallocateChain(&migration_slots_buffer_);
+  ASSERT(migration_slots_buffer_ == NULL);
   for (int i = 0; i < npages; i++) {
     Page* p = evacuation_candidates_[i];
-    ASSERT(p->IsEvacuationCandidate() && !p->WasEvacuated());
+    if (!p->IsEvacuationCandidate()) continue;
     PagedSpace* space = static_cast<PagedSpace*>(p->owner());
     space->Free(p->ObjectAreaStart(), Page::kObjectAreaSize);
     p->set_scan_on_scavenge(false);
+    slots_buffer_allocator_.DeallocateChain(p->slots_buffer_address());
     p->ClearEvacuationCandidate();
-    p->SetFlag(MemoryChunk::EVACUATED);
+    p->MarkSwept();
     p->ClearFlag(MemoryChunk::WAS_SWEPT_CONSERVATIVELY);
   }
   evacuation_candidates_.Rewind(0);
   compacting_ = false;
 }


 INLINE(static uint32_t SweepFree(PagedSpace* space,
                                  Page* p,
                                  uint32_t free_start,
(...skipping 311 matching lines...)


 // Sweeps a space conservatively. After this has been done the larger free
 // spaces have been put on the free list and the smaller ones have been
 // ignored and left untouched. A free space is always either ignored or put
 // on the free list, never split up into two parts. This is important
 // because it means that any FreeSpace maps left actually describe a region of
 // memory that can be ignored when scanning. Dead objects other than free
 // spaces will not contain the free space map.
 int MarkCompactCollector::SweepConservatively(PagedSpace* space, Page* p) {
-  ASSERT(!p->IsEvacuationCandidate() && !p->WasEvacuated());
+  ASSERT(!p->IsEvacuationCandidate() && !p->WasSwept());

   int freed_bytes = 0;

   MarkBit::CellType* cells = p->markbits()->cells();

   p->SetFlag(MemoryChunk::WAS_SWEPT_CONSERVATIVELY);

   // This is the start of the 32 word block that we are currently looking at.
   Address block_address = p->ObjectAreaStart();

(...skipping 67 matching lines...)
   return freed_bytes;
 }


 // Sweep a space precisely. After this has been done the space can
 // be iterated precisely, hitting only the live objects. Code space
 // is always swept precisely because we want to be able to iterate
 // over it. Map space is swept precisely, because it is not compacted.
 static void SweepPrecisely(PagedSpace* space,
                            Page* p) {
-  ASSERT(!p->IsEvacuationCandidate() && !p->WasEvacuated());
+  ASSERT(!p->IsEvacuationCandidate() && !p->WasSwept());
   MarkBit::CellType* cells = p->markbits()->cells();

   p->ClearFlag(MemoryChunk::WAS_SWEPT_CONSERVATIVELY);

   int last_cell_index =
       Bitmap::IndexToCell(
           Bitmap::CellAlignIndex(
               p->AddressToMarkbitIndex(p->ObjectAreaEnd())));

   int cell_index = Page::kFirstUsedCell;
(...skipping 40 matching lines...)
   int newspace_size = space->heap()->new_space()->Size();

   while (it.has_next()) {
     Page* p = it.next();

     if (p->IsEvacuationCandidate()) {
       ASSERT(evacuation_candidates_.length() > 0);
       continue;
     }

+    if (p->IsFlagSet(Page::RESCAN_ON_EVACUATION)) {
+      // Will be processed in EvacuateNewSpaceAndCandidates.
+      continue;
+    }
+
     switch (sweeper) {
       case CONSERVATIVE: {
         SweepConservatively(space, p);
         break;
       }
       case LAZY_CONSERVATIVE: {
         Page* next_page = p->next_page();
         freed_bytes += SweepConservatively(space, p);
         // TODO(gc): tweak the heuristic.
         if (freed_bytes >= newspace_size && p != space->LastPage()) {
(...skipping 140 matching lines...)
   }
 #endif
 }


 void MarkCompactCollector::Initialize() {
   StaticMarkingVisitor::Initialize();
 }


-SlotsBuffer::SlotsBuffer()
-    : buffers_(0),
-      buffer_(NULL),
-      idx_(kBufferSize),
-      buffer_idx_(-1) {
-}
-
-
-SlotsBuffer::~SlotsBuffer() {
-  for (int buffer_index = 0; buffer_index < buffers_.length(); ++buffer_index) {
-    delete buffers_[buffer_index];
-  }
-}
+void SlotsBuffer::UpdateSlots() {
+  ObjectSlot* slots = AsArray();
+  for (int slot_idx = 0; slot_idx < idx_; ++slot_idx) {
+    UpdateSlot(slots[slot_idx]);
+  }
+}


-void SlotsBuffer::Clear() {
-  idx_ = kBufferSize;
-  buffer_idx_ = -1;
-}
+SlotsBuffer* SlotsBufferAllocator::AllocateBuffer(SlotsBuffer* next_buffer) {
+  // TODO(gc) Consider maintaining local cache of buffers.
+  SlotsBuffer* buffer = reinterpret_cast<SlotsBuffer*>(
+      malloc(SlotsBuffer::kSizeWords * kPointerSize));

    Erik Corry 2011/07/08 13:02:38: This could just be new SlotsBuffer().

+  buffer->Initialize(next_buffer);
+  return buffer;
+}
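
What the review comment above suggests would presumably look like this (a sketch, assuming SlotsBuffer gains a constructor that does what Initialize() does and that sizeof(SlotsBuffer) already covers the slot array):

SlotsBuffer* SlotsBufferAllocator::AllocateBuffer(SlotsBuffer* next_buffer) {
  return new SlotsBuffer(next_buffer);  // Hypothetical constructor.
}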


-void SlotsBuffer::Add(Object** slot) {
-  if (idx_ == kBufferSize) {
-    idx_ = 0;
-    buffer_idx_++;
-    if (buffer_idx_ == buffers_.length()) {
-      buffers_.Add(new ObjectSlot[kBufferSize]);
-    }
-    buffer_ = buffers_[buffer_idx_];
-  }
-
-  buffer_[idx_++] = slot;
-}
+void SlotsBufferAllocator::DeallocateBuffer(SlotsBuffer* buffer) {
+  free(buffer);
+}


-static inline void UpdateSlot(Object** slot) {
-  Object* obj = *slot;
-  if (!obj->IsHeapObject()) return;
-
-  HeapObject* heap_obj = HeapObject::cast(obj);
-
-  MapWord map_word = heap_obj->map_word();
-  if (map_word.IsForwardingAddress()) {
-    ASSERT(MarkCompactCollector::IsOnEvacuationCandidate(*slot));
-    *slot = map_word.ToForwardingAddress();
-    ASSERT(!MarkCompactCollector::IsOnEvacuationCandidate(*slot));
-  }
-}
+void SlotsBufferAllocator::DeallocateChain(SlotsBuffer** buffer_address) {
+  SlotsBuffer* buffer = *buffer_address;
+  while (buffer != NULL) {
+    SlotsBuffer* next_buffer = buffer->next();
+    DeallocateBuffer(buffer);
+    buffer = next_buffer;
+  }
+  *buffer_address = NULL;
+}
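
SlotsBuffer::AddTo itself is declared in src/mark-compact.h and is not shown in this file. A plausible sketch, reconstructed from its call site in MigrateObject above; the AdditionMode parameter name, the overflow handling, and the IsFull/Add helpers (see the sketch near the top) are assumptions.

void SlotsBuffer::AddTo(SlotsBufferAllocator* allocator,
                        SlotsBuffer** buffer_address,
                        ObjectSlot slot,
                        AdditionMode mode) {  // e.g. IGNORE_OVERFLOW.
  SlotsBuffer* buffer = *buffer_address;
  if (buffer == NULL || buffer->IsFull()) {
    // IGNORE_OVERFLOW simply grows the chain by one fixed-size buffer; a
    // stricter mode could instead bail out and force a page rescan.
    buffer = allocator->AllocateBuffer(buffer);
    *buffer_address = buffer;
  }
  buffer->Add(slot);
}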


-void SlotsBuffer::Update() {
-  if (buffer_idx_ < 0) return;
-
-  for (int buffer_index = 0; buffer_index < buffer_idx_; ++buffer_index) {
-    ObjectSlot* buffer = buffers_[buffer_index];
-    for (int slot_idx = 0; slot_idx < kBufferSize; ++slot_idx) {
-      UpdateSlot(buffer[slot_idx]);
-    }
-  }
-
-  ObjectSlot* last_buffer = buffers_[buffer_idx_];
-  for (int slot_idx = 0; slot_idx < idx_; ++slot_idx) {
-    UpdateSlot(last_buffer[slot_idx]);
-  }
-}
-
-
-void SlotsBuffer::Report() {
-}
-
-
 } } // namespace v8::internal