Chromium Code Reviews

Side by Side Diff: src/mark-compact.cc

Issue 3066044: Generalize virtually dispatched scavenger to virtually dispatched specialized visitors. (Closed)
Patch Set: cleanup (created 10 years, 4 months ago)
1 // Copyright 2006-2008 the V8 project authors. All rights reserved. 1 // Copyright 2006-2008 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 14 matching lines...)
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 27
28 #include "v8.h" 28 #include "v8.h"
29 29
30 #include "execution.h" 30 #include "execution.h"
31 #include "heap-profiler.h" 31 #include "heap-profiler.h"
32 #include "global-handles.h" 32 #include "global-handles.h"
33 #include "ic-inl.h" 33 #include "ic-inl.h"
34 #include "mark-compact.h" 34 #include "mark-compact.h"
35 #include "objects-visiting.h"
35 #include "stub-cache.h" 36 #include "stub-cache.h"
36 37
37 namespace v8 { 38 namespace v8 {
38 namespace internal { 39 namespace internal {
39 40
40 // ------------------------------------------------------------------------- 41 // -------------------------------------------------------------------------
41 // MarkCompactCollector 42 // MarkCompactCollector
42 43
43 bool MarkCompactCollector::force_compaction_ = false; 44 bool MarkCompactCollector::force_compaction_ = false;
44 bool MarkCompactCollector::compacting_collection_ = false; 45 bool MarkCompactCollector::compacting_collection_ = false;
(...skipping 11 matching lines...)
56 int MarkCompactCollector::live_bytes_ = 0; 57 int MarkCompactCollector::live_bytes_ = 0;
57 int MarkCompactCollector::live_young_objects_size_ = 0; 58 int MarkCompactCollector::live_young_objects_size_ = 0;
58 int MarkCompactCollector::live_old_data_objects_size_ = 0; 59 int MarkCompactCollector::live_old_data_objects_size_ = 0;
59 int MarkCompactCollector::live_old_pointer_objects_size_ = 0; 60 int MarkCompactCollector::live_old_pointer_objects_size_ = 0;
60 int MarkCompactCollector::live_code_objects_size_ = 0; 61 int MarkCompactCollector::live_code_objects_size_ = 0;
61 int MarkCompactCollector::live_map_objects_size_ = 0; 62 int MarkCompactCollector::live_map_objects_size_ = 0;
62 int MarkCompactCollector::live_cell_objects_size_ = 0; 63 int MarkCompactCollector::live_cell_objects_size_ = 0;
63 int MarkCompactCollector::live_lo_objects_size_ = 0; 64 int MarkCompactCollector::live_lo_objects_size_ = 0;
64 #endif 65 #endif
65 66
67
66 void MarkCompactCollector::CollectGarbage() { 68 void MarkCompactCollector::CollectGarbage() {
67 // Make sure that Prepare() has been called. The individual steps below will 69 // Make sure that Prepare() has been called. The individual steps below will
68 // update the state as they proceed. 70 // update the state as they proceed.
69 ASSERT(state_ == PREPARE_GC); 71 ASSERT(state_ == PREPARE_GC);
70 72
71 // Prepare has selected whether to compact the old generation or not. 73 // Prepare has selected whether to compact the old generation or not.
72 // Tell the tracer. 74 // Tell the tracer.
73 if (IsCompacting()) tracer_->set_is_compacting(); 75 if (IsCompacting()) tracer_->set_is_compacting();
74 76
75 MarkLiveObjects(); 77 MarkLiveObjects();
(...skipping 161 matching lines...)
237 // page dirty marks. Therefore, we only replace the string with its left 239 // page dirty marks. Therefore, we only replace the string with its left
238 // substring when page dirty marks do not change. 240 // substring when page dirty marks do not change.
239 Object* first = reinterpret_cast<ConsString*>(object)->unchecked_first(); 241 Object* first = reinterpret_cast<ConsString*>(object)->unchecked_first();
240 if (!Heap::InNewSpace(object) && Heap::InNewSpace(first)) return object; 242 if (!Heap::InNewSpace(object) && Heap::InNewSpace(first)) return object;
241 243
242 *p = first; 244 *p = first;
243 return HeapObject::cast(first); 245 return HeapObject::cast(first);
244 } 246 }
245 247
246 248
247 // Helper class for marking pointers in HeapObjects. 249 class StaticMarkingVisitor : public StaticVisitorBase {
248 class MarkingVisitor : public ObjectVisitor {
249 public: 250 public:
250 void VisitPointer(Object** p) { 251 static inline void IterateBody(Map* map, HeapObject* obj) {
252 table_.GetVisitor(map)(map, obj);
253 }
254
255 static void Initialize() {
256 table_.Register(kVisitShortcutCandidate,
257 &FixedBodyVisitor<StaticMarkingVisitor,
258 ConsString::BodyDescriptor,
259 void>::Visit);
260
261 table_.Register(kVisitConsString,
262 &FixedBodyVisitor<StaticMarkingVisitor,
263 ConsString::BodyDescriptor,
264 void>::Visit);
265
266
267 table_.Register(kVisitFixedArray,
268 &FlexibleBodyVisitor<StaticMarkingVisitor,
269 FixedArray::BodyDescriptor,
270 void>::Visit);
271
272 table_.Register(kVisitSharedFunctionInfo,
273 &FixedBodyVisitor<StaticMarkingVisitor,
274 SharedFunctionInfo::BodyDescriptor,
275 void>::Visit);
276
277 table_.Register(kVisitByteArray, &DataObjectVisitor::Visit);
278 table_.Register(kVisitSeqAsciiString, &DataObjectVisitor::Visit);
279 table_.Register(kVisitSeqTwoByteString, &DataObjectVisitor::Visit);
280
281 table_.Register(kVisitOddball,
282 &FixedBodyVisitor<StaticMarkingVisitor,
283 Oddball::BodyDescriptor,
284 void>::Visit);
285 table_.Register(kVisitMap,
286 &FixedBodyVisitor<StaticMarkingVisitor,
287 Map::BodyDescriptor,
288 void>::Visit);
289
290 table_.Register(kVisitCode, &VisitCode);
291
292 table_.Register(kVisitPropertyCell,
293 &FixedBodyVisitor<StaticMarkingVisitor,
294 JSGlobalPropertyCell::BodyDescriptor,
295 void>::Visit);
296
297 table_.RegisterSpecializations<DataObjectVisitor,
298 kVisitDataObject,
299 kVisitDataObjectGeneric>();
300
301 table_.RegisterSpecializations<JSObjectVisitor,
302 kVisitJSObject,
303 kVisitJSObjectGeneric>();
304
305 table_.RegisterSpecializations<StructObjectVisitor,
306 kVisitStruct,
307 kVisitStructGeneric>();
308 }
309
310 INLINE(static void VisitPointer(Object** p)) {
251 MarkObjectByPointer(p); 311 MarkObjectByPointer(p);
252 } 312 }
253 313
254 void VisitPointers(Object** start, Object** end) { 314 INLINE(static void VisitPointers(Object** start, Object** end)) {
255 // Mark all objects pointed to in [start, end). 315 // Mark all objects pointed to in [start, end).
256 const int kMinRangeForMarkingRecursion = 64; 316 const int kMinRangeForMarkingRecursion = 64;
257 if (end - start >= kMinRangeForMarkingRecursion) { 317 if (end - start >= kMinRangeForMarkingRecursion) {
258 if (VisitUnmarkedObjects(start, end)) return; 318 if (VisitUnmarkedObjects(start, end)) return;
259 // We are close to a stack overflow, so just mark the objects. 319 // We are close to a stack overflow, so just mark the objects.
260 } 320 }
261 for (Object** p = start; p < end; p++) MarkObjectByPointer(p); 321 for (Object** p = start; p < end; p++) MarkObjectByPointer(p);
262 } 322 }
263 323
264 void VisitCodeTarget(RelocInfo* rinfo) { 324 static inline void VisitCodeTarget(RelocInfo* rinfo) {
265 ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode())); 325 ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode()));
266 Code* code = Code::GetCodeFromTargetAddress(rinfo->target_address()); 326 Code* code = Code::GetCodeFromTargetAddress(rinfo->target_address());
267 if (FLAG_cleanup_ics_at_gc && code->is_inline_cache_stub()) { 327 if (FLAG_cleanup_ics_at_gc && code->is_inline_cache_stub()) {
268 IC::Clear(rinfo->pc()); 328 IC::Clear(rinfo->pc());
269 // Please note targets for cleared inline caches do not have to be 329 // Please note targets for cleared inline caches do not have to be
270 // marked since they are contained in Heap::non_monomorphic_cache(). 330 // marked since they are contained in Heap::non_monomorphic_cache().
271 } else { 331 } else {
272 MarkCompactCollector::MarkObject(code); 332 MarkCompactCollector::MarkObject(code);
273 } 333 }
274 } 334 }
275 335
276 void VisitDebugTarget(RelocInfo* rinfo) { 336 static inline void VisitDebugTarget(RelocInfo* rinfo) {
277 ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) && 337 ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) &&
278 rinfo->IsPatchedReturnSequence()) || 338 rinfo->IsPatchedReturnSequence()) ||
279 (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) && 339 (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
280 rinfo->IsPatchedDebugBreakSlotSequence())); 340 rinfo->IsPatchedDebugBreakSlotSequence()));
281 HeapObject* code = Code::GetCodeFromTargetAddress(rinfo->call_address()); 341 HeapObject* code = Code::GetCodeFromTargetAddress(rinfo->call_address());
282 MarkCompactCollector::MarkObject(code); 342 MarkCompactCollector::MarkObject(code);
283 } 343 }
284 344
285 private:
286 // Mark object pointed to by p. 345 // Mark object pointed to by p.
287 void MarkObjectByPointer(Object** p) { 346 INLINE(static void MarkObjectByPointer(Object** p)) {
288 if (!(*p)->IsHeapObject()) return; 347 if (!(*p)->IsHeapObject()) return;
289 HeapObject* object = ShortCircuitConsString(p); 348 HeapObject* object = ShortCircuitConsString(p);
290 MarkCompactCollector::MarkObject(object); 349 MarkCompactCollector::MarkObject(object);
291 } 350 }
292 351
293 // Tells whether the mark sweep collection will perform compaction.
294 bool IsCompacting() { return MarkCompactCollector::IsCompacting(); }
295
296 // Visit an unmarked object. 352 // Visit an unmarked object.
297 void VisitUnmarkedObject(HeapObject* obj) { 353 static inline void VisitUnmarkedObject(HeapObject* obj) {
298 #ifdef DEBUG 354 #ifdef DEBUG
299 ASSERT(Heap::Contains(obj)); 355 ASSERT(Heap::Contains(obj));
300 ASSERT(!obj->IsMarked()); 356 ASSERT(!obj->IsMarked());
301 #endif 357 #endif
302 Map* map = obj->map(); 358 Map* map = obj->map();
303 MarkCompactCollector::SetMark(obj); 359 MarkCompactCollector::SetMark(obj);
304 // Mark the map pointer and the body. 360 // Mark the map pointer and the body.
305 MarkCompactCollector::MarkObject(map); 361 MarkCompactCollector::MarkObject(map);
306 obj->IterateBody(map->instance_type(), obj->SizeFromMap(map), this); 362 IterateBody(map, obj);
307 } 363 }
308 364
309 // Visit all unmarked objects pointed to by [start, end). 365 // Visit all unmarked objects pointed to by [start, end).
310 // Returns false if the operation fails (lack of stack space). 366 // Returns false if the operation fails (lack of stack space).
311 inline bool VisitUnmarkedObjects(Object** start, Object** end) { 367 static inline bool VisitUnmarkedObjects(Object** start, Object** end) {
312 // Return false if we are close to the stack limit. 368 // Return false if we are close to the stack limit.
313 StackLimitCheck check; 369 StackLimitCheck check;
314 if (check.HasOverflowed()) return false; 370 if (check.HasOverflowed()) return false;
315 371
316 // Visit the unmarked objects. 372 // Visit the unmarked objects.
317 for (Object** p = start; p < end; p++) { 373 for (Object** p = start; p < end; p++) {
318 if (!(*p)->IsHeapObject()) continue; 374 if (!(*p)->IsHeapObject()) continue;
319 HeapObject* obj = HeapObject::cast(*p); 375 HeapObject* obj = HeapObject::cast(*p);
320 if (obj->IsMarked()) continue; 376 if (obj->IsMarked()) continue;
321 VisitUnmarkedObject(obj); 377 VisitUnmarkedObject(obj);
322 } 378 }
323 return true; 379 return true;
324 } 380 }
381
382 static inline void VisitExternalReference(Address* p) { }
383 static inline void VisitRuntimeEntry(RelocInfo* rinfo) { }
384
385 private:
386 class DataObjectVisitor {
387 public:
388 template<int size>
389 static void VisitSpecialized(Map* map, HeapObject* object) {
390 }
391
392 static void Visit(Map* map, HeapObject* object) {
393 }
394 };
395
396 typedef FlexibleBodyVisitor<StaticMarkingVisitor,
397 JSObject::BodyDescriptor,
398 void> JSObjectVisitor;
399
400 typedef FlexibleBodyVisitor<StaticMarkingVisitor,
401 StructBodyDescriptor,
402 void> StructObjectVisitor;
403
404 static void VisitCode(Map* map, HeapObject* object) {
405 reinterpret_cast<Code*>(object)->CodeIterateBody<StaticMarkingVisitor>();
406 }
407
408 typedef void (*Callback)(Map* map, HeapObject* object);
409
410 static VisitorDispatchTable<Callback> table_;
411 };
412
413
414 VisitorDispatchTable<StaticMarkingVisitor::Callback>
415 StaticMarkingVisitor::table_;
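
The core of the change: instead of virtual dispatch through ObjectVisitor for every object body, the marker looks up a function pointer in a table indexed by a visitor id derived from the object's map (the ids, table and body descriptors come from the new objects-visiting.h). A minimal standalone sketch of that dispatch-table pattern, with purely illustrative types and ids rather than V8's:

#include <cstdio>

// Hypothetical visitor ids and a plain function-pointer table; the real
// VisitorDispatchTable, StaticVisitorBase ids and BodyDescriptors are richer.
enum VisitorId { kVisitA, kVisitB, kVisitorIdCount };

struct Obj { VisitorId id; int payload; };

typedef void (*Callback)(Obj* obj);

static Callback table[kVisitorIdCount];

static void VisitA(Obj* obj) { std::printf("A: %d\n", obj->payload); }
static void VisitB(Obj* obj) { std::printf("B: %d\n", obj->payload); }

static void Initialize() {
  // Register one specialized visitor per id, once, at startup.
  table[kVisitA] = &VisitA;
  table[kVisitB] = &VisitB;
}

static void IterateBody(Obj* obj) {
  // One indirect call through the table replaces a virtual dispatch
  // (and an instance-type switch) per visited object.
  table[obj->id](obj);
}

int main() {
  Initialize();
  Obj a = { kVisitA, 1 };
  Obj b = { kVisitB, 2 };
  IterateBody(&a);
  IterateBody(&b);
}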
416
417
418 class MarkingVisitor : public ObjectVisitor {
419 public:
420 void VisitPointer(Object** p) {
421 StaticMarkingVisitor::VisitPointer(p);
422 }
423
424 void VisitPointers(Object** start, Object** end) {
425 StaticMarkingVisitor::VisitPointers(start, end);
426 }
427
428 void VisitCodeTarget(RelocInfo* rinfo) {
429 StaticMarkingVisitor::VisitCodeTarget(rinfo);
430 }
431
432 void VisitDebugTarget(RelocInfo* rinfo) {
433 StaticMarkingVisitor::VisitDebugTarget(rinfo);
434 }
325 }; 435 };
326 436
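MarkingVisitor survives only as a thin adapter: the fast path lives in StaticMarkingVisitor's static functions, and the virtual wrapper is kept for callers that still need an ObjectVisitor*, such as IteratePrefix below. A small sketch of that adapter idea, with made-up names (StaticImpl, Adapter, Visitor) standing in for the real classes:

#include <cstdio>

// Keep the fast path in static functions and provide a thin virtual wrapper
// only where a visitor pointer is required.
struct StaticImpl {
  static void VisitPointer(int** p) { std::printf("visit %d\n", **p); }
};

class Visitor {                       // stand-in for ObjectVisitor
 public:
  virtual ~Visitor() {}
  virtual void VisitPointer(int** p) = 0;
};

class Adapter : public Visitor {      // stand-in for MarkingVisitor
 public:
  virtual void VisitPointer(int** p) { StaticImpl::VisitPointer(p); }
};

int main() {
  int x = 42;
  int* px = &x;
  Adapter adapter;
  Visitor* v = &adapter;              // generic interfaces still work...
  v->VisitPointer(&px);               // ...by forwarding to the static code
}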
327 437
328 // Visitor class for marking heap roots. 438 // Visitor class for marking heap roots.
329 class RootMarkingVisitor : public ObjectVisitor { 439 class RootMarkingVisitor : public ObjectVisitor {
330 public: 440 public:
331 void VisitPointer(Object** p) { 441 void VisitPointer(Object** p) {
332 MarkObjectByPointer(p); 442 MarkObjectByPointer(p);
333 } 443 }
334 444
335 void VisitPointers(Object** start, Object** end) { 445 void VisitPointers(Object** start, Object** end) {
336 for (Object** p = start; p < end; p++) MarkObjectByPointer(p); 446 for (Object** p = start; p < end; p++) MarkObjectByPointer(p);
337 } 447 }
338 448
339 MarkingVisitor* stack_visitor() { return &stack_visitor_; }
340
341 private: 449 private:
342 MarkingVisitor stack_visitor_;
343
344 void MarkObjectByPointer(Object** p) { 450 void MarkObjectByPointer(Object** p) {
345 if (!(*p)->IsHeapObject()) return; 451 if (!(*p)->IsHeapObject()) return;
346 452
347 // Replace flat cons strings in place. 453 // Replace flat cons strings in place.
348 HeapObject* object = ShortCircuitConsString(p); 454 HeapObject* object = ShortCircuitConsString(p);
349 if (object->IsMarked()) return; 455 if (object->IsMarked()) return;
350 456
351 Map* map = object->map(); 457 Map* map = object->map();
352 // Mark the object. 458 // Mark the object.
353 MarkCompactCollector::SetMark(object); 459 MarkCompactCollector::SetMark(object);
460
354 // Mark the map pointer and body, and push them on the marking stack. 461 // Mark the map pointer and body, and push them on the marking stack.
355 MarkCompactCollector::MarkObject(map); 462 MarkCompactCollector::MarkObject(map);
356 object->IterateBody(map->instance_type(), object->SizeFromMap(map), 463 StaticMarkingVisitor::IterateBody(map, object);
357 &stack_visitor_);
358 464
359 // Mark all the objects reachable from the map and body. May leave 465 // Mark all the objects reachable from the map and body. May leave
360 // overflowed objects in the heap. 466 // overflowed objects in the heap.
361 MarkCompactCollector::EmptyMarkingStack(&stack_visitor_); 467 MarkCompactCollector::EmptyMarkingStack();
362 } 468 }
363 }; 469 };
364 470
365 471
366 // Helper class for pruning the symbol table. 472 // Helper class for pruning the symbol table.
367 class SymbolTableCleaner : public ObjectVisitor { 473 class SymbolTableCleaner : public ObjectVisitor {
368 public: 474 public:
369 SymbolTableCleaner() : pointers_removed_(0) { } 475 SymbolTableCleaner() : pointers_removed_(0) { }
370 476
371 virtual void VisitPointers(Object** start, Object** end) { 477 virtual void VisitPointers(Object** start, Object** end) {
(...skipping 46 matching lines...)
418 } 524 }
419 525
420 526
421 void MarkCompactCollector::MarkMapContents(Map* map) { 527 void MarkCompactCollector::MarkMapContents(Map* map) {
422 MarkDescriptorArray(reinterpret_cast<DescriptorArray*>( 528 MarkDescriptorArray(reinterpret_cast<DescriptorArray*>(
423 *HeapObject::RawField(map, Map::kInstanceDescriptorsOffset))); 529 *HeapObject::RawField(map, Map::kInstanceDescriptorsOffset)));
424 530
425 // Mark the Object* fields of the Map. 531 // Mark the Object* fields of the Map.
426 // Since the descriptor array has been marked already, it is fine 532 // Since the descriptor array has been marked already, it is fine
427 // that one of these fields contains a pointer to it. 533 // that one of these fields contains a pointer to it.
428 MarkingVisitor visitor; // Has no state or contents. 534 Object** start_slot = HeapObject::RawField(map,
429 visitor.VisitPointers(HeapObject::RawField(map, 535 Map::kPointerFieldsBeginOffset);
430 Map::kPointerFieldsBeginOffset), 536
431 HeapObject::RawField(map, 537 Object** end_slot = HeapObject::RawField(map, Map::kPointerFieldsEndOffset);
432 Map::kPointerFieldsEndOffset)); 538
539 StaticMarkingVisitor::VisitPointers(start_slot, end_slot);
433 } 540 }
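
MarkMapContents now feeds StaticMarkingVisitor::VisitPointers a raw slot range computed from byte offsets instead of routing through a MarkingVisitor instance. A sketch of that slot-range idiom, using an illustrative FakeMap layout rather than V8's Map offsets:

#include <cstddef>
#include <cstdio>

// Illustrative layout: a non-pointer header word followed by pointer fields.
struct FakeMap {
  void* header_word;       // stands in for the Map's non-pointer fields
  void* pointer_field_a;
  void* pointer_field_b;
};

static void** RawField(FakeMap* obj, size_t byte_offset) {
  return reinterpret_cast<void**>(reinterpret_cast<char*>(obj) + byte_offset);
}

static void VisitPointers(void** start, void** end) {
  for (void** slot = start; slot < end; slot++) {
    std::printf("slot %p holds %p\n", static_cast<void*>(slot), *slot);
  }
}

int main() {
  FakeMap map = { 0, &map, 0 };
  // Visit only the pointer fields, by [begin, end) byte offset.
  VisitPointers(RawField(&map, offsetof(FakeMap, pointer_field_a)),
                RawField(&map, sizeof(FakeMap)));
}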
434 541
435 542
436 void MarkCompactCollector::MarkDescriptorArray( 543 void MarkCompactCollector::MarkDescriptorArray(
437 DescriptorArray* descriptors) { 544 DescriptorArray* descriptors) {
438 if (descriptors->IsMarked()) return; 545 if (descriptors->IsMarked()) return;
439 // Empty descriptor array is marked as a root before any maps are marked. 546 // Empty descriptor array is marked as a root before any maps are marked.
440 ASSERT(descriptors != Heap::raw_unchecked_empty_descriptor_array()); 547 ASSERT(descriptors != Heap::raw_unchecked_empty_descriptor_array());
441 SetMark(descriptors); 548 SetMark(descriptors);
442 549
(...skipping 79 matching lines...)
522 } 629 }
523 630
524 631
525 void MarkCompactCollector::MarkSymbolTable() { 632 void MarkCompactCollector::MarkSymbolTable() {
526 SymbolTable* symbol_table = Heap::raw_unchecked_symbol_table(); 633 SymbolTable* symbol_table = Heap::raw_unchecked_symbol_table();
527 // Mark the symbol table itself. 634 // Mark the symbol table itself.
528 SetMark(symbol_table); 635 SetMark(symbol_table);
529 // Explicitly mark the prefix. 636 // Explicitly mark the prefix.
530 MarkingVisitor marker; 637 MarkingVisitor marker;
531 symbol_table->IteratePrefix(&marker); 638 symbol_table->IteratePrefix(&marker);
532 ProcessMarkingStack(&marker); 639 ProcessMarkingStack();
533 } 640 }
534 641
535 642
536 void MarkCompactCollector::MarkRoots(RootMarkingVisitor* visitor) { 643 void MarkCompactCollector::MarkRoots(RootMarkingVisitor* visitor) {
537 // Mark the heap roots including global variables, stack variables, 644 // Mark the heap roots including global variables, stack variables,
538 // etc., and all objects reachable from them. 645 // etc., and all objects reachable from them.
539 Heap::IterateStrongRoots(visitor, VISIT_ONLY_STRONG); 646 Heap::IterateStrongRoots(visitor, VISIT_ONLY_STRONG);
540 647
541 // Handle the symbol table specially. 648 // Handle the symbol table specially.
542 MarkSymbolTable(); 649 MarkSymbolTable();
543 650
544 // There may be overflowed objects in the heap. Visit them now. 651 // There may be overflowed objects in the heap. Visit them now.
545 while (marking_stack.overflowed()) { 652 while (marking_stack.overflowed()) {
546 RefillMarkingStack(); 653 RefillMarkingStack();
547 EmptyMarkingStack(visitor->stack_visitor()); 654 EmptyMarkingStack();
548 } 655 }
549 } 656 }
550 657
551 658
552 void MarkCompactCollector::MarkObjectGroups() { 659 void MarkCompactCollector::MarkObjectGroups() {
553 List<ObjectGroup*>* object_groups = GlobalHandles::ObjectGroups(); 660 List<ObjectGroup*>* object_groups = GlobalHandles::ObjectGroups();
554 661
555 for (int i = 0; i < object_groups->length(); i++) { 662 for (int i = 0; i < object_groups->length(); i++) {
556 ObjectGroup* entry = object_groups->at(i); 663 ObjectGroup* entry = object_groups->at(i);
557 if (entry == NULL) continue; 664 if (entry == NULL) continue;
(...skipping 22 matching lines...)
580 delete object_groups->at(i); 687 delete object_groups->at(i);
581 object_groups->at(i) = NULL; 688 object_groups->at(i) = NULL;
582 } 689 }
583 } 690 }
584 691
585 692
586 // Mark all objects reachable from the objects on the marking stack. 693 // Mark all objects reachable from the objects on the marking stack.
587 // Before: the marking stack contains zero or more heap object pointers. 694 // Before: the marking stack contains zero or more heap object pointers.
588 // After: the marking stack is empty, and all objects reachable from the 695 // After: the marking stack is empty, and all objects reachable from the
589 // marking stack have been marked, or are overflowed in the heap. 696 // marking stack have been marked, or are overflowed in the heap.
590 void MarkCompactCollector::EmptyMarkingStack(MarkingVisitor* visitor) { 697 void MarkCompactCollector::EmptyMarkingStack() {
591 while (!marking_stack.is_empty()) { 698 while (!marking_stack.is_empty()) {
592 HeapObject* object = marking_stack.Pop(); 699 HeapObject* object = marking_stack.Pop();
593 ASSERT(object->IsHeapObject()); 700 ASSERT(object->IsHeapObject());
594 ASSERT(Heap::Contains(object)); 701 ASSERT(Heap::Contains(object));
595 ASSERT(object->IsMarked()); 702 ASSERT(object->IsMarked());
596 ASSERT(!object->IsOverflowed()); 703 ASSERT(!object->IsOverflowed());
597 704
598 // Because the object is marked, we have to recover the original map 705 // Because the object is marked, we have to recover the original map
599 // pointer and use it to mark the object's body. 706 // pointer and use it to mark the object's body.
600 MapWord map_word = object->map_word(); 707 MapWord map_word = object->map_word();
601 map_word.ClearMark(); 708 map_word.ClearMark();
602 Map* map = map_word.ToMap(); 709 Map* map = map_word.ToMap();
603 MarkObject(map); 710 MarkObject(map);
604 object->IterateBody(map->instance_type(), object->SizeFromMap(map), 711
605 visitor); 712 StaticMarkingVisitor::IterateBody(map, object);
606 } 713 }
607 } 714 }
608 715
609 716
610 // Sweep the heap for overflowed objects, clear their overflow bits, and 717 // Sweep the heap for overflowed objects, clear their overflow bits, and
611 // push them on the marking stack. Stop early if the marking stack fills 718 // push them on the marking stack. Stop early if the marking stack fills
612 // before sweeping completes. If sweeping completes, there are no remaining 719 // before sweeping completes. If sweeping completes, there are no remaining
613 // overflowed objects in the heap so the overflow flag on the marking stack 720 // overflowed objects in the heap so the overflow flag on the marking stack
614 // is cleared. 721 // is cleared.
615 void MarkCompactCollector::RefillMarkingStack() { 722 void MarkCompactCollector::RefillMarkingStack() {
(...skipping 29 matching lines...)
645 if (marking_stack.is_full()) return; 752 if (marking_stack.is_full()) return;
646 753
647 marking_stack.clear_overflowed(); 754 marking_stack.clear_overflowed();
648 } 755 }
649 756
650 757
651 // Mark all objects reachable (transitively) from objects on the marking 758 // Mark all objects reachable (transitively) from objects on the marking
652 // stack. Before: the marking stack contains zero or more heap object 759 // stack. Before: the marking stack contains zero or more heap object
653 // pointers. After: the marking stack is empty and there are no overflowed 760 // pointers. After: the marking stack is empty and there are no overflowed
654 // objects in the heap. 761 // objects in the heap.
655 void MarkCompactCollector::ProcessMarkingStack(MarkingVisitor* visitor) { 762 void MarkCompactCollector::ProcessMarkingStack() {
656 EmptyMarkingStack(visitor); 763 EmptyMarkingStack();
657 while (marking_stack.overflowed()) { 764 while (marking_stack.overflowed()) {
658 RefillMarkingStack(); 765 RefillMarkingStack();
659 EmptyMarkingStack(visitor); 766 EmptyMarkingStack();
660 } 767 }
661 } 768 }
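
With the visitor now static, EmptyMarkingStack, RefillMarkingStack and ProcessMarkingStack no longer thread a MarkingVisitor* through; the drain/refill fixed point itself is unchanged. A compilable sketch of that fixed point, with an illustrative Node graph and a deliberately tiny stack limit in place of the real marking stack:

#include <cstdio>
#include <vector>

struct Node { bool marked; bool overflowed; std::vector<Node*> children; };

static std::vector<Node*> marking_stack;
static bool stack_overflowed = false;
static const size_t kStackLimit = 4;   // tiny on purpose, to force overflow

static void Push(Node* n) {
  if (marking_stack.size() >= kStackLimit) {
    n->overflowed = true;              // remember the object itself instead
    stack_overflowed = true;
  } else {
    marking_stack.push_back(n);
  }
}

static void MarkAndPush(Node* n) {
  if (!n->marked) { n->marked = true; Push(n); }
}

static void EmptyMarkingStack() {
  while (!marking_stack.empty()) {
    Node* n = marking_stack.back();
    marking_stack.pop_back();
    for (size_t i = 0; i < n->children.size(); i++) MarkAndPush(n->children[i]);
  }
}

static void RefillMarkingStack(std::vector<Node*>& heap) {
  // Rescan the heap for overflowed objects and push them back.
  for (size_t i = 0; i < heap.size(); i++) {
    if (!heap[i]->overflowed) continue;
    heap[i]->overflowed = false;
    marking_stack.push_back(heap[i]);
    if (marking_stack.size() >= kStackLimit) return;  // stop early, stay overflowed
  }
  stack_overflowed = false;            // rescan completed; nothing left behind
}

static void ProcessMarkingStack(std::vector<Node*>& heap) {
  EmptyMarkingStack();
  while (stack_overflowed) {
    RefillMarkingStack(heap);
    EmptyMarkingStack();
  }
}

int main() {
  Node nodes[8] = {};
  std::vector<Node*> heap;
  for (int i = 0; i < 8; i++) heap.push_back(&nodes[i]);
  for (int i = 1; i < 8; i++) nodes[0].children.push_back(&nodes[i]);
  MarkAndPush(&nodes[0]);
  ProcessMarkingStack(heap);
  for (int i = 0; i < 8; i++)
    std::printf("node %d marked: %d\n", i, static_cast<int>(nodes[i].marked));
}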
662 769
663 770
664 void MarkCompactCollector::ProcessObjectGroups(MarkingVisitor* visitor) { 771 void MarkCompactCollector::ProcessObjectGroups() {
665 bool work_to_do = true; 772 bool work_to_do = true;
666 ASSERT(marking_stack.is_empty()); 773 ASSERT(marking_stack.is_empty());
667 while (work_to_do) { 774 while (work_to_do) {
668 MarkObjectGroups(); 775 MarkObjectGroups();
669 work_to_do = !marking_stack.is_empty(); 776 work_to_do = !marking_stack.is_empty();
670 ProcessMarkingStack(visitor); 777 ProcessMarkingStack();
671 } 778 }
672 } 779 }
673 780
674 781
675 void MarkCompactCollector::MarkLiveObjects() { 782 void MarkCompactCollector::MarkLiveObjects() {
676 GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_MARK); 783 GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_MARK);
677 #ifdef DEBUG 784 #ifdef DEBUG
678 ASSERT(state_ == PREPARE_GC); 785 ASSERT(state_ == PREPARE_GC);
679 state_ = MARK_LIVE_OBJECTS; 786 state_ = MARK_LIVE_OBJECTS;
680 #endif 787 #endif
681 // The to space contains live objects, the from space is used as a marking 788 // The to space contains live objects, the from space is used as a marking
682 // stack. 789 // stack.
683 marking_stack.Initialize(Heap::new_space()->FromSpaceLow(), 790 marking_stack.Initialize(Heap::new_space()->FromSpaceLow(),
684 Heap::new_space()->FromSpaceHigh()); 791 Heap::new_space()->FromSpaceHigh());
685 792
686 ASSERT(!marking_stack.overflowed()); 793 ASSERT(!marking_stack.overflowed());
687 794
688 RootMarkingVisitor root_visitor; 795 RootMarkingVisitor root_visitor;
689 MarkRoots(&root_visitor); 796 MarkRoots(&root_visitor);
690 797
691 // The objects reachable from the roots are marked, yet unreachable 798 // The objects reachable from the roots are marked, yet unreachable
692 // objects are unmarked. Mark objects reachable from object groups 799 // objects are unmarked. Mark objects reachable from object groups
693 // containing at least one marked object, and continue until no new 800 // containing at least one marked object, and continue until no new
694 // objects are reachable from the object groups. 801 // objects are reachable from the object groups.
695 ProcessObjectGroups(root_visitor.stack_visitor()); 802 ProcessObjectGroups();
696 803
697 // The objects reachable from the roots or object groups are marked, 804 // The objects reachable from the roots or object groups are marked,
698 // yet unreachable objects are unmarked. Mark objects reachable 805 // yet unreachable objects are unmarked. Mark objects reachable
699 // only from weak global handles. 806 // only from weak global handles.
700 // 807 //
701 // First we identify nonlive weak handles and mark them as pending 808 // First we identify nonlive weak handles and mark them as pending
702 // destruction. 809 // destruction.
703 GlobalHandles::IdentifyWeakHandles(&IsUnmarkedHeapObject); 810 GlobalHandles::IdentifyWeakHandles(&IsUnmarkedHeapObject);
704 // Then we mark the objects and process the transitive closure. 811 // Then we mark the objects and process the transitive closure.
705 GlobalHandles::IterateWeakRoots(&root_visitor); 812 GlobalHandles::IterateWeakRoots(&root_visitor);
706 while (marking_stack.overflowed()) { 813 while (marking_stack.overflowed()) {
707 RefillMarkingStack(); 814 RefillMarkingStack();
708 EmptyMarkingStack(root_visitor.stack_visitor()); 815 EmptyMarkingStack();
709 } 816 }
710 817
711 // Repeat the object groups to mark unmarked groups reachable from the 818 // Repeat the object groups to mark unmarked groups reachable from the
712 // weak roots. 819 // weak roots.
713 ProcessObjectGroups(root_visitor.stack_visitor()); 820 ProcessObjectGroups();
714 821
715 // Prune the symbol table removing all symbols only pointed to by the 822 // Prune the symbol table removing all symbols only pointed to by the
716 // symbol table. Cannot use symbol_table() here because the symbol 823 // symbol table. Cannot use symbol_table() here because the symbol
717 // table is marked. 824 // table is marked.
718 SymbolTable* symbol_table = Heap::raw_unchecked_symbol_table(); 825 SymbolTable* symbol_table = Heap::raw_unchecked_symbol_table();
719 SymbolTableCleaner v; 826 SymbolTableCleaner v;
720 symbol_table->IterateElements(&v); 827 symbol_table->IterateElements(&v);
721 symbol_table->ElementsRemoved(v.PointersRemoved()); 828 symbol_table->ElementsRemoved(v.PointersRemoved());
722 ExternalStringTable::Iterate(&v); 829 ExternalStringTable::Iterate(&v);
723 ExternalStringTable::CleanUp(); 830 ExternalStringTable::CleanUp();
(...skipping 360 matching lines...)
1084 if (to_old_space) { 1191 if (to_old_space) {
1085 Heap::CopyBlockToOldSpaceAndUpdateRegionMarks(dst, src, size); 1192 Heap::CopyBlockToOldSpaceAndUpdateRegionMarks(dst, src, size);
1086 } else { 1193 } else {
1087 Heap::CopyBlock(dst, src, size); 1194 Heap::CopyBlock(dst, src, size);
1088 } 1195 }
1089 1196
1090 Memory::Address_at(src) = dst; 1197 Memory::Address_at(src) = dst;
1091 } 1198 }
1092 1199
1093 1200
1201 class StaticPointersToNewGenUpdatingVisitor : public
1202 StaticNewSpaceVisitor<StaticPointersToNewGenUpdatingVisitor> {
1203 public:
1204 static inline void VisitPointer(Object** p) {
1205 if (!(*p)->IsHeapObject()) return;
1206
1207 HeapObject* obj = HeapObject::cast(*p);
1208 Address old_addr = obj->address();
1209
1210 if (Heap::new_space()->Contains(obj)) {
1211 ASSERT(Heap::InFromSpace(*p));
1212 *p = HeapObject::FromAddress(Memory::Address_at(old_addr));
1213 }
1214 }
1215 };
1216
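
StaticPointersToNewGenUpdatingVisitor::VisitPointer rewrites a stale pointer by loading the forwarding address that MigrateObject stored in the first word of the evacuated copy (the Memory::Address_at(src) = dst above). A sketch of that forwarding idea with an illustrative FakeObject layout, not V8's HeapObject layout:

#include <cstdio>

struct FakeObject {
  void* first_word;   // holds the forwarding address after evacuation
  int payload;
};

static void UpdatePointer(FakeObject** slot, char* from_space, size_t size) {
  char* addr = reinterpret_cast<char*>(*slot);
  if (addr >= from_space && addr < from_space + size) {
    // The object was evacuated; follow the forwarding address.
    *slot = reinterpret_cast<FakeObject*>((*slot)->first_word);
  }
}

int main() {
  FakeObject from_space_copy = { 0, 0 };
  FakeObject to_space_copy = { 0, 42 };

  // "Evacuate": the old copy's first word now points at the new copy.
  from_space_copy.first_word = &to_space_copy;

  FakeObject* stale = &from_space_copy;
  UpdatePointer(&stale,
                reinterpret_cast<char*>(&from_space_copy),
                sizeof(FakeObject));
  std::printf("payload after update: %d\n", stale->payload);  // prints 42
}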
1217
1094 // Visitor for updating pointers from live objects in old spaces to new space. 1218 // Visitor for updating pointers from live objects in old spaces to new space.
1095 // It does not expect to encounter pointers to dead objects. 1219 // It does not expect to encounter pointers to dead objects.
1096 class PointersToNewGenUpdatingVisitor: public ObjectVisitor { 1220 class PointersToNewGenUpdatingVisitor: public ObjectVisitor {
1097 public: 1221 public:
1098 void VisitPointer(Object** p) { 1222 void VisitPointer(Object** p) {
1099 UpdatePointer(p); 1223 StaticPointersToNewGenUpdatingVisitor::VisitPointer(p);
1100 } 1224 }
1101 1225
1102 void VisitPointers(Object** start, Object** end) { 1226 void VisitPointers(Object** start, Object** end) {
1103 for (Object** p = start; p < end; p++) UpdatePointer(p); 1227 for (Object** p = start; p < end; p++) {
1228 StaticPointersToNewGenUpdatingVisitor::VisitPointer(p);
1229 }
1104 } 1230 }
1105 1231
1106 void VisitCodeTarget(RelocInfo* rinfo) { 1232 void VisitCodeTarget(RelocInfo* rinfo) {
1107 ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode())); 1233 ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode()));
1108 Object* target = Code::GetCodeFromTargetAddress(rinfo->target_address()); 1234 Object* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
1109 VisitPointer(&target); 1235 VisitPointer(&target);
1110 rinfo->set_target_address(Code::cast(target)->instruction_start()); 1236 rinfo->set_target_address(Code::cast(target)->instruction_start());
1111 } 1237 }
1112 1238
1113 void VisitDebugTarget(RelocInfo* rinfo) { 1239 void VisitDebugTarget(RelocInfo* rinfo) {
1114 ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) && 1240 ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) &&
1115 rinfo->IsPatchedReturnSequence()) || 1241 rinfo->IsPatchedReturnSequence()) ||
1116 (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) && 1242 (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
1117 rinfo->IsPatchedDebugBreakSlotSequence())); 1243 rinfo->IsPatchedDebugBreakSlotSequence()));
1118 Object* target = Code::GetCodeFromTargetAddress(rinfo->call_address()); 1244 Object* target = Code::GetCodeFromTargetAddress(rinfo->call_address());
1119 VisitPointer(&target); 1245 VisitPointer(&target);
1120 rinfo->set_call_address(Code::cast(target)->instruction_start()); 1246 rinfo->set_call_address(Code::cast(target)->instruction_start());
1121 } 1247 }
1122
1123 private:
1124 void UpdatePointer(Object** p) {
1125 if (!(*p)->IsHeapObject()) return;
1126
1127 HeapObject* obj = HeapObject::cast(*p);
1128 Address old_addr = obj->address();
1129
1130 if (Heap::new_space()->Contains(obj)) {
1131 ASSERT(Heap::InFromSpace(*p));
1132 *p = HeapObject::FromAddress(Memory::Address_at(old_addr));
1133 }
1134 }
1135 }; 1248 };
1136 1249
1137 1250
1138 // Visitor for updating pointers from live objects in old spaces to new space. 1251 // Visitor for updating pointers from live objects in old spaces to new space.
1139 // It can encounter pointers to dead objects in new space when traversing map 1252 // It can encounter pointers to dead objects in new space when traversing map
1140 // space (see comment for MigrateObject). 1253 // space (see comment for MigrateObject).
1141 static void UpdatePointerToNewGen(HeapObject** p) { 1254 static void UpdatePointerToNewGen(HeapObject** p) {
1142 if (!(*p)->IsHeapObject()) return; 1255 if (!(*p)->IsHeapObject()) return;
1143 1256
1144 Address old_addr = (*p)->address(); 1257 Address old_addr = (*p)->address();
(...skipping 96 matching lines...)
1241 } else { 1354 } else {
1242 size = object->Size(); 1355 size = object->Size();
1243 Memory::Address_at(current) = NULL; 1356 Memory::Address_at(current) = NULL;
1244 } 1357 }
1245 } 1358 }
1246 1359
1247 // Second pass: find pointers to new space and update them. 1360 // Second pass: find pointers to new space and update them.
1248 PointersToNewGenUpdatingVisitor updating_visitor; 1361 PointersToNewGenUpdatingVisitor updating_visitor;
1249 1362
1250 // Update pointers in to space. 1363 // Update pointers in to space.
1251 HeapObject* object; 1364 Address current = space->bottom();
1252 for (Address current = space->bottom(); 1365 while (current < space->top()) {
1253 current < space->top(); 1366 HeapObject* object = HeapObject::FromAddress(current);
1254 current += object->Size()) { 1367 current +=
1255 object = HeapObject::FromAddress(current); 1368 StaticPointersToNewGenUpdatingVisitor::IterateBody(object->map(),
1256 1369 object);
1257 object->IterateBody(object->map()->instance_type(),
1258 object->Size(),
1259 &updating_visitor);
1260 } 1370 }
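
The rewritten to-space walk relies on IterateBody returning the object's size, so the loop can advance current to the next object in a single pass. A sketch of that size-returning walk over a packed buffer of illustrative FakeHeader objects:

#include <cstdio>

// Each fake object starts with its own size in words; the visitor returns
// the size in bytes so the caller can bump the cursor.
struct FakeHeader { int size_in_words; };

static int IterateBody(char* addr) {
  FakeHeader* obj = reinterpret_cast<FakeHeader*>(addr);
  std::printf("visiting object at %p, %d words\n",
              static_cast<void*>(addr), obj->size_in_words);
  return obj->size_in_words * static_cast<int>(sizeof(int));
}

int main() {
  // Three fake objects packed back to back: 2 words, 3 words, 1 word.
  int space[] = { 2, 0,  3, 0, 0,  1 };
  char* current = reinterpret_cast<char*>(space);
  char* top = reinterpret_cast<char*>(space) + sizeof(space);
  while (current < top) {
    current += IterateBody(current);  // the return value advances the walk
  }
}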
1261 1371
1262 // Update roots. 1372 // Update roots.
1263 Heap::IterateRoots(&updating_visitor, VISIT_ALL_IN_SCAVENGE); 1373 Heap::IterateRoots(&updating_visitor, VISIT_ALL_IN_SCAVENGE);
1264 1374
1265 // Update pointers in old spaces. 1375 // Update pointers in old spaces.
1266 Heap::IterateDirtyRegions(Heap::old_pointer_space(), 1376 Heap::IterateDirtyRegions(Heap::old_pointer_space(),
1267 &Heap::IteratePointersInDirtyRegion, 1377 &Heap::IteratePointersInDirtyRegion,
1268 &UpdatePointerToNewGen, 1378 &UpdatePointerToNewGen,
1269 Heap::WATERMARK_SHOULD_BE_VALID); 1379 Heap::WATERMARK_SHOULD_BE_VALID);
(...skipping 481 matching lines...)
1751 ASSERT(!IsCompacting()); 1861 ASSERT(!IsCompacting());
1752 // Noncompacting collections simply sweep the spaces to clear the mark 1862 // Noncompacting collections simply sweep the spaces to clear the mark
1753 // bits and free the nonlive blocks (for old and map spaces). We sweep 1863 // bits and free the nonlive blocks (for old and map spaces). We sweep
1754 // the map space last because freeing non-live maps overwrites them and 1864 // the map space last because freeing non-live maps overwrites them and
1755 // the other spaces rely on possibly non-live maps to get the sizes for 1865 // the other spaces rely on possibly non-live maps to get the sizes for
1756 // non-live objects. 1866 // non-live objects.
1757 SweepSpace(Heap::old_pointer_space(), &DeallocateOldPointerBlock); 1867 SweepSpace(Heap::old_pointer_space(), &DeallocateOldPointerBlock);
1758 SweepSpace(Heap::old_data_space(), &DeallocateOldDataBlock); 1868 SweepSpace(Heap::old_data_space(), &DeallocateOldDataBlock);
1759 SweepSpace(Heap::code_space(), &DeallocateCodeBlock); 1869 SweepSpace(Heap::code_space(), &DeallocateCodeBlock);
1760 SweepSpace(Heap::cell_space(), &DeallocateCellBlock); 1870 SweepSpace(Heap::cell_space(), &DeallocateCellBlock);
1761 SweepNewSpace(Heap::new_space()); 1871 { GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_SWEEP_NEWSPACE);
1872 SweepNewSpace(Heap::new_space());
1873 }
1762 SweepSpace(Heap::map_space(), &DeallocateMapBlock); 1874 SweepSpace(Heap::map_space(), &DeallocateMapBlock);
1763 1875
1764 Heap::IterateDirtyRegions(Heap::map_space(), 1876 Heap::IterateDirtyRegions(Heap::map_space(),
1765 &Heap::IteratePointersInDirtyMapsRegion, 1877 &Heap::IteratePointersInDirtyMapsRegion,
1766 &UpdatePointerToNewGen, 1878 &UpdatePointerToNewGen,
1767 Heap::WATERMARK_SHOULD_BE_VALID); 1879 Heap::WATERMARK_SHOULD_BE_VALID);
1768 1880
1769 int live_maps_size = Heap::map_space()->Size(); 1881 int live_maps_size = Heap::map_space()->Size();
1770 int live_maps = live_maps_size / Map::kSize; 1882 int live_maps = live_maps_size / Map::kSize;
1771 ASSERT(live_map_objects_size_ == live_maps_size); 1883 ASSERT(live_map_objects_size_ == live_maps_size);
(...skipping 548 matching lines...)
2320 void MarkCompactCollector::ReportDeleteIfNeeded(HeapObject* obj) { 2432 void MarkCompactCollector::ReportDeleteIfNeeded(HeapObject* obj) {
2321 #ifdef ENABLE_LOGGING_AND_PROFILING 2433 #ifdef ENABLE_LOGGING_AND_PROFILING
2322 if (obj->IsCode()) { 2434 if (obj->IsCode()) {
2323 PROFILE(CodeDeleteEvent(obj->address())); 2435 PROFILE(CodeDeleteEvent(obj->address()));
2324 } else if (obj->IsJSFunction()) { 2436 } else if (obj->IsJSFunction()) {
2325 PROFILE(FunctionDeleteEvent(obj->address())); 2437 PROFILE(FunctionDeleteEvent(obj->address()));
2326 } 2438 }
2327 #endif 2439 #endif
2328 } 2440 }
2329 2441
2442
2443 void MarkCompactCollector::Initialize() {
2444 StaticPointersToNewGenUpdatingVisitor::Initialize();
2445 StaticMarkingVisitor::Initialize();
2446 }
2447
2448
2330 } } // namespace v8::internal 2449 } } // namespace v8::internal