Chromium Code Reviews

Side by Side Diff: src/mark-compact.cc

Issue 6760025: Cleanup usage of HEAP in mark-compact. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Merge. Created 9 years, 8 months ago
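In short, the cleanup is mechanical: member functions of MarkCompactCollector and the static marking visitors stop reaching for the process-wide HEAP macro (or the raw heap_ field) and go through the collector's heap() accessor instead, passing a Heap* explicitly to the static helpers that need one. A minimal sketch of the pattern, assuming the heap() accessor declared alongside the collector in src/mark-compact.h (illustrative only, not the patch itself):

  class MarkCompactCollector {
   public:
    Heap* heap() const { return heap_; }         // accessor the patch switches to

    void Finish() {
      // Before: HEAP->isolate()->stub_cache()->Clear();
      heap()->isolate()->stub_cache()->Clear();  // after: no global macro
    }

   private:
    Heap* heap_;                                 // heap of the collector's isolate
  };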
1 // Copyright 2006-2008 the V8 project authors. All rights reserved. 1 // Copyright 2006-2008 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 68 matching lines...)
79 MarkLiveObjects(); 79 MarkLiveObjects();
80 80
81 if (FLAG_collect_maps) ClearNonLiveTransitions(); 81 if (FLAG_collect_maps) ClearNonLiveTransitions();
82 82
83 SweepLargeObjectSpace(); 83 SweepLargeObjectSpace();
84 84
85 if (IsCompacting()) { 85 if (IsCompacting()) {
86 GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_COMPACT); 86 GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_COMPACT);
87 EncodeForwardingAddresses(); 87 EncodeForwardingAddresses();
88 88
89 heap_->MarkMapPointersAsEncoded(true); 89 heap()->MarkMapPointersAsEncoded(true);
90 UpdatePointers(); 90 UpdatePointers();
91 heap_->MarkMapPointersAsEncoded(false); 91 heap()->MarkMapPointersAsEncoded(false);
92 heap_->isolate()->pc_to_code_cache()->Flush(); 92 heap()->isolate()->pc_to_code_cache()->Flush();
93 93
94 RelocateObjects(); 94 RelocateObjects();
95 } else { 95 } else {
96 SweepSpaces(); 96 SweepSpaces();
97 heap_->isolate()->pc_to_code_cache()->Flush(); 97 heap()->isolate()->pc_to_code_cache()->Flush();
98 } 98 }
99 99
100 Finish(); 100 Finish();
101 101
102 // Save the count of marked objects remaining after the collection and 102 // Save the count of marked objects remaining after the collection and
103 // null out the GC tracer. 103 // null out the GC tracer.
104 previous_marked_count_ = tracer_->marked_count(); 104 previous_marked_count_ = tracer_->marked_count();
105 ASSERT(previous_marked_count_ == 0); 105 ASSERT(previous_marked_count_ == 0);
106 tracer_ = NULL; 106 tracer_ = NULL;
107 } 107 }
108 108
109 109
110 void MarkCompactCollector::Prepare(GCTracer* tracer) { 110 void MarkCompactCollector::Prepare(GCTracer* tracer) {
111 // Rather than passing the tracer around we stash it in a static member 111 // Rather than passing the tracer around we stash it in a static member
112 // variable. 112 // variable.
113 tracer_ = tracer; 113 tracer_ = tracer;
114 114
115 #ifdef DEBUG 115 #ifdef DEBUG
116 ASSERT(state_ == IDLE); 116 ASSERT(state_ == IDLE);
117 state_ = PREPARE_GC; 117 state_ = PREPARE_GC;
118 #endif 118 #endif
119 ASSERT(!FLAG_always_compact || !FLAG_never_compact); 119 ASSERT(!FLAG_always_compact || !FLAG_never_compact);
120 120
121 compacting_collection_ = 121 compacting_collection_ =
122 FLAG_always_compact || force_compaction_ || compact_on_next_gc_; 122 FLAG_always_compact || force_compaction_ || compact_on_next_gc_;
123 compact_on_next_gc_ = false; 123 compact_on_next_gc_ = false;
124 124
125 if (FLAG_never_compact) compacting_collection_ = false; 125 if (FLAG_never_compact) compacting_collection_ = false;
126 if (!HEAP->map_space()->MapPointersEncodable()) 126 if (!heap()->map_space()->MapPointersEncodable())
127 compacting_collection_ = false; 127 compacting_collection_ = false;
128 if (FLAG_collect_maps) CreateBackPointers(); 128 if (FLAG_collect_maps) CreateBackPointers();
129 #ifdef ENABLE_GDB_JIT_INTERFACE 129 #ifdef ENABLE_GDB_JIT_INTERFACE
130 if (FLAG_gdbjit) { 130 if (FLAG_gdbjit) {
131 // If GDBJIT interface is active disable compaction. 131 // If GDBJIT interface is active disable compaction.
132 compacting_collection_ = false; 132 compacting_collection_ = false;
133 } 133 }
134 #endif 134 #endif
135 135
136 PagedSpaces spaces; 136 PagedSpaces spaces;
(...skipping 17 matching lines...)
154 154
155 void MarkCompactCollector::Finish() { 155 void MarkCompactCollector::Finish() {
156 #ifdef DEBUG 156 #ifdef DEBUG
157 ASSERT(state_ == SWEEP_SPACES || state_ == RELOCATE_OBJECTS); 157 ASSERT(state_ == SWEEP_SPACES || state_ == RELOCATE_OBJECTS);
158 state_ = IDLE; 158 state_ = IDLE;
159 #endif 159 #endif
160 // The stub cache is not traversed during GC; clear the cache to 160 // The stub cache is not traversed during GC; clear the cache to
161 // force lazy re-initialization of it. This must be done after the 161 // force lazy re-initialization of it. This must be done after the
162 // GC, because it relies on the new address of certain old space 162 // GC, because it relies on the new address of certain old space
163 // objects (empty string, illegal builtin). 163 // objects (empty string, illegal builtin).
164 heap_->isolate()->stub_cache()->Clear(); 164 heap()->isolate()->stub_cache()->Clear();
165 165
166 heap_->external_string_table_.CleanUp(); 166 heap()->external_string_table_.CleanUp();
167 167
168 // If we've just compacted old space there's no reason to check the 168 // If we've just compacted old space there's no reason to check the
169 // fragmentation limit. Just return. 169 // fragmentation limit. Just return.
170 if (HasCompacted()) return; 170 if (HasCompacted()) return;
171 171
172 // We compact the old generation on the next GC if it has gotten too 172 // We compact the old generation on the next GC if it has gotten too
173 // fragmented (i.e., we could recover an expected amount of space by 173 // fragmented (i.e., we could recover an expected amount of space by
174 // reclaiming the waste and free list blocks). 174 // reclaiming the waste and free list blocks).
175 static const int kFragmentationLimit = 15; // Percent. 175 static const int kFragmentationLimit = 15; // Percent.
176 static const int kFragmentationAllowed = 1 * MB; // Absolute. 176 static const int kFragmentationAllowed = 1 * MB; // Absolute.
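As a hedged reading of those two constants (the actual check sits in the elided lines below; the figures here are hypothetical): recoverable space is the waste plus free-list blocks across the old-generation spaces, and compaction is requested only when that amount is both a large enough fraction of the space in use and a large enough absolute size.

  // Illustrative arithmetic only -- not the patch's code.
  // Suppose the old-generation spaces report:
  //   old_gen_used        = 40 * MB   // bytes currently in use (hypothetical)
  //   old_gen_recoverable =  8 * MB   // waste + free-list blocks (hypothetical)
  // Fragmentation = 100 * 8 / 40 = 20 percent.
  // Since 20 > kFragmentationLimit (15) and 8 MB > kFragmentationAllowed (1 MB),
  // the collector would schedule a compacting collection:
  //   compact_on_next_gc_ = true;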
(...skipping 272 matching lines...)
449 INLINE(static void VisitPointers(Heap* heap, Object** start, Object** end)) { 449 INLINE(static void VisitPointers(Heap* heap, Object** start, Object** end)) {
450 // Mark all objects pointed to in [start, end). 450 // Mark all objects pointed to in [start, end).
451 const int kMinRangeForMarkingRecursion = 64; 451 const int kMinRangeForMarkingRecursion = 64;
452 if (end - start >= kMinRangeForMarkingRecursion) { 452 if (end - start >= kMinRangeForMarkingRecursion) {
453 if (VisitUnmarkedObjects(heap, start, end)) return; 453 if (VisitUnmarkedObjects(heap, start, end)) return;
454 // We are close to a stack overflow, so just mark the objects. 454 // We are close to a stack overflow, so just mark the objects.
455 } 455 }
456 for (Object** p = start; p < end; p++) MarkObjectByPointer(heap, p); 456 for (Object** p = start; p < end; p++) MarkObjectByPointer(heap, p);
457 } 457 }
458 458
459 static inline void VisitCodeTarget(RelocInfo* rinfo) { 459 static inline void VisitCodeTarget(Heap* heap, RelocInfo* rinfo) {
460 ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode())); 460 ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode()));
461 Code* code = Code::GetCodeFromTargetAddress(rinfo->target_address()); 461 Code* code = Code::GetCodeFromTargetAddress(rinfo->target_address());
462 if (FLAG_cleanup_ics_at_gc && code->is_inline_cache_stub()) { 462 if (FLAG_cleanup_ics_at_gc && code->is_inline_cache_stub()) {
463 IC::Clear(rinfo->pc()); 463 IC::Clear(rinfo->pc());
464 // Please note targets for cleared inline caches do not have to be 464 // Please note targets for cleared inline caches do not have to be
465 // marked since they are contained in HEAP->non_monomorphic_cache(). 465 // marked since they are contained in HEAP->non_monomorphic_cache().
466 } else { 466 } else {
467 code->heap()->mark_compact_collector()->MarkObject(code); 467 heap->mark_compact_collector()->MarkObject(code);
468 } 468 }
469 } 469 }
470 470
471 static void VisitGlobalPropertyCell(RelocInfo* rinfo) { 471 static void VisitGlobalPropertyCell(Heap* heap, RelocInfo* rinfo) {
472 ASSERT(rinfo->rmode() == RelocInfo::GLOBAL_PROPERTY_CELL); 472 ASSERT(rinfo->rmode() == RelocInfo::GLOBAL_PROPERTY_CELL);
473 Object* cell = rinfo->target_cell(); 473 Object* cell = rinfo->target_cell();
474 Object* old_cell = cell; 474 Object* old_cell = cell;
475 VisitPointer(reinterpret_cast<JSGlobalPropertyCell*>(cell)->heap(), &cell); 475 VisitPointer(heap, &cell);
476 if (cell != old_cell) { 476 if (cell != old_cell) {
477 rinfo->set_target_cell(reinterpret_cast<JSGlobalPropertyCell*>(cell)); 477 rinfo->set_target_cell(reinterpret_cast<JSGlobalPropertyCell*>(cell));
478 } 478 }
479 } 479 }
480 480
481 static inline void VisitDebugTarget(RelocInfo* rinfo) { 481 static inline void VisitDebugTarget(Heap* heap, RelocInfo* rinfo) {
482 ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) && 482 ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) &&
483 rinfo->IsPatchedReturnSequence()) || 483 rinfo->IsPatchedReturnSequence()) ||
484 (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) && 484 (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
485 rinfo->IsPatchedDebugBreakSlotSequence())); 485 rinfo->IsPatchedDebugBreakSlotSequence()));
486 HeapObject* code = Code::GetCodeFromTargetAddress(rinfo->call_address()); 486 HeapObject* code = Code::GetCodeFromTargetAddress(rinfo->call_address());
487 reinterpret_cast<Code*>(code)->heap()->mark_compact_collector()-> 487 heap->mark_compact_collector()->MarkObject(code);
488 MarkObject(code);
489 } 488 }
490 489
491 // Mark object pointed to by p. 490 // Mark object pointed to by p.
492 INLINE(static void MarkObjectByPointer(Heap* heap, Object** p)) { 491 INLINE(static void MarkObjectByPointer(Heap* heap, Object** p)) {
493 if (!(*p)->IsHeapObject()) return; 492 if (!(*p)->IsHeapObject()) return;
494 HeapObject* object = ShortCircuitConsString(p); 493 HeapObject* object = ShortCircuitConsString(p);
495 if (!object->IsMarked()) { 494 if (!object->IsMarked()) {
496 heap->mark_compact_collector()->MarkUnmarkedObject(object); 495 heap->mark_compact_collector()->MarkUnmarkedObject(object);
497 } 496 }
498 } 497 }
(...skipping 59 matching lines...)
558 reinterpret_cast<Code*>(object)->CodeIterateBody<StaticMarkingVisitor>( 557 reinterpret_cast<Code*>(object)->CodeIterateBody<StaticMarkingVisitor>(
559 map->heap()); 558 map->heap());
560 } 559 }
561 560
562 // Code flushing support. 561 // Code flushing support.
563 562
564 // How many collections a newly compiled code object will survive before being 563 // How many collections a newly compiled code object will survive before being
565 // flushed. 564 // flushed.
566 static const int kCodeAgeThreshold = 5; 565 static const int kCodeAgeThreshold = 5;
567 566
568 inline static bool HasSourceCode(SharedFunctionInfo* info) { 567 inline static bool HasSourceCode(Heap* heap, SharedFunctionInfo* info) {
569 Object* undefined = HEAP->raw_unchecked_undefined_value(); 568 Object* undefined = heap->raw_unchecked_undefined_value();
570 return (info->script() != undefined) && 569 return (info->script() != undefined) &&
571 (reinterpret_cast<Script*>(info->script())->source() != undefined); 570 (reinterpret_cast<Script*>(info->script())->source() != undefined);
572 } 571 }
573 572
574 573
575 inline static bool IsCompiled(JSFunction* function) { 574 inline static bool IsCompiled(JSFunction* function) {
576 return function->unchecked_code() != 575 return function->unchecked_code() !=
577 function->GetIsolate()->builtins()->builtin(Builtins::kLazyCompile); 576 function->GetIsolate()->builtins()->builtin(Builtins::kLazyCompile);
578 } 577 }
579 578
580 inline static bool IsCompiled(SharedFunctionInfo* function) { 579 inline static bool IsCompiled(SharedFunctionInfo* function) {
581 return function->unchecked_code() != 580 return function->unchecked_code() !=
582 function->GetIsolate()->builtins()->builtin(Builtins::kLazyCompile); 581 function->GetIsolate()->builtins()->builtin(Builtins::kLazyCompile);
583 } 582 }
584 583
585 inline static bool IsFlushable(JSFunction* function) { 584 inline static bool IsFlushable(Heap* heap, JSFunction* function) {
586 SharedFunctionInfo* shared_info = function->unchecked_shared(); 585 SharedFunctionInfo* shared_info = function->unchecked_shared();
587 586
588 // Code is either on stack, in compilation cache or referenced 587 // Code is either on stack, in compilation cache or referenced
589 // by optimized version of function. 588 // by optimized version of function.
590 if (function->unchecked_code()->IsMarked()) { 589 if (function->unchecked_code()->IsMarked()) {
591 shared_info->set_code_age(0); 590 shared_info->set_code_age(0);
592 return false; 591 return false;
593 } 592 }
594 593
595 // We do not flush code for optimized functions. 594 // We do not flush code for optimized functions.
596 if (function->code() != shared_info->unchecked_code()) { 595 if (function->code() != shared_info->unchecked_code()) {
597 return false; 596 return false;
598 } 597 }
599 598
600 return IsFlushable(shared_info); 599 return IsFlushable(heap, shared_info);
601 } 600 }
602 601
603 inline static bool IsFlushable(SharedFunctionInfo* shared_info) { 602 inline static bool IsFlushable(Heap* heap, SharedFunctionInfo* shared_info) {
604 // Code is either on stack, in compilation cache or referenced 603 // Code is either on stack, in compilation cache or referenced
605 // by optimized version of function. 604 // by optimized version of function.
606 if (shared_info->unchecked_code()->IsMarked()) { 605 if (shared_info->unchecked_code()->IsMarked()) {
607 shared_info->set_code_age(0); 606 shared_info->set_code_age(0);
608 return false; 607 return false;
609 } 608 }
610 609
611 // The function must be compiled and have the source code available, 610 // The function must be compiled and have the source code available,
612 // to be able to recompile it in case we need the function again. 611 // to be able to recompile it in case we need the function again.
613 if (!(shared_info->is_compiled() && HasSourceCode(shared_info))) { 612 if (!(shared_info->is_compiled() && HasSourceCode(heap, shared_info))) {
614 return false; 613 return false;
615 } 614 }
616 615
617 // We never flush code for Api functions. 616 // We never flush code for Api functions.
618 Object* function_data = shared_info->function_data(); 617 Object* function_data = shared_info->function_data();
619 if (function_data->IsHeapObject() && 618 if (function_data->IsHeapObject() &&
620 (SafeMap(function_data)->instance_type() == 619 (SafeMap(function_data)->instance_type() ==
621 FUNCTION_TEMPLATE_INFO_TYPE)) { 620 FUNCTION_TEMPLATE_INFO_TYPE)) {
622 return false; 621 return false;
623 } 622 }
(...skipping 11 matching lines...)
635 if (shared_info->code_age() < kCodeAgeThreshold) { 634 if (shared_info->code_age() < kCodeAgeThreshold) {
636 shared_info->set_code_age(shared_info->code_age() + 1); 635 shared_info->set_code_age(shared_info->code_age() + 1);
637 return false; 636 return false;
638 } 637 }
639 638
640 return true; 639 return true;
641 } 640 }
642 641
643 642
644 static bool FlushCodeForFunction(Heap* heap, JSFunction* function) { 643 static bool FlushCodeForFunction(Heap* heap, JSFunction* function) {
645 if (!IsFlushable(function)) return false; 644 if (!IsFlushable(heap, function)) return false;
646 645
647 // This function's code looks flushable. But we have to postpone the 646 // This function's code looks flushable. But we have to postpone the
648 // decision until we see all functions that point to the same 647 // decision until we see all functions that point to the same
649 // SharedFunctionInfo because some of them might be optimized. 648 // SharedFunctionInfo because some of them might be optimized.
650 // That would make the nonoptimized version of the code nonflushable, 649 // That would make the nonoptimized version of the code nonflushable,
651 // because it is required for bailing out from optimized code. 650 // because it is required for bailing out from optimized code.
652 heap->mark_compact_collector()->code_flusher()->AddCandidate(function); 651 heap->mark_compact_collector()->code_flusher()->AddCandidate(function);
653 return true; 652 return true;
654 } 653 }
655 654
(...skipping 56 matching lines...)
712 711
713 712
714 static void VisitSharedFunctionInfoAndFlushCodeGeneric( 713 static void VisitSharedFunctionInfoAndFlushCodeGeneric(
715 Map* map, HeapObject* object, bool known_flush_code_candidate) { 714 Map* map, HeapObject* object, bool known_flush_code_candidate) {
716 Heap* heap = map->heap(); 715 Heap* heap = map->heap();
717 SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(object); 716 SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(object);
718 717
719 if (shared->IsInobjectSlackTrackingInProgress()) shared->DetachInitialMap(); 718 if (shared->IsInobjectSlackTrackingInProgress()) shared->DetachInitialMap();
720 719
721 if (!known_flush_code_candidate) { 720 if (!known_flush_code_candidate) {
722 known_flush_code_candidate = IsFlushable(shared); 721 known_flush_code_candidate = IsFlushable(heap, shared);
723 if (known_flush_code_candidate) { 722 if (known_flush_code_candidate) {
724 heap->mark_compact_collector()->code_flusher()->AddCandidate(shared); 723 heap->mark_compact_collector()->code_flusher()->AddCandidate(shared);
725 } 724 }
726 } 725 }
727 726
728 VisitSharedFunctionInfoFields(heap, object, known_flush_code_candidate); 727 VisitSharedFunctionInfoFields(heap, object, known_flush_code_candidate);
729 } 728 }
730 729
731 730
732 static void VisitCodeEntry(Heap* heap, Address entry_address) { 731 static void VisitCodeEntry(Heap* heap, Address entry_address) {
(...skipping 129 matching lines...)
862 explicit MarkingVisitor(Heap* heap) : heap_(heap) { } 861 explicit MarkingVisitor(Heap* heap) : heap_(heap) { }
863 862
864 void VisitPointer(Object** p) { 863 void VisitPointer(Object** p) {
865 StaticMarkingVisitor::VisitPointer(heap_, p); 864 StaticMarkingVisitor::VisitPointer(heap_, p);
866 } 865 }
867 866
868 void VisitPointers(Object** start, Object** end) { 867 void VisitPointers(Object** start, Object** end) {
869 StaticMarkingVisitor::VisitPointers(heap_, start, end); 868 StaticMarkingVisitor::VisitPointers(heap_, start, end);
870 } 869 }
871 870
872 void VisitCodeTarget(RelocInfo* rinfo) { 871 void VisitCodeTarget(Heap* heap, RelocInfo* rinfo) {
873 StaticMarkingVisitor::VisitCodeTarget(rinfo); 872 StaticMarkingVisitor::VisitCodeTarget(heap, rinfo);
874 } 873 }
875 874
876 void VisitGlobalPropertyCell(RelocInfo* rinfo) { 875 void VisitGlobalPropertyCell(Heap* heap, RelocInfo* rinfo) {
877 StaticMarkingVisitor::VisitGlobalPropertyCell(rinfo); 876 StaticMarkingVisitor::VisitGlobalPropertyCell(heap, rinfo);
878 } 877 }
879 878
880 void VisitDebugTarget(RelocInfo* rinfo) { 879 void VisitDebugTarget(Heap* heap, RelocInfo* rinfo) {
881 StaticMarkingVisitor::VisitDebugTarget(rinfo); 880 StaticMarkingVisitor::VisitDebugTarget(heap, rinfo);
882 } 881 }
883 882
884 private: 883 private:
885 Heap* heap_; 884 Heap* heap_;
886 }; 885 };
887 886
888 887
889 class CodeMarkingVisitor : public ThreadVisitor { 888 class CodeMarkingVisitor : public ThreadVisitor {
890 public: 889 public:
891 explicit CodeMarkingVisitor(MarkCompactCollector* collector) 890 explicit CodeMarkingVisitor(MarkCompactCollector* collector)
(...skipping 27 matching lines...)
919 collector_->MarkObject(shared); 918 collector_->MarkObject(shared);
920 } 919 }
921 } 920 }
922 921
923 private: 922 private:
924 MarkCompactCollector* collector_; 923 MarkCompactCollector* collector_;
925 }; 924 };
926 925
927 926
928 void MarkCompactCollector::PrepareForCodeFlushing() { 927 void MarkCompactCollector::PrepareForCodeFlushing() {
929 ASSERT(heap_ == Isolate::Current()->heap()); 928 ASSERT(heap() == Isolate::Current()->heap());
930 929
931 if (!FLAG_flush_code) { 930 if (!FLAG_flush_code) {
932 EnableCodeFlushing(false); 931 EnableCodeFlushing(false);
933 return; 932 return;
934 } 933 }
935 934
936 #ifdef ENABLE_DEBUGGER_SUPPORT 935 #ifdef ENABLE_DEBUGGER_SUPPORT
937 if (heap_->isolate()->debug()->IsLoaded() || 936 if (heap()->isolate()->debug()->IsLoaded() ||
938 heap_->isolate()->debug()->has_break_points()) { 937 heap()->isolate()->debug()->has_break_points()) {
939 EnableCodeFlushing(false); 938 EnableCodeFlushing(false);
940 return; 939 return;
941 } 940 }
942 #endif 941 #endif
943 EnableCodeFlushing(true); 942 EnableCodeFlushing(true);
944 943
945 // Ensure that empty descriptor array is marked. Method MarkDescriptorArray 944 // Ensure that empty descriptor array is marked. Method MarkDescriptorArray
946 // relies on it being marked before any other descriptor array. 945 // relies on it being marked before any other descriptor array.
947 MarkObject(heap_->raw_unchecked_empty_descriptor_array()); 946 MarkObject(heap()->raw_unchecked_empty_descriptor_array());
948 947
949 // Make sure we are not referencing the code from the stack. 948 // Make sure we are not referencing the code from the stack.
950 ASSERT(this == heap_->mark_compact_collector()); 949 ASSERT(this == heap()->mark_compact_collector());
951 for (StackFrameIterator it; !it.done(); it.Advance()) { 950 for (StackFrameIterator it; !it.done(); it.Advance()) {
952 MarkObject(it.frame()->unchecked_code()); 951 MarkObject(it.frame()->unchecked_code());
953 } 952 }
954 953
955 // Iterate the archived stacks in all threads to check if 954 // Iterate the archived stacks in all threads to check if
956 // the code is referenced. 955 // the code is referenced.
957 CodeMarkingVisitor code_marking_visitor(this); 956 CodeMarkingVisitor code_marking_visitor(this);
958 heap_->isolate()->thread_manager()->IterateArchivedThreads( 957 heap()->isolate()->thread_manager()->IterateArchivedThreads(
959 &code_marking_visitor); 958 &code_marking_visitor);
960 959
961 SharedFunctionInfoMarkingVisitor visitor(this); 960 SharedFunctionInfoMarkingVisitor visitor(this);
962 heap_->isolate()->compilation_cache()->IterateFunctions(&visitor); 961 heap()->isolate()->compilation_cache()->IterateFunctions(&visitor);
963 heap_->isolate()->handle_scope_implementer()->Iterate(&visitor); 962 heap()->isolate()->handle_scope_implementer()->Iterate(&visitor);
964 963
965 ProcessMarkingStack(); 964 ProcessMarkingStack();
966 } 965 }
967 966
968 967
969 // Visitor class for marking heap roots. 968 // Visitor class for marking heap roots.
970 class RootMarkingVisitor : public ObjectVisitor { 969 class RootMarkingVisitor : public ObjectVisitor {
971 public: 970 public:
972 explicit RootMarkingVisitor(Heap* heap) 971 explicit RootMarkingVisitor(Heap* heap)
973 : collector_(heap->mark_compact_collector()) { } 972 : collector_(heap->mark_compact_collector()) { }
(...skipping 27 matching lines...)
1001 collector_->EmptyMarkingStack(); 1000 collector_->EmptyMarkingStack();
1002 } 1001 }
1003 1002
1004 MarkCompactCollector* collector_; 1003 MarkCompactCollector* collector_;
1005 }; 1004 };
1006 1005
1007 1006
1008 // Helper class for pruning the symbol table. 1007 // Helper class for pruning the symbol table.
1009 class SymbolTableCleaner : public ObjectVisitor { 1008 class SymbolTableCleaner : public ObjectVisitor {
1010 public: 1009 public:
1011 SymbolTableCleaner() : pointers_removed_(0) { } 1010 explicit SymbolTableCleaner(Heap* heap)
1011 : heap_(heap), pointers_removed_(0) { }
1012 1012
1013 virtual void VisitPointers(Object** start, Object** end) { 1013 virtual void VisitPointers(Object** start, Object** end) {
1014 // Visit all HeapObject pointers in [start, end). 1014 // Visit all HeapObject pointers in [start, end).
1015 for (Object** p = start; p < end; p++) { 1015 for (Object** p = start; p < end; p++) {
1016 if ((*p)->IsHeapObject() && !HeapObject::cast(*p)->IsMarked()) { 1016 if ((*p)->IsHeapObject() && !HeapObject::cast(*p)->IsMarked()) {
1017 // Check if the symbol being pruned is an external symbol. We need to 1017 // Check if the symbol being pruned is an external symbol. We need to
1018 // delete the associated external data as this symbol is going away. 1018 // delete the associated external data as this symbol is going away.
1019 1019
1020 // Since no objects have yet been moved we can safely access the map of 1020 // Since no objects have yet been moved we can safely access the map of
1021 // the object. 1021 // the object.
1022 if ((*p)->IsExternalString()) { 1022 if ((*p)->IsExternalString()) {
1023 HEAP->FinalizeExternalString(String::cast(*p)); 1023 heap_->FinalizeExternalString(String::cast(*p));
1024 } 1024 }
1025 // Set the entry to null_value (as deleted). 1025 // Set the entry to null_value (as deleted).
1026 *p = HEAP->raw_unchecked_null_value(); 1026 *p = heap_->raw_unchecked_null_value();
1027 pointers_removed_++; 1027 pointers_removed_++;
1028 } 1028 }
1029 } 1029 }
1030 } 1030 }
1031 1031
1032 int PointersRemoved() { 1032 int PointersRemoved() {
1033 return pointers_removed_; 1033 return pointers_removed_;
1034 } 1034 }
1035 private: 1035 private:
1036 Heap* heap_;
1036 int pointers_removed_; 1037 int pointers_removed_;
1037 }; 1038 };
1038 1039
1039 1040
1040 // Implementation of WeakObjectRetainer for mark compact GCs. All marked objects 1041 // Implementation of WeakObjectRetainer for mark compact GCs. All marked objects
1041 // are retained. 1042 // are retained.
1042 class MarkCompactWeakObjectRetainer : public WeakObjectRetainer { 1043 class MarkCompactWeakObjectRetainer : public WeakObjectRetainer {
1043 public: 1044 public:
1044 virtual Object* RetainAs(Object* object) { 1045 virtual Object* RetainAs(Object* object) {
1045 MapWord first_word = HeapObject::cast(object)->map_word(); 1046 MapWord first_word = HeapObject::cast(object)->map_word();
1046 if (first_word.IsMarked()) { 1047 if (first_word.IsMarked()) {
1047 return object; 1048 return object;
1048 } else { 1049 } else {
1049 return NULL; 1050 return NULL;
1050 } 1051 }
1051 } 1052 }
1052 }; 1053 };
1053 1054
1054 1055
1055 void MarkCompactCollector::MarkUnmarkedObject(HeapObject* object) { 1056 void MarkCompactCollector::MarkUnmarkedObject(HeapObject* object) {
1056 ASSERT(!object->IsMarked()); 1057 ASSERT(!object->IsMarked());
1057 ASSERT(HEAP->Contains(object)); 1058 ASSERT(HEAP->Contains(object));
1058 if (object->IsMap()) { 1059 if (object->IsMap()) {
1059 Map* map = Map::cast(object); 1060 Map* map = Map::cast(object);
1060 if (FLAG_cleanup_caches_in_maps_at_gc) { 1061 if (FLAG_cleanup_caches_in_maps_at_gc) {
1061 map->ClearCodeCache(heap_); 1062 map->ClearCodeCache(heap());
1062 } 1063 }
1063 SetMark(map); 1064 SetMark(map);
1064 if (FLAG_collect_maps && 1065 if (FLAG_collect_maps &&
1065 map->instance_type() >= FIRST_JS_OBJECT_TYPE && 1066 map->instance_type() >= FIRST_JS_OBJECT_TYPE &&
1066 map->instance_type() <= JS_FUNCTION_TYPE) { 1067 map->instance_type() <= JS_FUNCTION_TYPE) {
1067 MarkMapContents(map); 1068 MarkMapContents(map);
1068 } else { 1069 } else {
1069 marking_stack_.Push(map); 1070 marking_stack_.Push(map);
1070 } 1071 }
1071 } else { 1072 } else {
(...skipping 50 matching lines...)
1122 } 1123 }
1123 } 1124 }
1124 } 1125 }
1125 // The DescriptorArray descriptors contains a pointer to its contents array, 1126 // The DescriptorArray descriptors contains a pointer to its contents array,
1126 // but the contents array is already marked. 1127 // but the contents array is already marked.
1127 marking_stack_.Push(descriptors); 1128 marking_stack_.Push(descriptors);
1128 } 1129 }
1129 1130
1130 1131
1131 void MarkCompactCollector::CreateBackPointers() { 1132 void MarkCompactCollector::CreateBackPointers() {
1132 HeapObjectIterator iterator(HEAP->map_space()); 1133 HeapObjectIterator iterator(heap()->map_space());
1133 for (HeapObject* next_object = iterator.next(); 1134 for (HeapObject* next_object = iterator.next();
1134 next_object != NULL; next_object = iterator.next()) { 1135 next_object != NULL; next_object = iterator.next()) {
1135 if (next_object->IsMap()) { // Could also be ByteArray on free list. 1136 if (next_object->IsMap()) { // Could also be ByteArray on free list.
1136 Map* map = Map::cast(next_object); 1137 Map* map = Map::cast(next_object);
1137 if (map->instance_type() >= FIRST_JS_OBJECT_TYPE && 1138 if (map->instance_type() >= FIRST_JS_OBJECT_TYPE &&
1138 map->instance_type() <= JS_FUNCTION_TYPE) { 1139 map->instance_type() <= JS_FUNCTION_TYPE) {
1139 map->CreateBackPointers(); 1140 map->CreateBackPointers();
1140 } else { 1141 } else {
1141 ASSERT(map->instance_descriptors() == HEAP->empty_descriptor_array()); 1142 ASSERT(map->instance_descriptors() == heap()->empty_descriptor_array());
1142 } 1143 }
1143 } 1144 }
1144 } 1145 }
1145 } 1146 }
1146 1147
1147 1148
1148 static int OverflowObjectSize(HeapObject* obj) { 1149 static int OverflowObjectSize(HeapObject* obj) {
1149 // Recover the normal map pointer, it might be marked as live and 1150 // Recover the normal map pointer, it might be marked as live and
1150 // overflowed. 1151 // overflowed.
1151 MapWord map_word = obj->map_word(); 1152 MapWord map_word = obj->map_word();
(...skipping 27 matching lines...)
1179 } 1180 }
1180 }; 1181 };
1181 1182
1182 1183
1183 bool MarkCompactCollector::IsUnmarkedHeapObject(Object** p) { 1184 bool MarkCompactCollector::IsUnmarkedHeapObject(Object** p) {
1184 return (*p)->IsHeapObject() && !HeapObject::cast(*p)->IsMarked(); 1185 return (*p)->IsHeapObject() && !HeapObject::cast(*p)->IsMarked();
1185 } 1186 }
1186 1187
1187 1188
1188 void MarkCompactCollector::MarkSymbolTable() { 1189 void MarkCompactCollector::MarkSymbolTable() {
1189 SymbolTable* symbol_table = heap_->raw_unchecked_symbol_table(); 1190 SymbolTable* symbol_table = heap()->raw_unchecked_symbol_table();
1190 // Mark the symbol table itself. 1191 // Mark the symbol table itself.
1191 SetMark(symbol_table); 1192 SetMark(symbol_table);
1192 // Explicitly mark the prefix. 1193 // Explicitly mark the prefix.
1193 MarkingVisitor marker(heap_); 1194 MarkingVisitor marker(heap());
1194 symbol_table->IteratePrefix(&marker); 1195 symbol_table->IteratePrefix(&marker);
1195 ProcessMarkingStack(); 1196 ProcessMarkingStack();
1196 } 1197 }
1197 1198
1198 1199
1199 void MarkCompactCollector::MarkRoots(RootMarkingVisitor* visitor) { 1200 void MarkCompactCollector::MarkRoots(RootMarkingVisitor* visitor) {
1200 // Mark the heap roots including global variables, stack variables, 1201 // Mark the heap roots including global variables, stack variables,
1201 // etc., and all objects reachable from them. 1202 // etc., and all objects reachable from them.
1202 HEAP->IterateStrongRoots(visitor, VISIT_ONLY_STRONG); 1203 heap()->IterateStrongRoots(visitor, VISIT_ONLY_STRONG);
1203 1204
1204 // Handle the symbol table specially. 1205 // Handle the symbol table specially.
1205 MarkSymbolTable(); 1206 MarkSymbolTable();
1206 1207
1207 // There may be overflowed objects in the heap. Visit them now. 1208 // There may be overflowed objects in the heap. Visit them now.
1208 while (marking_stack_.overflowed()) { 1209 while (marking_stack_.overflowed()) {
1209 RefillMarkingStack(); 1210 RefillMarkingStack();
1210 EmptyMarkingStack(); 1211 EmptyMarkingStack();
1211 } 1212 }
1212 } 1213 }
1213 1214
1214 1215
1215 void MarkCompactCollector::MarkObjectGroups() { 1216 void MarkCompactCollector::MarkObjectGroups() {
1216 List<ObjectGroup*>* object_groups = 1217 List<ObjectGroup*>* object_groups =
1217 heap_->isolate()->global_handles()->object_groups(); 1218 heap()->isolate()->global_handles()->object_groups();
1218 1219
1219 for (int i = 0; i < object_groups->length(); i++) { 1220 for (int i = 0; i < object_groups->length(); i++) {
1220 ObjectGroup* entry = object_groups->at(i); 1221 ObjectGroup* entry = object_groups->at(i);
1221 if (entry == NULL) continue; 1222 if (entry == NULL) continue;
1222 1223
1223 List<Object**>& objects = entry->objects_; 1224 List<Object**>& objects = entry->objects_;
1224 bool group_marked = false; 1225 bool group_marked = false;
1225 for (int j = 0; j < objects.length(); j++) { 1226 for (int j = 0; j < objects.length(); j++) {
1226 Object* object = *objects[j]; 1227 Object* object = *objects[j];
1227 if (object->IsHeapObject() && HeapObject::cast(object)->IsMarked()) { 1228 if (object->IsHeapObject() && HeapObject::cast(object)->IsMarked()) {
(...skipping 15 matching lines...)
1243 // Once the entire group has been colored gray, set the object group 1244 // Once the entire group has been colored gray, set the object group
1244 // to NULL so it won't be processed again. 1245 // to NULL so it won't be processed again.
1245 delete entry; 1246 delete entry;
1246 object_groups->at(i) = NULL; 1247 object_groups->at(i) = NULL;
1247 } 1248 }
1248 } 1249 }
1249 1250
1250 1251
1251 void MarkCompactCollector::MarkImplicitRefGroups() { 1252 void MarkCompactCollector::MarkImplicitRefGroups() {
1252 List<ImplicitRefGroup*>* ref_groups = 1253 List<ImplicitRefGroup*>* ref_groups =
1253 heap_->isolate()->global_handles()->implicit_ref_groups(); 1254 heap()->isolate()->global_handles()->implicit_ref_groups();
1254 1255
1255 for (int i = 0; i < ref_groups->length(); i++) { 1256 for (int i = 0; i < ref_groups->length(); i++) {
1256 ImplicitRefGroup* entry = ref_groups->at(i); 1257 ImplicitRefGroup* entry = ref_groups->at(i);
1257 if (entry == NULL) continue; 1258 if (entry == NULL) continue;
1258 1259
1259 if (!entry->parent_->IsMarked()) continue; 1260 if (!entry->parent_->IsMarked()) continue;
1260 1261
1261 List<Object**>& children = entry->children_; 1262 List<Object**>& children = entry->children_;
1262 // A parent object is marked, so mark as gray all child white heap 1263 // A parent object is marked, so mark as gray all child white heap
1263 // objects. 1264 // objects.
(...skipping 12 matching lines...)
1276 1277
1277 1278
1278 // Mark all objects reachable from the objects on the marking stack. 1279 // Mark all objects reachable from the objects on the marking stack.
1279 // Before: the marking stack contains zero or more heap object pointers. 1280 // Before: the marking stack contains zero or more heap object pointers.
1280 // After: the marking stack is empty, and all objects reachable from the 1281 // After: the marking stack is empty, and all objects reachable from the
1281 // marking stack have been marked, or are overflowed in the heap. 1282 // marking stack have been marked, or are overflowed in the heap.
1282 void MarkCompactCollector::EmptyMarkingStack() { 1283 void MarkCompactCollector::EmptyMarkingStack() {
1283 while (!marking_stack_.is_empty()) { 1284 while (!marking_stack_.is_empty()) {
1284 HeapObject* object = marking_stack_.Pop(); 1285 HeapObject* object = marking_stack_.Pop();
1285 ASSERT(object->IsHeapObject()); 1286 ASSERT(object->IsHeapObject());
1286 ASSERT(heap_->Contains(object)); 1287 ASSERT(heap()->Contains(object));
1287 ASSERT(object->IsMarked()); 1288 ASSERT(object->IsMarked());
1288 ASSERT(!object->IsOverflowed()); 1289 ASSERT(!object->IsOverflowed());
1289 1290
1290 // Because the object is marked, we have to recover the original map 1291 // Because the object is marked, we have to recover the original map
1291 // pointer and use it to mark the object's body. 1292 // pointer and use it to mark the object's body.
1292 MapWord map_word = object->map_word(); 1293 MapWord map_word = object->map_word();
1293 map_word.ClearMark(); 1294 map_word.ClearMark();
1294 Map* map = map_word.ToMap(); 1295 Map* map = map_word.ToMap();
1295 MarkObject(map); 1296 MarkObject(map);
1296 1297
1297 StaticMarkingVisitor::IterateBody(map, object); 1298 StaticMarkingVisitor::IterateBody(map, object);
1298 } 1299 }
1299 } 1300 }
1300 1301
1301 1302
1302 // Sweep the heap for overflowed objects, clear their overflow bits, and 1303 // Sweep the heap for overflowed objects, clear their overflow bits, and
1303 // push them on the marking stack. Stop early if the marking stack fills 1304 // push them on the marking stack. Stop early if the marking stack fills
1304 // before sweeping completes. If sweeping completes, there are no remaining 1305 // before sweeping completes. If sweeping completes, there are no remaining
1305 // overflowed objects in the heap so the overflow flag on the markings stack 1306 // overflowed objects in the heap so the overflow flag on the markings stack
1306 // is cleared. 1307 // is cleared.
1307 void MarkCompactCollector::RefillMarkingStack() { 1308 void MarkCompactCollector::RefillMarkingStack() {
1308 ASSERT(marking_stack_.overflowed()); 1309 ASSERT(marking_stack_.overflowed());
1309 1310
1310 SemiSpaceIterator new_it(HEAP->new_space(), &OverflowObjectSize); 1311 SemiSpaceIterator new_it(heap()->new_space(), &OverflowObjectSize);
1311 OverflowedObjectsScanner::ScanOverflowedObjects(this, &new_it); 1312 OverflowedObjectsScanner::ScanOverflowedObjects(this, &new_it);
1312 if (marking_stack_.is_full()) return; 1313 if (marking_stack_.is_full()) return;
1313 1314
1314 HeapObjectIterator old_pointer_it(HEAP->old_pointer_space(), 1315 HeapObjectIterator old_pointer_it(heap()->old_pointer_space(),
1315 &OverflowObjectSize); 1316 &OverflowObjectSize);
1316 OverflowedObjectsScanner::ScanOverflowedObjects(this, &old_pointer_it); 1317 OverflowedObjectsScanner::ScanOverflowedObjects(this, &old_pointer_it);
1317 if (marking_stack_.is_full()) return; 1318 if (marking_stack_.is_full()) return;
1318 1319
1319 HeapObjectIterator old_data_it(HEAP->old_data_space(), &OverflowObjectSize); 1320 HeapObjectIterator old_data_it(heap()->old_data_space(), &OverflowObjectSize);
1320 OverflowedObjectsScanner::ScanOverflowedObjects(this, &old_data_it); 1321 OverflowedObjectsScanner::ScanOverflowedObjects(this, &old_data_it);
1321 if (marking_stack_.is_full()) return; 1322 if (marking_stack_.is_full()) return;
1322 1323
1323 HeapObjectIterator code_it(HEAP->code_space(), &OverflowObjectSize); 1324 HeapObjectIterator code_it(heap()->code_space(), &OverflowObjectSize);
1324 OverflowedObjectsScanner::ScanOverflowedObjects(this, &code_it); 1325 OverflowedObjectsScanner::ScanOverflowedObjects(this, &code_it);
1325 if (marking_stack_.is_full()) return; 1326 if (marking_stack_.is_full()) return;
1326 1327
1327 HeapObjectIterator map_it(HEAP->map_space(), &OverflowObjectSize); 1328 HeapObjectIterator map_it(heap()->map_space(), &OverflowObjectSize);
1328 OverflowedObjectsScanner::ScanOverflowedObjects(this, &map_it); 1329 OverflowedObjectsScanner::ScanOverflowedObjects(this, &map_it);
1329 if (marking_stack_.is_full()) return; 1330 if (marking_stack_.is_full()) return;
1330 1331
1331 HeapObjectIterator cell_it(HEAP->cell_space(), &OverflowObjectSize); 1332 HeapObjectIterator cell_it(heap()->cell_space(), &OverflowObjectSize);
1332 OverflowedObjectsScanner::ScanOverflowedObjects(this, &cell_it); 1333 OverflowedObjectsScanner::ScanOverflowedObjects(this, &cell_it);
1333 if (marking_stack_.is_full()) return; 1334 if (marking_stack_.is_full()) return;
1334 1335
1335 LargeObjectIterator lo_it(HEAP->lo_space(), &OverflowObjectSize); 1336 LargeObjectIterator lo_it(heap()->lo_space(), &OverflowObjectSize);
1336 OverflowedObjectsScanner::ScanOverflowedObjects(this, &lo_it); 1337 OverflowedObjectsScanner::ScanOverflowedObjects(this, &lo_it);
1337 if (marking_stack_.is_full()) return; 1338 if (marking_stack_.is_full()) return;
1338 1339
1339 marking_stack_.clear_overflowed(); 1340 marking_stack_.clear_overflowed();
1340 } 1341 }
1341 1342
1342 1343
1343 // Mark all objects reachable (transitively) from objects on the marking 1344 // Mark all objects reachable (transitively) from objects on the marking
1344 // stack. Before: the marking stack contains zero or more heap object 1345 // stack. Before: the marking stack contains zero or more heap object
1345 // pointers. After: the marking stack is empty and there are no overflowed 1346 // pointers. After: the marking stack is empty and there are no overflowed
(...skipping 17 matching lines...)
1363 ProcessMarkingStack(); 1364 ProcessMarkingStack();
1364 } 1365 }
1365 } 1366 }
1366 1367
1367 1368
1368 void MarkCompactCollector::MarkLiveObjects() { 1369 void MarkCompactCollector::MarkLiveObjects() {
1369 GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_MARK); 1370 GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_MARK);
1370 // The recursive GC marker detects when it is nearing stack overflow, 1371 // The recursive GC marker detects when it is nearing stack overflow,
1371 // and switches to a different marking system. JS interrupts interfere 1372 // and switches to a different marking system. JS interrupts interfere
1372 // with the C stack limit check. 1373 // with the C stack limit check.
1373 PostponeInterruptsScope postpone(heap_->isolate()); 1374 PostponeInterruptsScope postpone(heap()->isolate());
1374 1375
1375 #ifdef DEBUG 1376 #ifdef DEBUG
1376 ASSERT(state_ == PREPARE_GC); 1377 ASSERT(state_ == PREPARE_GC);
1377 state_ = MARK_LIVE_OBJECTS; 1378 state_ = MARK_LIVE_OBJECTS;
1378 #endif 1379 #endif
1379 // The to space contains live objects, the from space is used as a marking 1380 // The to space contains live objects, the from space is used as a marking
1380 // stack. 1381 // stack.
1381 marking_stack_.Initialize(heap_->new_space()->FromSpaceLow(), 1382 marking_stack_.Initialize(heap()->new_space()->FromSpaceLow(),
1382 heap_->new_space()->FromSpaceHigh()); 1383 heap()->new_space()->FromSpaceHigh());
1383 1384
1384 ASSERT(!marking_stack_.overflowed()); 1385 ASSERT(!marking_stack_.overflowed());
1385 1386
1386 PrepareForCodeFlushing(); 1387 PrepareForCodeFlushing();
1387 1388
1388 RootMarkingVisitor root_visitor(heap_); 1389 RootMarkingVisitor root_visitor(heap());
1389 MarkRoots(&root_visitor); 1390 MarkRoots(&root_visitor);
1390 1391
1391 // The objects reachable from the roots are marked, yet unreachable 1392 // The objects reachable from the roots are marked, yet unreachable
1392 // objects are unmarked. Mark objects reachable due to host 1393 // objects are unmarked. Mark objects reachable due to host
1393 // application specific logic. 1394 // application specific logic.
1394 ProcessExternalMarking(); 1395 ProcessExternalMarking();
1395 1396
1396 // The objects reachable from the roots or object groups are marked, 1397 // The objects reachable from the roots or object groups are marked,
1397 // yet unreachable objects are unmarked. Mark objects reachable 1398 // yet unreachable objects are unmarked. Mark objects reachable
1398 // only from weak global handles. 1399 // only from weak global handles.
1399 // 1400 //
1400 // First we identify nonlive weak handles and mark them as pending 1401 // First we identify nonlive weak handles and mark them as pending
1401 // destruction. 1402 // destruction.
1402 heap_->isolate()->global_handles()->IdentifyWeakHandles( 1403 heap()->isolate()->global_handles()->IdentifyWeakHandles(
1403 &IsUnmarkedHeapObject); 1404 &IsUnmarkedHeapObject);
1404 // Then we mark the objects and process the transitive closure. 1405 // Then we mark the objects and process the transitive closure.
1405 heap_->isolate()->global_handles()->IterateWeakRoots(&root_visitor); 1406 heap()->isolate()->global_handles()->IterateWeakRoots(&root_visitor);
1406 while (marking_stack_.overflowed()) { 1407 while (marking_stack_.overflowed()) {
1407 RefillMarkingStack(); 1408 RefillMarkingStack();
1408 EmptyMarkingStack(); 1409 EmptyMarkingStack();
1409 } 1410 }
1410 1411
1411 // Repeat host application specific marking to mark unmarked objects 1412 // Repeat host application specific marking to mark unmarked objects
1412 // reachable from the weak roots. 1413 // reachable from the weak roots.
1413 ProcessExternalMarking(); 1414 ProcessExternalMarking();
1414 1415
1415 // Prune the symbol table removing all symbols only pointed to by the 1416 // Prune the symbol table removing all symbols only pointed to by the
1416 // symbol table. Cannot use symbol_table() here because the symbol 1417 // symbol table. Cannot use symbol_table() here because the symbol
1417 // table is marked. 1418 // table is marked.
1418 SymbolTable* symbol_table = heap_->raw_unchecked_symbol_table(); 1419 SymbolTable* symbol_table = heap()->raw_unchecked_symbol_table();
1419 SymbolTableCleaner v; 1420 SymbolTableCleaner v(heap());
1420 symbol_table->IterateElements(&v); 1421 symbol_table->IterateElements(&v);
1421 symbol_table->ElementsRemoved(v.PointersRemoved()); 1422 symbol_table->ElementsRemoved(v.PointersRemoved());
1422 heap_->external_string_table_.Iterate(&v); 1423 heap()->external_string_table_.Iterate(&v);
1423 heap_->external_string_table_.CleanUp(); 1424 heap()->external_string_table_.CleanUp();
1424 1425
1425 // Process the weak references. 1426 // Process the weak references.
1426 MarkCompactWeakObjectRetainer mark_compact_object_retainer; 1427 MarkCompactWeakObjectRetainer mark_compact_object_retainer;
1427 heap_->ProcessWeakReferences(&mark_compact_object_retainer); 1428 heap()->ProcessWeakReferences(&mark_compact_object_retainer);
1428 1429
1429 // Remove object groups after marking phase. 1430 // Remove object groups after marking phase.
1430 heap_->isolate()->global_handles()->RemoveObjectGroups(); 1431 heap()->isolate()->global_handles()->RemoveObjectGroups();
1431 heap_->isolate()->global_handles()->RemoveImplicitRefGroups(); 1432 heap()->isolate()->global_handles()->RemoveImplicitRefGroups();
1432 1433
1433 // Flush code from collected candidates. 1434 // Flush code from collected candidates.
1434 if (is_code_flushing_enabled()) { 1435 if (is_code_flushing_enabled()) {
1435 code_flusher_->ProcessCandidates(); 1436 code_flusher_->ProcessCandidates();
1436 } 1437 }
1437 1438
1438 // Clean up dead objects from the runtime profiler. 1439 // Clean up dead objects from the runtime profiler.
1439 heap_->isolate()->runtime_profiler()->RemoveDeadSamples(); 1440 heap()->isolate()->runtime_profiler()->RemoveDeadSamples();
1440 } 1441 }
1441 1442
1442 1443
1443 #ifdef DEBUG 1444 #ifdef DEBUG
1444 void MarkCompactCollector::UpdateLiveObjectCount(HeapObject* obj) { 1445 void MarkCompactCollector::UpdateLiveObjectCount(HeapObject* obj) {
1445 live_bytes_ += obj->Size(); 1446 live_bytes_ += obj->Size();
1446 if (HEAP->new_space()->Contains(obj)) { 1447 if (heap()->new_space()->Contains(obj)) {
1447 live_young_objects_size_ += obj->Size(); 1448 live_young_objects_size_ += obj->Size();
1448 } else if (HEAP->map_space()->Contains(obj)) { 1449 } else if (heap()->map_space()->Contains(obj)) {
1449 ASSERT(obj->IsMap()); 1450 ASSERT(obj->IsMap());
1450 live_map_objects_size_ += obj->Size(); 1451 live_map_objects_size_ += obj->Size();
1451 } else if (HEAP->cell_space()->Contains(obj)) { 1452 } else if (heap()->cell_space()->Contains(obj)) {
1452 ASSERT(obj->IsJSGlobalPropertyCell()); 1453 ASSERT(obj->IsJSGlobalPropertyCell());
1453 live_cell_objects_size_ += obj->Size(); 1454 live_cell_objects_size_ += obj->Size();
1454 } else if (HEAP->old_pointer_space()->Contains(obj)) { 1455 } else if (heap()->old_pointer_space()->Contains(obj)) {
1455 live_old_pointer_objects_size_ += obj->Size(); 1456 live_old_pointer_objects_size_ += obj->Size();
1456 } else if (HEAP->old_data_space()->Contains(obj)) { 1457 } else if (heap()->old_data_space()->Contains(obj)) {
1457 live_old_data_objects_size_ += obj->Size(); 1458 live_old_data_objects_size_ += obj->Size();
1458 } else if (HEAP->code_space()->Contains(obj)) { 1459 } else if (heap()->code_space()->Contains(obj)) {
1459 live_code_objects_size_ += obj->Size(); 1460 live_code_objects_size_ += obj->Size();
1460 } else if (HEAP->lo_space()->Contains(obj)) { 1461 } else if (heap()->lo_space()->Contains(obj)) {
1461 live_lo_objects_size_ += obj->Size(); 1462 live_lo_objects_size_ += obj->Size();
1462 } else { 1463 } else {
1463 UNREACHABLE(); 1464 UNREACHABLE();
1464 } 1465 }
1465 } 1466 }
1466 #endif // DEBUG 1467 #endif // DEBUG
1467 1468
1468 1469
1469 void MarkCompactCollector::SweepLargeObjectSpace() { 1470 void MarkCompactCollector::SweepLargeObjectSpace() {
1470 #ifdef DEBUG 1471 #ifdef DEBUG
1471 ASSERT(state_ == MARK_LIVE_OBJECTS); 1472 ASSERT(state_ == MARK_LIVE_OBJECTS);
1472 state_ = 1473 state_ =
1473 compacting_collection_ ? ENCODE_FORWARDING_ADDRESSES : SWEEP_SPACES; 1474 compacting_collection_ ? ENCODE_FORWARDING_ADDRESSES : SWEEP_SPACES;
1474 #endif 1475 #endif
1475 // Deallocate unmarked objects and clear marked bits for marked objects. 1476 // Deallocate unmarked objects and clear marked bits for marked objects.
1476 HEAP->lo_space()->FreeUnmarkedObjects(); 1477 heap()->lo_space()->FreeUnmarkedObjects();
1477 } 1478 }
1478 1479
1479 1480
1480 // Safe to use during marking phase only. 1481 // Safe to use during marking phase only.
1481 bool MarkCompactCollector::SafeIsMap(HeapObject* object) { 1482 bool MarkCompactCollector::SafeIsMap(HeapObject* object) {
1482 MapWord metamap = object->map_word(); 1483 MapWord metamap = object->map_word();
1483 metamap.ClearMark(); 1484 metamap.ClearMark();
1484 return metamap.ToMap()->instance_type() == MAP_TYPE; 1485 return metamap.ToMap()->instance_type() == MAP_TYPE;
1485 } 1486 }
1486 1487
1487 1488
1488 void MarkCompactCollector::ClearNonLiveTransitions() { 1489 void MarkCompactCollector::ClearNonLiveTransitions() {
1489 HeapObjectIterator map_iterator(HEAP->map_space(), &SizeOfMarkedObject); 1490 HeapObjectIterator map_iterator(heap()->map_space(), &SizeOfMarkedObject);
1490 // Iterate over the map space, setting map transitions that go from 1491 // Iterate over the map space, setting map transitions that go from
1491 // a marked map to an unmarked map to null transitions. At the same time, 1492 // a marked map to an unmarked map to null transitions. At the same time,
1492 // set all the prototype fields of maps back to their original value, 1493 // set all the prototype fields of maps back to their original value,
1493 // dropping the back pointers temporarily stored in the prototype field. 1494 // dropping the back pointers temporarily stored in the prototype field.
1494 // Setting the prototype field requires following the linked list of 1495 // Setting the prototype field requires following the linked list of
1495 // back pointers, reversing them all at once. This allows us to find 1496 // back pointers, reversing them all at once. This allows us to find
1496 // those maps with map transitions that need to be nulled, and only 1497 // those maps with map transitions that need to be nulled, and only
1497 // scan the descriptor arrays of those maps, not all maps. 1498 // scan the descriptor arrays of those maps, not all maps.
1498 // All of these actions are carried out only on maps of JSObjects 1499 // All of these actions are carried out only on maps of JSObjects
1499 // and related subtypes. 1500 // and related subtypes.
(...skipping 29 matching lines...)
1529 Object* next; 1530 Object* next;
1530 while (SafeIsMap(current)) { 1531 while (SafeIsMap(current)) {
1531 next = current->prototype(); 1532 next = current->prototype();
1532 // There should never be a dead map above a live map. 1533 // There should never be a dead map above a live map.
1533 ASSERT(on_dead_path || current->IsMarked()); 1534 ASSERT(on_dead_path || current->IsMarked());
1534 1535
1535 // A live map above a dead map indicates a dead transition. 1536 // A live map above a dead map indicates a dead transition.
1536 // This test will always be false on the first iteration. 1537 // This test will always be false on the first iteration.
1537 if (on_dead_path && current->IsMarked()) { 1538 if (on_dead_path && current->IsMarked()) {
1538 on_dead_path = false; 1539 on_dead_path = false;
1539 current->ClearNonLiveTransitions(heap_, real_prototype); 1540 current->ClearNonLiveTransitions(heap(), real_prototype);
1540 } 1541 }
1541 *HeapObject::RawField(current, Map::kPrototypeOffset) = 1542 *HeapObject::RawField(current, Map::kPrototypeOffset) =
1542 real_prototype; 1543 real_prototype;
1543 current = reinterpret_cast<Map*>(next); 1544 current = reinterpret_cast<Map*>(next);
1544 } 1545 }
1545 } 1546 }
1546 } 1547 }
1547 1548
1548 // ------------------------------------------------------------------------- 1549 // -------------------------------------------------------------------------
1549 // Phase 2: Encode forwarding addresses. 1550 // Phase 2: Encode forwarding addresses.
(...skipping 214 matching lines...)
1764 } 1765 }
1765 1766
1766 1767
1767 // Functions to encode the forwarding pointers in each compactable space. 1768 // Functions to encode the forwarding pointers in each compactable space.
1768 void MarkCompactCollector::EncodeForwardingAddressesInNewSpace() { 1769 void MarkCompactCollector::EncodeForwardingAddressesInNewSpace() {
1769 int ignored; 1770 int ignored;
1770 EncodeForwardingAddressesInRange<MCAllocateFromNewSpace, 1771 EncodeForwardingAddressesInRange<MCAllocateFromNewSpace,
1771 EncodeForwardingAddressInNewSpace, 1772 EncodeForwardingAddressInNewSpace,
1772 IgnoreNonLiveObject>( 1773 IgnoreNonLiveObject>(
1773 this, 1774 this,
1774 heap_->new_space()->bottom(), 1775 heap()->new_space()->bottom(),
1775 heap_->new_space()->top(), 1776 heap()->new_space()->top(),
1776 &ignored); 1777 &ignored);
1777 } 1778 }
1778 1779
1779 1780
1780 template<MarkCompactCollector::AllocationFunction Alloc, 1781 template<MarkCompactCollector::AllocationFunction Alloc,
1781 MarkCompactCollector::ProcessNonLiveFunction ProcessNonLive> 1782 MarkCompactCollector::ProcessNonLiveFunction ProcessNonLive>
1782 void MarkCompactCollector::EncodeForwardingAddressesInPagedSpace( 1783 void MarkCompactCollector::EncodeForwardingAddressesInPagedSpace(
1783 PagedSpace* space) { 1784 PagedSpace* space) {
1784 PageIterator it(space, PageIterator::PAGES_IN_USE); 1785 PageIterator it(space, PageIterator::PAGES_IN_USE);
1785 while (it.has_next()) { 1786 while (it.has_next()) {
(...skipping 401 matching lines...)
2187 space->SetTop(new_allocation_top); 2188 space->SetTop(new_allocation_top);
2188 } 2189 }
2189 } 2190 }
2190 2191
2191 2192
2192 void MarkCompactCollector::EncodeForwardingAddresses() { 2193 void MarkCompactCollector::EncodeForwardingAddresses() {
2193 ASSERT(state_ == ENCODE_FORWARDING_ADDRESSES); 2194 ASSERT(state_ == ENCODE_FORWARDING_ADDRESSES);
2194 // Objects in the active semispace of the young generation may be 2195 // Objects in the active semispace of the young generation may be
2195 // relocated to the inactive semispace (if not promoted). Set the 2196 // relocated to the inactive semispace (if not promoted). Set the
2196 // relocation info to the beginning of the inactive semispace. 2197 // relocation info to the beginning of the inactive semispace.
2197 heap_->new_space()->MCResetRelocationInfo(); 2198 heap()->new_space()->MCResetRelocationInfo();
2198 2199
2199 // Compute the forwarding pointers in each space. 2200 // Compute the forwarding pointers in each space.
2200 EncodeForwardingAddressesInPagedSpace<MCAllocateFromOldPointerSpace, 2201 EncodeForwardingAddressesInPagedSpace<MCAllocateFromOldPointerSpace,
2201 ReportDeleteIfNeeded>( 2202 ReportDeleteIfNeeded>(
2202 heap_->old_pointer_space()); 2203 heap()->old_pointer_space());
2203 2204
2204 EncodeForwardingAddressesInPagedSpace<MCAllocateFromOldDataSpace, 2205 EncodeForwardingAddressesInPagedSpace<MCAllocateFromOldDataSpace,
2205 IgnoreNonLiveObject>( 2206 IgnoreNonLiveObject>(
2206 heap_->old_data_space()); 2207 heap()->old_data_space());
2207 2208
2208 EncodeForwardingAddressesInPagedSpace<MCAllocateFromCodeSpace, 2209 EncodeForwardingAddressesInPagedSpace<MCAllocateFromCodeSpace,
2209 ReportDeleteIfNeeded>( 2210 ReportDeleteIfNeeded>(
2210 heap_->code_space()); 2211 heap()->code_space());
2211 2212
2212 EncodeForwardingAddressesInPagedSpace<MCAllocateFromCellSpace, 2213 EncodeForwardingAddressesInPagedSpace<MCAllocateFromCellSpace,
2213 IgnoreNonLiveObject>( 2214 IgnoreNonLiveObject>(
2214 heap_->cell_space()); 2215 heap()->cell_space());
2215 2216
2216 2217
2217 // Compute new space next to last after the old and code spaces have been 2218 // Compute new space next to last after the old and code spaces have been
2218 // compacted. Objects in new space can be promoted to old or code space. 2219 // compacted. Objects in new space can be promoted to old or code space.
2219 EncodeForwardingAddressesInNewSpace(); 2220 EncodeForwardingAddressesInNewSpace();
2220 2221
2221 // Compute map space last because computing forwarding addresses 2222 // Compute map space last because computing forwarding addresses
2222 // overwrites non-live objects. Objects in the other spaces rely on 2223 // overwrites non-live objects. Objects in the other spaces rely on
2223 // non-live map pointers to get the sizes of non-live objects. 2224 // non-live map pointers to get the sizes of non-live objects.
2224 EncodeForwardingAddressesInPagedSpace<MCAllocateFromMapSpace, 2225 EncodeForwardingAddressesInPagedSpace<MCAllocateFromMapSpace,
2225 IgnoreNonLiveObject>( 2226 IgnoreNonLiveObject>(
2226 heap_->map_space()); 2227 heap()->map_space());
2227 2228
2228 // Write relocation info to the top page, so we can use it later. This is 2229 // Write relocation info to the top page, so we can use it later. This is
2229 // done after promoting objects from the new space so we get the correct 2230 // done after promoting objects from the new space so we get the correct
2230 // allocation top. 2231 // allocation top.
2231 heap_->old_pointer_space()->MCWriteRelocationInfoToPage(); 2232 heap()->old_pointer_space()->MCWriteRelocationInfoToPage();
2232 heap_->old_data_space()->MCWriteRelocationInfoToPage(); 2233 heap()->old_data_space()->MCWriteRelocationInfoToPage();
2233 heap_->code_space()->MCWriteRelocationInfoToPage(); 2234 heap()->code_space()->MCWriteRelocationInfoToPage();
2234 heap_->map_space()->MCWriteRelocationInfoToPage(); 2235 heap()->map_space()->MCWriteRelocationInfoToPage();
2235 heap_->cell_space()->MCWriteRelocationInfoToPage(); 2236 heap()->cell_space()->MCWriteRelocationInfoToPage();
2236 } 2237 }
2237 2238
2238 2239
2239 class MapIterator : public HeapObjectIterator { 2240 class MapIterator : public HeapObjectIterator {
2240 public: 2241 public:
2241 MapIterator() : HeapObjectIterator(HEAP->map_space(), &SizeCallback) { } 2242 explicit MapIterator(Heap* heap)
2243 : HeapObjectIterator(heap->map_space(), &SizeCallback) { }
2242 2244
2243 explicit MapIterator(Address start) 2245 MapIterator(Heap* heap, Address start)
2244 : HeapObjectIterator(HEAP->map_space(), start, &SizeCallback) { } 2246 : HeapObjectIterator(heap->map_space(), start, &SizeCallback) { }
2245 2247
2246 private: 2248 private:
2247 static int SizeCallback(HeapObject* unused) { 2249 static int SizeCallback(HeapObject* unused) {
2248 USE(unused); 2250 USE(unused);
2249 return Map::kSize; 2251 return Map::kSize;
2250 } 2252 }
2251 }; 2253 };
2252 2254
2253 2255
2254 class MapCompact { 2256 class MapCompact {
2255 public: 2257 public:
2256 explicit MapCompact(Heap* heap, int live_maps) 2258 explicit MapCompact(Heap* heap, int live_maps)
2257 : heap_(heap), 2259 : heap_(heap),
2258 live_maps_(live_maps), 2260 live_maps_(live_maps),
2259 to_evacuate_start_(heap->map_space()->TopAfterCompaction(live_maps)), 2261 to_evacuate_start_(heap->map_space()->TopAfterCompaction(live_maps)),
2260 map_to_evacuate_it_(to_evacuate_start_), 2262 vacant_map_it_(heap),
2263 map_to_evacuate_it_(heap, to_evacuate_start_),
2261 first_map_to_evacuate_( 2264 first_map_to_evacuate_(
2262 reinterpret_cast<Map*>(HeapObject::FromAddress(to_evacuate_start_))) { 2265 reinterpret_cast<Map*>(HeapObject::FromAddress(to_evacuate_start_))) {
2263 } 2266 }
2264 2267
2265 void CompactMaps() { 2268 void CompactMaps() {
2266 // As we know the number of maps to evacuate beforehand, 2269 // As we know the number of maps to evacuate beforehand,
2267 // we stop when there are no more vacant maps. 2270 // we stop when there are no more vacant maps.
2268 for (Map* next_vacant_map = NextVacantMap(); 2271 for (Map* next_vacant_map = NextVacantMap();
2269 next_vacant_map; 2272 next_vacant_map;
2270 next_vacant_map = NextVacantMap()) { 2273 next_vacant_map = NextVacantMap()) {
2271 EvacuateMap(next_vacant_map, NextMapToEvacuate()); 2274 EvacuateMap(next_vacant_map, NextMapToEvacuate());
2272 } 2275 }
2273 2276
2274 #ifdef DEBUG 2277 #ifdef DEBUG
2275 CheckNoMapsToEvacuate(); 2278 CheckNoMapsToEvacuate();
2276 #endif 2279 #endif
2277 } 2280 }
2278 2281
2279 void UpdateMapPointersInRoots() { 2282 void UpdateMapPointersInRoots() {
2280 MapUpdatingVisitor map_updating_visitor; 2283 MapUpdatingVisitor map_updating_visitor;
2281 heap_->IterateRoots(&map_updating_visitor, VISIT_ONLY_STRONG); 2284 heap()->IterateRoots(&map_updating_visitor, VISIT_ONLY_STRONG);
2282 heap_->isolate()->global_handles()->IterateWeakRoots(&map_updating_visitor); 2285 heap()->isolate()->global_handles()->IterateWeakRoots(
2286 &map_updating_visitor);
2283 LiveObjectList::IterateElements(&map_updating_visitor); 2287 LiveObjectList::IterateElements(&map_updating_visitor);
2284 } 2288 }
2285 2289
2286 void UpdateMapPointersInPagedSpace(PagedSpace* space) { 2290 void UpdateMapPointersInPagedSpace(PagedSpace* space) {
2287 ASSERT(space != heap_->map_space()); 2291 ASSERT(space != heap()->map_space());
2288 2292
2289 PageIterator it(space, PageIterator::PAGES_IN_USE); 2293 PageIterator it(space, PageIterator::PAGES_IN_USE);
2290 while (it.has_next()) { 2294 while (it.has_next()) {
2291 Page* p = it.next(); 2295 Page* p = it.next();
2292 UpdateMapPointersInRange(heap_, p->ObjectAreaStart(), p->AllocationTop()); 2296 UpdateMapPointersInRange(heap(),
2297 p->ObjectAreaStart(),
2298 p->AllocationTop());
2293 } 2299 }
2294 } 2300 }
2295 2301
2296 void UpdateMapPointersInNewSpace() { 2302 void UpdateMapPointersInNewSpace() {
2297 NewSpace* space = heap_->new_space(); 2303 NewSpace* space = heap()->new_space();
2298 UpdateMapPointersInRange(heap_, space->bottom(), space->top()); 2304 UpdateMapPointersInRange(heap(), space->bottom(), space->top());
2299 } 2305 }
2300 2306
2301 void UpdateMapPointersInLargeObjectSpace() { 2307 void UpdateMapPointersInLargeObjectSpace() {
2302 LargeObjectIterator it(heap_->lo_space()); 2308 LargeObjectIterator it(heap()->lo_space());
2303 for (HeapObject* obj = it.next(); obj != NULL; obj = it.next()) 2309 for (HeapObject* obj = it.next(); obj != NULL; obj = it.next())
2304 UpdateMapPointersInObject(heap_, obj); 2310 UpdateMapPointersInObject(heap(), obj);
2305 } 2311 }
2306 2312
2307 void Finish() { 2313 void Finish() {
2308 heap_->map_space()->FinishCompaction(to_evacuate_start_, live_maps_); 2314 heap()->map_space()->FinishCompaction(to_evacuate_start_, live_maps_);
2309 } 2315 }
2310 2316
2317 inline Heap* heap() const { return heap_; }
2318
2311 private: 2319 private:
2312 Heap* heap_; 2320 Heap* heap_;
2313 int live_maps_; 2321 int live_maps_;
2314 Address to_evacuate_start_; 2322 Address to_evacuate_start_;
2315 MapIterator vacant_map_it_; 2323 MapIterator vacant_map_it_;
2316 MapIterator map_to_evacuate_it_; 2324 MapIterator map_to_evacuate_it_;
2317 Map* first_map_to_evacuate_; 2325 Map* first_map_to_evacuate_;
2318 2326
2319 // Helper class for updating map pointers in HeapObjects. 2327 // Helper class for updating map pointers in HeapObjects.
2320 class MapUpdatingVisitor: public ObjectVisitor { 2328 class MapUpdatingVisitor: public ObjectVisitor {
(...skipping 129 matching lines...)
2450 void MarkCompactCollector::SweepSpaces() { 2458 void MarkCompactCollector::SweepSpaces() {
2451 GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_SWEEP); 2459 GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_SWEEP);
2452 2460
2453 ASSERT(state_ == SWEEP_SPACES); 2461 ASSERT(state_ == SWEEP_SPACES);
2454 ASSERT(!IsCompacting()); 2462 ASSERT(!IsCompacting());
2455 // Noncompacting collections simply sweep the spaces to clear the mark 2463 // Noncompacting collections simply sweep the spaces to clear the mark
2456 // bits and free the nonlive blocks (for old and map spaces). We sweep 2464 // bits and free the nonlive blocks (for old and map spaces). We sweep
2457 // the map space last because freeing non-live maps overwrites them and 2465 // the map space last because freeing non-live maps overwrites them and
2458 // the other spaces rely on possibly non-live maps to get the sizes for 2466 // the other spaces rely on possibly non-live maps to get the sizes for
2459 // non-live objects. 2467 // non-live objects.
2460 SweepSpace(heap_, heap_->old_pointer_space()); 2468 SweepSpace(heap(), heap()->old_pointer_space());
2461 SweepSpace(heap_, heap_->old_data_space()); 2469 SweepSpace(heap(), heap()->old_data_space());
2462 SweepSpace(heap_, heap_->code_space()); 2470 SweepSpace(heap(), heap()->code_space());
2463 SweepSpace(heap_, heap_->cell_space()); 2471 SweepSpace(heap(), heap()->cell_space());
2464 { GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_SWEEP_NEWSPACE); 2472 { GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_SWEEP_NEWSPACE);
2465 SweepNewSpace(heap_, heap_->new_space()); 2473 SweepNewSpace(heap(), heap()->new_space());
2466 } 2474 }
2467 SweepSpace(heap_, heap_->map_space()); 2475 SweepSpace(heap(), heap()->map_space());
2468 2476
2469 heap_->IterateDirtyRegions(heap_->map_space(), 2477 heap()->IterateDirtyRegions(heap()->map_space(),
2470 &heap_->IteratePointersInDirtyMapsRegion, 2478 &heap()->IteratePointersInDirtyMapsRegion,
2471 &UpdatePointerToNewGen, 2479 &UpdatePointerToNewGen,
2472 heap_->WATERMARK_SHOULD_BE_VALID); 2480 heap()->WATERMARK_SHOULD_BE_VALID);
2473 2481
2474 intptr_t live_maps_size = heap_->map_space()->Size(); 2482 intptr_t live_maps_size = heap()->map_space()->Size();
2475 int live_maps = static_cast<int>(live_maps_size / Map::kSize); 2483 int live_maps = static_cast<int>(live_maps_size / Map::kSize);
2476 ASSERT(live_map_objects_size_ == live_maps_size); 2484 ASSERT(live_map_objects_size_ == live_maps_size);
2477 2485
2478 if (heap_->map_space()->NeedsCompaction(live_maps)) { 2486 if (heap()->map_space()->NeedsCompaction(live_maps)) {
2479 MapCompact map_compact(heap_, live_maps); 2487 MapCompact map_compact(heap(), live_maps);
2480 2488
2481 map_compact.CompactMaps(); 2489 map_compact.CompactMaps();
2482 map_compact.UpdateMapPointersInRoots(); 2490 map_compact.UpdateMapPointersInRoots();
2483 2491
2484 PagedSpaces spaces; 2492 PagedSpaces spaces;
2485 for (PagedSpace* space = spaces.next(); 2493 for (PagedSpace* space = spaces.next();
2486 space != NULL; space = spaces.next()) { 2494 space != NULL; space = spaces.next()) {
2487 if (space == heap_->map_space()) continue; 2495 if (space == heap()->map_space()) continue;
2488 map_compact.UpdateMapPointersInPagedSpace(space); 2496 map_compact.UpdateMapPointersInPagedSpace(space);
2489 } 2497 }
2490 map_compact.UpdateMapPointersInNewSpace(); 2498 map_compact.UpdateMapPointersInNewSpace();
2491 map_compact.UpdateMapPointersInLargeObjectSpace(); 2499 map_compact.UpdateMapPointersInLargeObjectSpace();
2492 2500
2493 map_compact.Finish(); 2501 map_compact.Finish();
2494 } 2502 }
2495 } 2503 }
2496 2504
2497 2505
(...skipping 75 matching lines...)
2573 ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) && 2581 ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) &&
2574 rinfo->IsPatchedReturnSequence()) || 2582 rinfo->IsPatchedReturnSequence()) ||
2575 (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) && 2583 (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
2576 rinfo->IsPatchedDebugBreakSlotSequence())); 2584 rinfo->IsPatchedDebugBreakSlotSequence()));
2577 Object* target = Code::GetCodeFromTargetAddress(rinfo->call_address()); 2585 Object* target = Code::GetCodeFromTargetAddress(rinfo->call_address());
2578 VisitPointer(&target); 2586 VisitPointer(&target);
2579 rinfo->set_call_address( 2587 rinfo->set_call_address(
2580 reinterpret_cast<Code*>(target)->instruction_start()); 2588 reinterpret_cast<Code*>(target)->instruction_start());
2581 } 2589 }
2582 2590
2591 inline Heap* heap() const { return heap_; }
2592
2583 private: 2593 private:
2584 void UpdatePointer(Object** p) { 2594 void UpdatePointer(Object** p) {
2585 if (!(*p)->IsHeapObject()) return; 2595 if (!(*p)->IsHeapObject()) return;
2586 2596
2587 HeapObject* obj = HeapObject::cast(*p); 2597 HeapObject* obj = HeapObject::cast(*p);
2588 Address old_addr = obj->address(); 2598 Address old_addr = obj->address();
2589 Address new_addr; 2599 Address new_addr;
2590 ASSERT(!heap_->InFromSpace(obj)); 2600 ASSERT(!heap()->InFromSpace(obj));
2591 2601
2592 if (heap_->new_space()->Contains(obj)) { 2602 if (heap()->new_space()->Contains(obj)) {
2593 Address forwarding_pointer_addr = 2603 Address forwarding_pointer_addr =
2594 heap_->new_space()->FromSpaceLow() + 2604 heap()->new_space()->FromSpaceLow() +
2595 heap_->new_space()->ToSpaceOffsetForAddress(old_addr); 2605 heap()->new_space()->ToSpaceOffsetForAddress(old_addr);
2596 new_addr = Memory::Address_at(forwarding_pointer_addr); 2606 new_addr = Memory::Address_at(forwarding_pointer_addr);
2597 2607
2598 #ifdef DEBUG 2608 #ifdef DEBUG
2599 ASSERT(heap_->old_pointer_space()->Contains(new_addr) || 2609 ASSERT(heap()->old_pointer_space()->Contains(new_addr) ||
2600 heap_->old_data_space()->Contains(new_addr) || 2610 heap()->old_data_space()->Contains(new_addr) ||
2601 heap_->new_space()->FromSpaceContains(new_addr) || 2611 heap()->new_space()->FromSpaceContains(new_addr) ||
2602 heap_->lo_space()->Contains(HeapObject::FromAddress(new_addr))); 2612 heap()->lo_space()->Contains(HeapObject::FromAddress(new_addr)));
2603 2613
2604 if (heap_->new_space()->FromSpaceContains(new_addr)) { 2614 if (heap()->new_space()->FromSpaceContains(new_addr)) {
2605 ASSERT(heap_->new_space()->FromSpaceOffsetForAddress(new_addr) <= 2615 ASSERT(heap()->new_space()->FromSpaceOffsetForAddress(new_addr) <=
2606 heap_->new_space()->ToSpaceOffsetForAddress(old_addr)); 2616 heap()->new_space()->ToSpaceOffsetForAddress(old_addr));
2607 } 2617 }
2608 #endif 2618 #endif
2609 2619
2610 } else if (heap_->lo_space()->Contains(obj)) { 2620 } else if (heap()->lo_space()->Contains(obj)) {
2611 // Don't move objects in the large object space. 2621 // Don't move objects in the large object space.
2612 return; 2622 return;
2613 2623
2614 } else { 2624 } else {
2615 #ifdef DEBUG 2625 #ifdef DEBUG
2616 PagedSpaces spaces; 2626 PagedSpaces spaces;
2617 PagedSpace* original_space = spaces.next(); 2627 PagedSpace* original_space = spaces.next();
2618 while (original_space != NULL) { 2628 while (original_space != NULL) {
2619 if (original_space->Contains(obj)) break; 2629 if (original_space->Contains(obj)) break;
2620 original_space = spaces.next(); 2630 original_space = spaces.next();
(...skipping 18 matching lines...)
2639 2649
2640 Heap* heap_; 2650 Heap* heap_;
2641 }; 2651 };
2642 2652
2643 2653
2644 void MarkCompactCollector::UpdatePointers() { 2654 void MarkCompactCollector::UpdatePointers() {
2645 #ifdef DEBUG 2655 #ifdef DEBUG
2646 ASSERT(state_ == ENCODE_FORWARDING_ADDRESSES); 2656 ASSERT(state_ == ENCODE_FORWARDING_ADDRESSES);
2647 state_ = UPDATE_POINTERS; 2657 state_ = UPDATE_POINTERS;
2648 #endif 2658 #endif
2649 UpdatingVisitor updating_visitor(heap_); 2659 UpdatingVisitor updating_visitor(heap());
2650 heap_->isolate()->runtime_profiler()->UpdateSamplesAfterCompact( 2660 heap()->isolate()->runtime_profiler()->UpdateSamplesAfterCompact(
2651 &updating_visitor); 2661 &updating_visitor);
2652 heap_->IterateRoots(&updating_visitor, VISIT_ONLY_STRONG); 2662 heap()->IterateRoots(&updating_visitor, VISIT_ONLY_STRONG);
2653 heap_->isolate()->global_handles()->IterateWeakRoots(&updating_visitor); 2663 heap()->isolate()->global_handles()->IterateWeakRoots(&updating_visitor);
2654 2664
2655 // Update the pointer to the head of the weak list of global contexts. 2665 // Update the pointer to the head of the weak list of global contexts.
2656 updating_visitor.VisitPointer(&heap_->global_contexts_list_); 2666 updating_visitor.VisitPointer(&heap()->global_contexts_list_);
2657 2667
2658 LiveObjectList::IterateElements(&updating_visitor); 2668 LiveObjectList::IterateElements(&updating_visitor);
2659 2669
2660 int live_maps_size = IterateLiveObjects( 2670 int live_maps_size = IterateLiveObjects(
2661 heap_->map_space(), &MarkCompactCollector::UpdatePointersInOldObject); 2671 heap()->map_space(), &MarkCompactCollector::UpdatePointersInOldObject);
2662 int live_pointer_olds_size = IterateLiveObjects( 2672 int live_pointer_olds_size = IterateLiveObjects(
2663 heap_->old_pointer_space(), 2673 heap()->old_pointer_space(),
2664 &MarkCompactCollector::UpdatePointersInOldObject); 2674 &MarkCompactCollector::UpdatePointersInOldObject);
2665 int live_data_olds_size = IterateLiveObjects( 2675 int live_data_olds_size = IterateLiveObjects(
2666 heap_->old_data_space(), 2676 heap()->old_data_space(),
2667 &MarkCompactCollector::UpdatePointersInOldObject); 2677 &MarkCompactCollector::UpdatePointersInOldObject);
2668 int live_codes_size = IterateLiveObjects( 2678 int live_codes_size = IterateLiveObjects(
2669 heap_->code_space(), &MarkCompactCollector::UpdatePointersInOldObject); 2679 heap()->code_space(), &MarkCompactCollector::UpdatePointersInOldObject);
2670 int live_cells_size = IterateLiveObjects( 2680 int live_cells_size = IterateLiveObjects(
2671 heap_->cell_space(), &MarkCompactCollector::UpdatePointersInOldObject); 2681 heap()->cell_space(), &MarkCompactCollector::UpdatePointersInOldObject);
2672 int live_news_size = IterateLiveObjects( 2682 int live_news_size = IterateLiveObjects(
2673 heap_->new_space(), &MarkCompactCollector::UpdatePointersInNewObject); 2683 heap()->new_space(), &MarkCompactCollector::UpdatePointersInNewObject);
2674 2684
2675 // Large objects do not move, the map word can be updated directly. 2685 // Large objects do not move, the map word can be updated directly.
2676 LargeObjectIterator it(heap_->lo_space()); 2686 LargeObjectIterator it(heap()->lo_space());
2677 for (HeapObject* obj = it.next(); obj != NULL; obj = it.next()) { 2687 for (HeapObject* obj = it.next(); obj != NULL; obj = it.next()) {
2678 UpdatePointersInNewObject(obj); 2688 UpdatePointersInNewObject(obj);
2679 } 2689 }
2680 2690
2681 USE(live_maps_size); 2691 USE(live_maps_size);
2682 USE(live_pointer_olds_size); 2692 USE(live_pointer_olds_size);
2683 USE(live_data_olds_size); 2693 USE(live_data_olds_size);
2684 USE(live_codes_size); 2694 USE(live_codes_size);
2685 USE(live_cells_size); 2695 USE(live_cells_size);
2686 USE(live_news_size); 2696 USE(live_news_size);
2687 ASSERT(live_maps_size == live_map_objects_size_); 2697 ASSERT(live_maps_size == live_map_objects_size_);
2688 ASSERT(live_data_olds_size == live_old_data_objects_size_); 2698 ASSERT(live_data_olds_size == live_old_data_objects_size_);
2689 ASSERT(live_pointer_olds_size == live_old_pointer_objects_size_); 2699 ASSERT(live_pointer_olds_size == live_old_pointer_objects_size_);
2690 ASSERT(live_codes_size == live_code_objects_size_); 2700 ASSERT(live_codes_size == live_code_objects_size_);
2691 ASSERT(live_cells_size == live_cell_objects_size_); 2701 ASSERT(live_cells_size == live_cell_objects_size_);
2692 ASSERT(live_news_size == live_young_objects_size_); 2702 ASSERT(live_news_size == live_young_objects_size_);
2693 } 2703 }
2694 2704
2695 2705
2696 int MarkCompactCollector::UpdatePointersInNewObject(HeapObject* obj) { 2706 int MarkCompactCollector::UpdatePointersInNewObject(HeapObject* obj) {
2697 // Keep old map pointers 2707 // Keep old map pointers
2698 Map* old_map = obj->map(); 2708 Map* old_map = obj->map();
2699 ASSERT(old_map->IsHeapObject()); 2709 ASSERT(old_map->IsHeapObject());
2700 2710
2701 Address forwarded = GetForwardingAddressInOldSpace(old_map); 2711 Address forwarded = GetForwardingAddressInOldSpace(old_map);
2702 2712
2703 ASSERT(heap_->map_space()->Contains(old_map)); 2713 ASSERT(heap()->map_space()->Contains(old_map));
2704 ASSERT(heap_->map_space()->Contains(forwarded)); 2714 ASSERT(heap()->map_space()->Contains(forwarded));
2705 #ifdef DEBUG 2715 #ifdef DEBUG
2706 if (FLAG_gc_verbose) { 2716 if (FLAG_gc_verbose) {
2707 PrintF("update %p : %p -> %p\n", obj->address(), old_map->address(), 2717 PrintF("update %p : %p -> %p\n", obj->address(), old_map->address(),
2708 forwarded); 2718 forwarded);
2709 } 2719 }
2710 #endif 2720 #endif
2711 // Update the map pointer. 2721 // Update the map pointer.
2712 obj->set_map(reinterpret_cast<Map*>(HeapObject::FromAddress(forwarded))); 2722 obj->set_map(reinterpret_cast<Map*>(HeapObject::FromAddress(forwarded)));
2713 2723
2714 // We have to compute the object size relying on the old map because 2724 // We have to compute the object size relying on the old map because
2715 // map objects are not relocated yet. 2725 // map objects are not relocated yet.
2716 int obj_size = obj->SizeFromMap(old_map); 2726 int obj_size = obj->SizeFromMap(old_map);
2717 2727
2718 // Update pointers in the object body. 2728 // Update pointers in the object body.
2719 UpdatingVisitor updating_visitor(heap_); 2729 UpdatingVisitor updating_visitor(heap());
2720 obj->IterateBody(old_map->instance_type(), obj_size, &updating_visitor); 2730 obj->IterateBody(old_map->instance_type(), obj_size, &updating_visitor);
2721 return obj_size; 2731 return obj_size;
2722 } 2732 }
2723 2733
2724 2734
2725 int MarkCompactCollector::UpdatePointersInOldObject(HeapObject* obj) { 2735 int MarkCompactCollector::UpdatePointersInOldObject(HeapObject* obj) {
2726 // Decode the map pointer. 2736 // Decode the map pointer.
2727 MapWord encoding = obj->map_word(); 2737 MapWord encoding = obj->map_word();
2728 Address map_addr = encoding.DecodeMapAddress(heap_->map_space()); 2738 Address map_addr = encoding.DecodeMapAddress(heap()->map_space());
2729 ASSERT(heap_->map_space()->Contains(HeapObject::FromAddress(map_addr))); 2739 ASSERT(heap()->map_space()->Contains(HeapObject::FromAddress(map_addr)));
2730 2740
2731 // At this point, the first word of map_addr is also encoded; we cannot 2741 // At this point, the first word of map_addr is also encoded; we cannot
2732 // cast it to Map* using Map::cast. 2742 // cast it to Map* using Map::cast.
2733 Map* map = reinterpret_cast<Map*>(HeapObject::FromAddress(map_addr)); 2743 Map* map = reinterpret_cast<Map*>(HeapObject::FromAddress(map_addr));
2734 int obj_size = obj->SizeFromMap(map); 2744 int obj_size = obj->SizeFromMap(map);
2735 InstanceType type = map->instance_type(); 2745 InstanceType type = map->instance_type();
2736 2746
2737 // Update map pointer. 2747 // Update map pointer.
2738 Address new_map_addr = GetForwardingAddressInOldSpace(map); 2748 Address new_map_addr = GetForwardingAddressInOldSpace(map);
2739 int offset = encoding.DecodeOffset(); 2749 int offset = encoding.DecodeOffset();
2740 obj->set_map_word(MapWord::EncodeAddress(new_map_addr, offset)); 2750 obj->set_map_word(MapWord::EncodeAddress(new_map_addr, offset));
2741 2751
2742 #ifdef DEBUG 2752 #ifdef DEBUG
2743 if (FLAG_gc_verbose) { 2753 if (FLAG_gc_verbose) {
2744 PrintF("update %p : %p -> %p\n", obj->address(), 2754 PrintF("update %p : %p -> %p\n", obj->address(),
2745 map_addr, new_map_addr); 2755 map_addr, new_map_addr);
2746 } 2756 }
2747 #endif 2757 #endif
2748 2758
2749 // Update pointers in the object body. 2759 // Update pointers in the object body.
2750 UpdatingVisitor updating_visitor(heap_); 2760 UpdatingVisitor updating_visitor(heap());
2751 obj->IterateBody(type, obj_size, &updating_visitor); 2761 obj->IterateBody(type, obj_size, &updating_visitor);
2752 return obj_size; 2762 return obj_size;
2753 } 2763 }
2754 2764
2755 2765
2756 Address MarkCompactCollector::GetForwardingAddressInOldSpace(HeapObject* obj) { 2766 Address MarkCompactCollector::GetForwardingAddressInOldSpace(HeapObject* obj) {
2757 // Object should be either in old or map space. 2767 // Object should be either in old or map space.
2758 MapWord encoding = obj->map_word(); 2768 MapWord encoding = obj->map_word();
2759 2769
2760 // Offset to the first live object's forwarding address. 2770 // Offset to the first live object's forwarding address.
(...skipping 36 matching lines...)
2797 // Phase 4: Relocate objects 2807 // Phase 4: Relocate objects
2798 2808
2799 void MarkCompactCollector::RelocateObjects() { 2809 void MarkCompactCollector::RelocateObjects() {
2800 #ifdef DEBUG 2810 #ifdef DEBUG
2801 ASSERT(state_ == UPDATE_POINTERS); 2811 ASSERT(state_ == UPDATE_POINTERS);
2802 state_ = RELOCATE_OBJECTS; 2812 state_ = RELOCATE_OBJECTS;
2803 #endif 2813 #endif
2804 // Relocate objects; always relocate map objects first. Relocating 2814 // Relocate objects; always relocate map objects first. Relocating
2805 // objects in other spaces relies on map objects to get object size. 2815 // objects in other spaces relies on map objects to get object size.
2806 int live_maps_size = IterateLiveObjects( 2816 int live_maps_size = IterateLiveObjects(
2807 heap_->map_space(), &MarkCompactCollector::RelocateMapObject); 2817 heap()->map_space(), &MarkCompactCollector::RelocateMapObject);
2808 int live_pointer_olds_size = IterateLiveObjects( 2818 int live_pointer_olds_size = IterateLiveObjects(
2809 heap_->old_pointer_space(), 2819 heap()->old_pointer_space(),
2810 &MarkCompactCollector::RelocateOldPointerObject); 2820 &MarkCompactCollector::RelocateOldPointerObject);
2811 int live_data_olds_size = IterateLiveObjects( 2821 int live_data_olds_size = IterateLiveObjects(
2812 heap_->old_data_space(), &MarkCompactCollector::RelocateOldDataObject); 2822 heap()->old_data_space(), &MarkCompactCollector::RelocateOldDataObject);
2813 int live_codes_size = IterateLiveObjects( 2823 int live_codes_size = IterateLiveObjects(
2814 heap_->code_space(), &MarkCompactCollector::RelocateCodeObject); 2824 heap()->code_space(), &MarkCompactCollector::RelocateCodeObject);
2815 int live_cells_size = IterateLiveObjects( 2825 int live_cells_size = IterateLiveObjects(
2816 heap_->cell_space(), &MarkCompactCollector::RelocateCellObject); 2826 heap()->cell_space(), &MarkCompactCollector::RelocateCellObject);
2817 int live_news_size = IterateLiveObjects( 2827 int live_news_size = IterateLiveObjects(
2818 heap_->new_space(), &MarkCompactCollector::RelocateNewObject); 2828 heap()->new_space(), &MarkCompactCollector::RelocateNewObject);
2819 2829
2820 USE(live_maps_size); 2830 USE(live_maps_size);
2821 USE(live_pointer_olds_size); 2831 USE(live_pointer_olds_size);
2822 USE(live_data_olds_size); 2832 USE(live_data_olds_size);
2823 USE(live_codes_size); 2833 USE(live_codes_size);
2824 USE(live_cells_size); 2834 USE(live_cells_size);
2825 USE(live_news_size); 2835 USE(live_news_size);
2826 ASSERT(live_maps_size == live_map_objects_size_); 2836 ASSERT(live_maps_size == live_map_objects_size_);
2827 ASSERT(live_data_olds_size == live_old_data_objects_size_); 2837 ASSERT(live_data_olds_size == live_old_data_objects_size_);
2828 ASSERT(live_pointer_olds_size == live_old_pointer_objects_size_); 2838 ASSERT(live_pointer_olds_size == live_old_pointer_objects_size_);
2829 ASSERT(live_codes_size == live_code_objects_size_); 2839 ASSERT(live_codes_size == live_code_objects_size_);
2830 ASSERT(live_cells_size == live_cell_objects_size_); 2840 ASSERT(live_cells_size == live_cell_objects_size_);
2831 ASSERT(live_news_size == live_young_objects_size_); 2841 ASSERT(live_news_size == live_young_objects_size_);
2832 2842
2833 // Flip from and to spaces 2843 // Flip from and to spaces
2834 heap_->new_space()->Flip(); 2844 heap()->new_space()->Flip();
2835 2845
2836 heap_->new_space()->MCCommitRelocationInfo(); 2846 heap()->new_space()->MCCommitRelocationInfo();
2837 2847
2838 // Set age_mark to bottom in to space 2848 // Set age_mark to bottom in to space
2839 Address mark = heap_->new_space()->bottom(); 2849 Address mark = heap()->new_space()->bottom();
2840 heap_->new_space()->set_age_mark(mark); 2850 heap()->new_space()->set_age_mark(mark);
2841 2851
2842 PagedSpaces spaces; 2852 PagedSpaces spaces;
2843 for (PagedSpace* space = spaces.next(); space != NULL; space = spaces.next()) 2853 for (PagedSpace* space = spaces.next(); space != NULL; space = spaces.next())
2844 space->MCCommitRelocationInfo(); 2854 space->MCCommitRelocationInfo();
2845 2855
2846 heap_->CheckNewSpaceExpansionCriteria(); 2856 heap()->CheckNewSpaceExpansionCriteria();
2847 heap_->IncrementYoungSurvivorsCounter(live_news_size); 2857 heap()->IncrementYoungSurvivorsCounter(live_news_size);
2848 } 2858 }
2849 2859
2850 2860
2851 int MarkCompactCollector::RelocateMapObject(HeapObject* obj) { 2861 int MarkCompactCollector::RelocateMapObject(HeapObject* obj) {
2852 // Recover map pointer. 2862 // Recover map pointer.
2853 MapWord encoding = obj->map_word(); 2863 MapWord encoding = obj->map_word();
2854 Address map_addr = encoding.DecodeMapAddress(heap_->map_space()); 2864 Address map_addr = encoding.DecodeMapAddress(heap()->map_space());
2855 ASSERT(heap_->map_space()->Contains(HeapObject::FromAddress(map_addr))); 2865 ASSERT(heap()->map_space()->Contains(HeapObject::FromAddress(map_addr)));
2856 2866
2857 // Get forwarding address before resetting map pointer 2867 // Get forwarding address before resetting map pointer
2858 Address new_addr = GetForwardingAddressInOldSpace(obj); 2868 Address new_addr = GetForwardingAddressInOldSpace(obj);
2859 2869
2860 // Reset map pointer. The meta map object may not be copied yet so 2870 // Reset map pointer. The meta map object may not be copied yet so
2861 // Map::cast does not yet work. 2871 // Map::cast does not yet work.
2862 obj->set_map(reinterpret_cast<Map*>(HeapObject::FromAddress(map_addr))); 2872 obj->set_map(reinterpret_cast<Map*>(HeapObject::FromAddress(map_addr)));
2863 2873
2864 Address old_addr = obj->address(); 2874 Address old_addr = obj->address();
2865 2875
2866 if (new_addr != old_addr) { 2876 if (new_addr != old_addr) {
2867 // Move contents. 2877 // Move contents.
2868 heap_->MoveBlockToOldSpaceAndUpdateRegionMarks(new_addr, 2878 heap()->MoveBlockToOldSpaceAndUpdateRegionMarks(new_addr,
2869 old_addr, 2879 old_addr,
2870 Map::kSize); 2880 Map::kSize);
2871 } 2881 }
2872 2882
2873 #ifdef DEBUG 2883 #ifdef DEBUG
2874 if (FLAG_gc_verbose) { 2884 if (FLAG_gc_verbose) {
2875 PrintF("relocate %p -> %p\n", old_addr, new_addr); 2885 PrintF("relocate %p -> %p\n", old_addr, new_addr);
2876 } 2886 }
2877 #endif 2887 #endif
2878 2888
(...skipping 25 matching lines...)
2904 #endif 2914 #endif
2905 2915
2906 return obj_size; 2916 return obj_size;
2907 } 2917 }
2908 2918
2909 2919
2910 int MarkCompactCollector::RelocateOldNonCodeObject(HeapObject* obj, 2920 int MarkCompactCollector::RelocateOldNonCodeObject(HeapObject* obj,
2911 PagedSpace* space) { 2921 PagedSpace* space) {
2912 // Recover map pointer. 2922 // Recover map pointer.
2913 MapWord encoding = obj->map_word(); 2923 MapWord encoding = obj->map_word();
2914 Address map_addr = encoding.DecodeMapAddress(heap_->map_space()); 2924 Address map_addr = encoding.DecodeMapAddress(heap()->map_space());
2915 ASSERT(heap_->map_space()->Contains(map_addr)); 2925 ASSERT(heap()->map_space()->Contains(map_addr));
2916 2926
2917 // Get forwarding address before resetting map pointer. 2927 // Get forwarding address before resetting map pointer.
2918 Address new_addr = GetForwardingAddressInOldSpace(obj); 2928 Address new_addr = GetForwardingAddressInOldSpace(obj);
2919 2929
2920 // Reset the map pointer. 2930 // Reset the map pointer.
2921 int obj_size = RestoreMap(obj, space, new_addr, map_addr); 2931 int obj_size = RestoreMap(obj, space, new_addr, map_addr);
2922 2932
2923 Address old_addr = obj->address(); 2933 Address old_addr = obj->address();
2924 2934
2925 if (new_addr != old_addr) { 2935 if (new_addr != old_addr) {
2926 // Move contents. 2936 // Move contents.
2927 if (space == heap_->old_data_space()) { 2937 if (space == heap()->old_data_space()) {
2928 heap_->MoveBlock(new_addr, old_addr, obj_size); 2938 heap()->MoveBlock(new_addr, old_addr, obj_size);
2929 } else { 2939 } else {
2930 heap_->MoveBlockToOldSpaceAndUpdateRegionMarks(new_addr, 2940 heap()->MoveBlockToOldSpaceAndUpdateRegionMarks(new_addr,
2931 old_addr, 2941 old_addr,
2932 obj_size); 2942 obj_size);
2933 } 2943 }
2934 } 2944 }
2935 2945
2936 ASSERT(!HeapObject::FromAddress(new_addr)->IsCode()); 2946 ASSERT(!HeapObject::FromAddress(new_addr)->IsCode());
2937 2947
2938 HeapObject* copied_to = HeapObject::FromAddress(new_addr); 2948 HeapObject* copied_to = HeapObject::FromAddress(new_addr);
2939 if (copied_to->IsSharedFunctionInfo()) { 2949 if (copied_to->IsSharedFunctionInfo()) {
2940 PROFILE(heap_->isolate(), 2950 PROFILE(heap()->isolate(),
2941 SharedFunctionInfoMoveEvent(old_addr, new_addr)); 2951 SharedFunctionInfoMoveEvent(old_addr, new_addr));
2942 } 2952 }
2943 HEAP_PROFILE(heap_, ObjectMoveEvent(old_addr, new_addr)); 2953 HEAP_PROFILE(heap(), ObjectMoveEvent(old_addr, new_addr));
2944 2954
2945 return obj_size; 2955 return obj_size;
2946 } 2956 }
2947 2957
2948 2958
2949 int MarkCompactCollector::RelocateOldPointerObject(HeapObject* obj) { 2959 int MarkCompactCollector::RelocateOldPointerObject(HeapObject* obj) {
2950 return RelocateOldNonCodeObject(obj, heap_->old_pointer_space()); 2960 return RelocateOldNonCodeObject(obj, heap()->old_pointer_space());
2951 } 2961 }
2952 2962
2953 2963
2954 int MarkCompactCollector::RelocateOldDataObject(HeapObject* obj) { 2964 int MarkCompactCollector::RelocateOldDataObject(HeapObject* obj) {
2955 return RelocateOldNonCodeObject(obj, heap_->old_data_space()); 2965 return RelocateOldNonCodeObject(obj, heap()->old_data_space());
2956 } 2966 }
2957 2967
2958 2968
2959 int MarkCompactCollector::RelocateCellObject(HeapObject* obj) { 2969 int MarkCompactCollector::RelocateCellObject(HeapObject* obj) {
2960 return RelocateOldNonCodeObject(obj, heap_->cell_space()); 2970 return RelocateOldNonCodeObject(obj, heap()->cell_space());
2961 } 2971 }
2962 2972
2963 2973
2964 int MarkCompactCollector::RelocateCodeObject(HeapObject* obj) { 2974 int MarkCompactCollector::RelocateCodeObject(HeapObject* obj) {
2965 // Recover map pointer. 2975 // Recover map pointer.
2966 MapWord encoding = obj->map_word(); 2976 MapWord encoding = obj->map_word();
2967 Address map_addr = encoding.DecodeMapAddress(heap_->map_space()); 2977 Address map_addr = encoding.DecodeMapAddress(heap()->map_space());
2968 ASSERT(heap_->map_space()->Contains(HeapObject::FromAddress(map_addr))); 2978 ASSERT(heap()->map_space()->Contains(HeapObject::FromAddress(map_addr)));
2969 2979
2970 // Get forwarding address before resetting map pointer 2980 // Get forwarding address before resetting map pointer
2971 Address new_addr = GetForwardingAddressInOldSpace(obj); 2981 Address new_addr = GetForwardingAddressInOldSpace(obj);
2972 2982
2973 // Reset the map pointer. 2983 // Reset the map pointer.
2974 int obj_size = RestoreMap(obj, heap_->code_space(), new_addr, map_addr); 2984 int obj_size = RestoreMap(obj, heap()->code_space(), new_addr, map_addr);
2975 2985
2976 Address old_addr = obj->address(); 2986 Address old_addr = obj->address();
2977 2987
2978 if (new_addr != old_addr) { 2988 if (new_addr != old_addr) {
2979 // Move contents. 2989 // Move contents.
2980 heap_->MoveBlock(new_addr, old_addr, obj_size); 2990 heap()->MoveBlock(new_addr, old_addr, obj_size);
2981 } 2991 }
2982 2992
2983 HeapObject* copied_to = HeapObject::FromAddress(new_addr); 2993 HeapObject* copied_to = HeapObject::FromAddress(new_addr);
2984 if (copied_to->IsCode()) { 2994 if (copied_to->IsCode()) {
2985 // May also update inline cache target. 2995 // May also update inline cache target.
2986 Code::cast(copied_to)->Relocate(new_addr - old_addr); 2996 Code::cast(copied_to)->Relocate(new_addr - old_addr);
2987 // Notify the logger that compiled code has moved. 2997 // Notify the logger that compiled code has moved.
2988 PROFILE(heap_->isolate(), CodeMoveEvent(old_addr, new_addr)); 2998 PROFILE(heap()->isolate(), CodeMoveEvent(old_addr, new_addr));
2989 } 2999 }
2990 HEAP_PROFILE(heap_, ObjectMoveEvent(old_addr, new_addr)); 3000 HEAP_PROFILE(heap(), ObjectMoveEvent(old_addr, new_addr));
2991 3001
2992 return obj_size; 3002 return obj_size;
2993 } 3003 }
2994 3004
2995 3005
2996 int MarkCompactCollector::RelocateNewObject(HeapObject* obj) { 3006 int MarkCompactCollector::RelocateNewObject(HeapObject* obj) {
2997 int obj_size = obj->Size(); 3007 int obj_size = obj->Size();
2998 3008
2999 // Get forwarding address 3009 // Get forwarding address
3000 Address old_addr = obj->address(); 3010 Address old_addr = obj->address();
3001 int offset = heap_->new_space()->ToSpaceOffsetForAddress(old_addr); 3011 int offset = heap()->new_space()->ToSpaceOffsetForAddress(old_addr);
3002 3012
3003 Address new_addr = 3013 Address new_addr =
3004 Memory::Address_at(heap_->new_space()->FromSpaceLow() + offset); 3014 Memory::Address_at(heap()->new_space()->FromSpaceLow() + offset);
3005 3015
3006 #ifdef DEBUG 3016 #ifdef DEBUG
3007 if (heap_->new_space()->FromSpaceContains(new_addr)) { 3017 if (heap()->new_space()->FromSpaceContains(new_addr)) {
3008 ASSERT(heap_->new_space()->FromSpaceOffsetForAddress(new_addr) <= 3018 ASSERT(heap()->new_space()->FromSpaceOffsetForAddress(new_addr) <=
3009 heap_->new_space()->ToSpaceOffsetForAddress(old_addr)); 3019 heap()->new_space()->ToSpaceOffsetForAddress(old_addr));
3010 } else { 3020 } else {
3011 ASSERT(heap_->TargetSpace(obj) == heap_->old_pointer_space() || 3021 ASSERT(heap()->TargetSpace(obj) == heap()->old_pointer_space() ||
3012 heap_->TargetSpace(obj) == heap_->old_data_space()); 3022 heap()->TargetSpace(obj) == heap()->old_data_space());
3013 } 3023 }
3014 #endif 3024 #endif
3015 3025
3016 // New and old addresses cannot overlap. 3026 // New and old addresses cannot overlap.
3017 if (heap_->InNewSpace(HeapObject::FromAddress(new_addr))) { 3027 if (heap()->InNewSpace(HeapObject::FromAddress(new_addr))) {
3018 heap_->CopyBlock(new_addr, old_addr, obj_size); 3028 heap()->CopyBlock(new_addr, old_addr, obj_size);
3019 } else { 3029 } else {
3020 heap_->CopyBlockToOldSpaceAndUpdateRegionMarks(new_addr, 3030 heap()->CopyBlockToOldSpaceAndUpdateRegionMarks(new_addr,
3021 old_addr, 3031 old_addr,
3022 obj_size); 3032 obj_size);
3023 } 3033 }
3024 3034
3025 #ifdef DEBUG 3035 #ifdef DEBUG
3026 if (FLAG_gc_verbose) { 3036 if (FLAG_gc_verbose) {
3027 PrintF("relocate %p -> %p\n", old_addr, new_addr); 3037 PrintF("relocate %p -> %p\n", old_addr, new_addr);
3028 } 3038 }
3029 #endif 3039 #endif
3030 3040
3031 HeapObject* copied_to = HeapObject::FromAddress(new_addr); 3041 HeapObject* copied_to = HeapObject::FromAddress(new_addr);
3032 if (copied_to->IsSharedFunctionInfo()) { 3042 if (copied_to->IsSharedFunctionInfo()) {
3033 PROFILE(heap_->isolate(), 3043 PROFILE(heap()->isolate(),
3034 SharedFunctionInfoMoveEvent(old_addr, new_addr)); 3044 SharedFunctionInfoMoveEvent(old_addr, new_addr));
3035 } 3045 }
3036 HEAP_PROFILE(heap_, ObjectMoveEvent(old_addr, new_addr)); 3046 HEAP_PROFILE(heap(), ObjectMoveEvent(old_addr, new_addr));
3037 3047
3038 return obj_size; 3048 return obj_size;
3039 } 3049 }
3040 3050
3041 3051
3042 void MarkCompactCollector::EnableCodeFlushing(bool enable) { 3052 void MarkCompactCollector::EnableCodeFlushing(bool enable) {
3043 if (enable) { 3053 if (enable) {
3044 if (code_flusher_ != NULL) return; 3054 if (code_flusher_ != NULL) return;
3045 code_flusher_ = new CodeFlusher(heap_->isolate()); 3055 code_flusher_ = new CodeFlusher(heap()->isolate());
3046 } else { 3056 } else {
3047 if (code_flusher_ == NULL) return; 3057 if (code_flusher_ == NULL) return;
3048 delete code_flusher_; 3058 delete code_flusher_;
3049 code_flusher_ = NULL; 3059 code_flusher_ = NULL;
3050 } 3060 }
3051 } 3061 }
3052 3062
3053 3063
3054 void MarkCompactCollector::ReportDeleteIfNeeded(HeapObject* obj, 3064 void MarkCompactCollector::ReportDeleteIfNeeded(HeapObject* obj,
3055 Isolate* isolate) { 3065 Isolate* isolate) {
(...skipping 17 matching lines...)
3073 } 3083 }
3074 3084
3075 3085
3076 void MarkCompactCollector::Initialize() { 3086 void MarkCompactCollector::Initialize() {
3077 StaticPointersToNewGenUpdatingVisitor::Initialize(); 3087 StaticPointersToNewGenUpdatingVisitor::Initialize();
3078 StaticMarkingVisitor::Initialize(); 3088 StaticMarkingVisitor::Initialize();
3079 } 3089 }
3080 3090
3081 3091
3082 } } // namespace v8::internal 3092 } } // namespace v8::internal