Chromium Code Reviews

Side by Side Diff: src/mark-compact.cc

Issue 7032005: Unify markbits for old and new spaces. (Closed) Base URL: https://v8.googlecode.com/svn/branches/experimental/gc
Patch Set: Created 9 years, 7 months ago
1 // Copyright 2011 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 50 matching lines...)
61 live_code_objects_size_(0), 61 live_code_objects_size_(0),
62 live_map_objects_size_(0), 62 live_map_objects_size_(0),
63 live_cell_objects_size_(0), 63 live_cell_objects_size_(0),
64 live_lo_objects_size_(0), 64 live_lo_objects_size_(0),
65 live_bytes_(0), 65 live_bytes_(0),
66 #endif 66 #endif
67 heap_(NULL), 67 heap_(NULL),
68 code_flusher_(NULL) { } 68 code_flusher_(NULL) { }
69 69
70 70
71 bool Marking::Setup() {
72 if (new_space_bitmap_ == NULL) {
73 // TODO(gc) ISOLATES
74 int markbits_per_newspace =
75 (2*HEAP->ReservedSemiSpaceSize()) >> kPointerSizeLog2;
76
77 new_space_bitmap_ =
78 BitmapStorageDescriptor::Allocate(
79 NewSpaceMarkbitsBitmap::CellsForLength(markbits_per_newspace));
80 }
81 return new_space_bitmap_ != NULL;
82 }
83
84
85 void Marking::TearDown() {
86 if (new_space_bitmap_ != NULL) {
87 BitmapStorageDescriptor::Free(new_space_bitmap_);
88 new_space_bitmap_ = NULL;
89 }
90 }
91
92
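Note on the removed Marking::Setup above: it sized the new-space mark bitmap at one bit per pointer-sized word across both semispaces, rounded up to whole 32-bit cells. A minimal standalone sketch of that arithmetic; the constants and names here are illustrative, not the real V8 API:

#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <vector>

const int kPointerSizeLog2 = (sizeof(void*) == 8) ? 3 : 2;
const int kBitsPerCell = 32;  // one MarkBit::CellType (uint32_t) holds 32 bits

int main() {
  // Hypothetical reserved size of one semispace, in bytes.
  std::size_t reserved_semispace_size = 8 * 1024 * 1024;

  // One mark bit per pointer-sized word, across both semispaces.
  std::size_t markbits = (2 * reserved_semispace_size) >> kPointerSizeLog2;

  // CellsForLength: round up to whole 32-bit cells.
  std::size_t cells = (markbits + kBitsPerCell - 1) / kBitsPerCell;

  std::vector<uint32_t> bitmap(cells, 0);  // the backing storage
  std::printf("%zu markbits in %zu cells (%zu bytes)\n",
              markbits, cells, cells * sizeof(uint32_t));
  return 0;
}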
93 #ifdef DEBUG 71 #ifdef DEBUG
94 class VerifyMarkingVisitor: public ObjectVisitor { 72 class VerifyMarkingVisitor: public ObjectVisitor {
95 public: 73 public:
96 void VisitPointers(Object** start, Object** end) { 74 void VisitPointers(Object** start, Object** end) {
97 for (Object** current = start; current < end; current++) { 75 for (Object** current = start; current < end; current++) {
98 if ((*current)->IsHeapObject()) { 76 if ((*current)->IsHeapObject()) {
99 HeapObject* object = HeapObject::cast(*current); 77 HeapObject* object = HeapObject::cast(*current);
100 ASSERT(HEAP->mark_compact_collector()->IsMarked(object)); 78 ASSERT(HEAP->mark_compact_collector()->IsMarked(object));
101 } 79 }
102 } 80 }
(...skipping 102 matching lines...)
205 ASSERT(p->markbits()->IsClean()); 183 ASSERT(p->markbits()->IsClean());
206 } 184 }
207 } 185 }
208 186
209 static void VerifyMarkbitsAreClean() { 187 static void VerifyMarkbitsAreClean() {
210 VerifyMarkbitsAreClean(HEAP->old_pointer_space()); 188 VerifyMarkbitsAreClean(HEAP->old_pointer_space());
211 VerifyMarkbitsAreClean(HEAP->old_data_space()); 189 VerifyMarkbitsAreClean(HEAP->old_data_space());
212 VerifyMarkbitsAreClean(HEAP->code_space()); 190 VerifyMarkbitsAreClean(HEAP->code_space());
213 VerifyMarkbitsAreClean(HEAP->cell_space()); 191 VerifyMarkbitsAreClean(HEAP->cell_space());
214 VerifyMarkbitsAreClean(HEAP->map_space()); 192 VerifyMarkbitsAreClean(HEAP->map_space());
193 ASSERT(HEAP->new_space()->ActivePage()->markbits()->IsClean());
215 } 194 }
216 #endif 195 #endif
217 196
218 197
219 static void ClearMarkbits(PagedSpace* space) { 198 static void ClearMarkbits(PagedSpace* space) {
220 PageIterator it(space); 199 PageIterator it(space);
221 200
222 while (it.has_next()) { 201 while (it.has_next()) {
223 Page* p = it.next(); 202 Page* p = it.next();
224 p->markbits()->Clear(); 203 p->markbits()->Clear();
225 } 204 }
226 } 205 }
227 206
228 207
229 static void ClearMarkbits() { 208 static void ClearMarkbits() {
230 // TODO(gc): Clean the mark bits while sweeping. 209 // TODO(gc): Clean the mark bits while sweeping.
231 ClearMarkbits(HEAP->code_space()); 210 ClearMarkbits(HEAP->code_space());
232 ClearMarkbits(HEAP->map_space()); 211 ClearMarkbits(HEAP->map_space());
233 ClearMarkbits(HEAP->old_pointer_space()); 212 ClearMarkbits(HEAP->old_pointer_space());
234 ClearMarkbits(HEAP->old_data_space()); 213 ClearMarkbits(HEAP->old_data_space());
235 ClearMarkbits(HEAP->cell_space()); 214 ClearMarkbits(HEAP->cell_space());
215 HEAP->new_space()->ActivePage()->markbits()->Clear();
236 } 216 }
237 217
238 218
239 void Marking::TransferMark(Address old_start, Address new_start) { 219 void Marking::TransferMark(Address old_start, Address new_start) {
240 if (old_start == new_start) return; 220 if (old_start == new_start) return;
241 221
242 MarkBit new_mark_bit = MarkBitFrom(new_start); 222 MarkBit new_mark_bit = MarkBitFrom(new_start);
243 223
244 if (heap_->incremental_marking()->IsMarking()) { 224 if (heap_->incremental_marking()->IsMarking()) {
245 MarkBit old_mark_bit = MarkBitFrom(old_start); 225 MarkBit old_mark_bit = MarkBitFrom(old_start);
(...skipping 59 matching lines...)
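The start of Marking::TransferMark above captures the core idea: when an object's start address changes (for example when an array is left-trimmed), its mark bit has to move with it. A toy model of that move, assuming a simple flat bitmap rather than V8's per-page bitmaps:

#include <cassert>
#include <cstddef>
#include <cstdint>
#include <vector>

// Toy flat bitmap: one bit per pointer-sized word of a heap area.
struct ToyBitmap {
  std::vector<uint32_t> cells;
  explicit ToyBitmap(std::size_t bits) : cells((bits + 31) / 32, 0) {}
  bool Get(std::size_t i) const { return (cells[i / 32] >> (i % 32)) & 1; }
  void Set(std::size_t i) { cells[i / 32] |= 1u << (i % 32); }
  void Clear(std::size_t i) { cells[i / 32] &= ~(1u << (i % 32)); }
};

// Same shape as Marking::TransferMark: nothing to do if the start did not
// move; otherwise the bit travels so the object stays marked.
void TransferMark(ToyBitmap* bm, std::size_t old_index, std::size_t new_index) {
  if (old_index == new_index) return;
  if (bm->Get(old_index)) {
    bm->Clear(old_index);
    bm->Set(new_index);
  }
}

int main() {
  ToyBitmap bm(1024);
  bm.Set(10);
  TransferMark(&bm, 10, 12);  // e.g. an array left-trimmed by two words
  assert(!bm.Get(10) && bm.Get(12));
  return 0;
}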
305 } 285 }
306 #endif 286 #endif
307 287
308 PagedSpaces spaces; 288 PagedSpaces spaces;
309 for (PagedSpace* space = spaces.next(); 289 for (PagedSpace* space = spaces.next();
310 space != NULL; space = spaces.next()) { 290 space != NULL; space = spaces.next()) {
311 space->PrepareForMarkCompact(compacting_collection_); 291 space->PrepareForMarkCompact(compacting_collection_);
312 } 292 }
313 293
314 if (!heap()->incremental_marking()->IsMarking()) { 294 if (!heap()->incremental_marking()->IsMarking()) {
315 Address new_space_bottom = heap()->new_space()->bottom();
316 uintptr_t new_space_size =
317 RoundUp(heap()->new_space()->top() - new_space_bottom,
318 32 * kPointerSize);
319
320 heap()->marking()->ClearRange(new_space_bottom, new_space_size);
321
322 ClearMarkbits(); 295 ClearMarkbits();
323 #ifdef DEBUG 296 #ifdef DEBUG
324 VerifyMarkbitsAreClean(); 297 VerifyMarkbitsAreClean();
325 #endif 298 #endif
326 } 299 }
327 300
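The new-space clearing removed above rounded the live byte range up to a multiple of 32 * kPointerSize, i.e. to whole bitmap cells, so ClearRange can zero whole 32-bit words. A small sketch of that rounding with illustrative values:

#include <cstdint>
#include <cstdio>

const uintptr_t kPointerSize = sizeof(void*);

// Round value up to a power-of-two alignment, as V8's RoundUp does.
uintptr_t RoundUp(uintptr_t value, uintptr_t alignment) {
  return (value + alignment - 1) & ~(alignment - 1);
}

int main() {
  uintptr_t bottom = 0x10000;
  uintptr_t top = bottom + 100 * kPointerSize;  // 100 allocated words
  uintptr_t size = RoundUp(top - bottom, 32 * kPointerSize);
  // 100 words round up to 128 words: exactly four 32-bit bitmap cells.
  std::printf("clear %lu words = %lu cells\n",
              (unsigned long)(size / kPointerSize),
              (unsigned long)(size / (32 * kPointerSize)));
  return 0;
}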
328 #ifdef DEBUG 301 #ifdef DEBUG
329 live_bytes_ = 0; 302 live_bytes_ = 0;
330 live_young_objects_size_ = 0; 303 live_young_objects_size_ = 0;
331 live_old_pointer_objects_size_ = 0; 304 live_old_pointer_objects_size_ = 0;
(...skipping 104 matching lines...)
436 Code* lazy_compile = isolate_->builtins()->builtin(Builtins::kLazyCompile); 409 Code* lazy_compile = isolate_->builtins()->builtin(Builtins::kLazyCompile);
437 410
438 JSFunction* candidate = jsfunction_candidates_head_; 411 JSFunction* candidate = jsfunction_candidates_head_;
439 JSFunction* next_candidate; 412 JSFunction* next_candidate;
440 while (candidate != NULL) { 413 while (candidate != NULL) {
441 next_candidate = GetNextCandidate(candidate); 414 next_candidate = GetNextCandidate(candidate);
442 415
443 SharedFunctionInfo* shared = candidate->unchecked_shared(); 416 SharedFunctionInfo* shared = candidate->unchecked_shared();
444 417
445 Code* code = shared->unchecked_code(); 418 Code* code = shared->unchecked_code();
446 MarkBit code_mark = Marking::MarkBitFromOldSpace(code); 419 MarkBit code_mark = Marking::MarkBitFrom(code);
447 if (!code_mark.Get()) { 420 if (!code_mark.Get()) {
448 shared->set_code(lazy_compile); 421 shared->set_code(lazy_compile);
449 candidate->set_code(lazy_compile); 422 candidate->set_code(lazy_compile);
450 } else { 423 } else {
451 candidate->set_code(shared->unchecked_code()); 424 candidate->set_code(shared->unchecked_code());
452 } 425 }
453 426
454 candidate = next_candidate; 427 candidate = next_candidate;
455 } 428 }
456 429
457 jsfunction_candidates_head_ = NULL; 430 jsfunction_candidates_head_ = NULL;
458 } 431 }
459 432
460 433
461 void ProcessSharedFunctionInfoCandidates() { 434 void ProcessSharedFunctionInfoCandidates() {
462 Code* lazy_compile = isolate_->builtins()->builtin(Builtins::kLazyCompile); 435 Code* lazy_compile = isolate_->builtins()->builtin(Builtins::kLazyCompile);
463 436
464 SharedFunctionInfo* candidate = shared_function_info_candidates_head_; 437 SharedFunctionInfo* candidate = shared_function_info_candidates_head_;
465 SharedFunctionInfo* next_candidate; 438 SharedFunctionInfo* next_candidate;
466 while (candidate != NULL) { 439 while (candidate != NULL) {
467 next_candidate = GetNextCandidate(candidate); 440 next_candidate = GetNextCandidate(candidate);
468 SetNextCandidate(candidate, NULL); 441 SetNextCandidate(candidate, NULL);
469 442
470 Code* code = candidate->unchecked_code(); 443 Code* code = candidate->unchecked_code();
471 MarkBit code_mark = Marking::MarkBitFromOldSpace(code); 444 MarkBit code_mark = Marking::MarkBitFrom(code);
472 if (!code_mark.Get()) { 445 if (!code_mark.Get()) {
473 candidate->set_code(lazy_compile); 446 candidate->set_code(lazy_compile);
474 } 447 }
475 448
476 candidate = next_candidate; 449 candidate = next_candidate;
477 } 450 }
478 451
479 shared_function_info_candidates_head_ = NULL; 452 shared_function_info_candidates_head_ = NULL;
480 } 453 }
481 454
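Both candidate-processing methods above walk an intrusive singly linked list threaded through the candidates themselves and reset any candidate whose code did not get marked. A condensed model of that pattern; Code, Function, and ProcessCandidates are simplified stand-ins, not the V8 classes:

#include <cstddef>
#include <cstdio>

struct Code { bool marked; };
static Code lazy_compile = { true };  // stands in for the LazyCompile stub

struct Function {
  Code* code;
  Function* next_candidate;
};

// Walk the intrusive candidate list; flush code that was not marked live.
void ProcessCandidates(Function* head) {
  Function* candidate = head;
  while (candidate != NULL) {
    Function* next = candidate->next_candidate;
    if (!candidate->code->marked) {
      candidate->code = &lazy_compile;  // recompile lazily on next call
    }
    candidate->next_candidate = NULL;  // unlink, as the real code does
    candidate = next;
  }
}

int main() {
  Code live = { true }, dead = { false };
  Function f2 = { &dead, NULL };
  Function f1 = { &live, &f2 };
  ProcessCandidates(&f1);
  std::printf("f1 kept its code: %d, f2 flushed: %d\n",
              f1.code == &live, f2.code == &lazy_compile);
  return 0;
}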
(...skipping 164 matching lines...)
646 } 619 }
647 620
648 static inline void VisitCodeTarget(Heap* heap, RelocInfo* rinfo) { 621 static inline void VisitCodeTarget(Heap* heap, RelocInfo* rinfo) {
649 ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode())); 622 ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode()));
650 Code* code = Code::GetCodeFromTargetAddress(rinfo->target_address()); 623 Code* code = Code::GetCodeFromTargetAddress(rinfo->target_address());
651 if (FLAG_cleanup_ics_at_gc && code->is_inline_cache_stub()) { 624 if (FLAG_cleanup_ics_at_gc && code->is_inline_cache_stub()) {
652 IC::Clear(rinfo->pc()); 625 IC::Clear(rinfo->pc());
653 // Please note targets for cleared inline caches do not have to be 626 // Please note targets for cleared inline caches do not have to be
654 // marked since they are contained in HEAP->non_monomorphic_cache(). 627 // marked since they are contained in HEAP->non_monomorphic_cache().
655 } else { 628 } else {
656 MarkBit code_mark = Marking::MarkBitFromOldSpace(code); 629 MarkBit code_mark = Marking::MarkBitFrom(code);
657 heap->mark_compact_collector()->MarkObject(code, code_mark); 630 heap->mark_compact_collector()->MarkObject(code, code_mark);
658 } 631 }
659 } 632 }
660 633
661 static void VisitGlobalPropertyCell(Heap* heap, RelocInfo* rinfo) { 634 static void VisitGlobalPropertyCell(Heap* heap, RelocInfo* rinfo) {
662 ASSERT(rinfo->rmode() == RelocInfo::GLOBAL_PROPERTY_CELL); 635 ASSERT(rinfo->rmode() == RelocInfo::GLOBAL_PROPERTY_CELL);
663 Object* cell = rinfo->target_cell(); 636 Object* cell = rinfo->target_cell();
664 Object* old_cell = cell; 637 Object* old_cell = cell;
665 VisitPointer(heap, &cell); 638 VisitPointer(heap, &cell);
666 if (cell != old_cell) { 639 if (cell != old_cell) {
667 rinfo->set_target_cell(reinterpret_cast<JSGlobalPropertyCell*>(cell), 640 rinfo->set_target_cell(reinterpret_cast<JSGlobalPropertyCell*>(cell),
668 NULL); 641 NULL);
669 } 642 }
670 } 643 }
671 644
672 static inline void VisitDebugTarget(Heap* heap, RelocInfo* rinfo) { 645 static inline void VisitDebugTarget(Heap* heap, RelocInfo* rinfo) {
673 ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) && 646 ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) &&
674 rinfo->IsPatchedReturnSequence()) || 647 rinfo->IsPatchedReturnSequence()) ||
675 (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) && 648 (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
676 rinfo->IsPatchedDebugBreakSlotSequence())); 649 rinfo->IsPatchedDebugBreakSlotSequence()));
677 HeapObject* code = Code::GetCodeFromTargetAddress(rinfo->call_address()); 650 HeapObject* code = Code::GetCodeFromTargetAddress(rinfo->call_address());
678 MarkBit code_mark = Marking::MarkBitFromOldSpace(code); 651 MarkBit code_mark = Marking::MarkBitFrom(code);
679 heap->mark_compact_collector()->MarkObject(code, code_mark); 652 heap->mark_compact_collector()->MarkObject(code, code_mark);
680 } 653 }
681 654
682 // Mark object pointed to by p. 655 // Mark object pointed to by p.
683 INLINE(static void MarkObjectByPointer(Heap* heap, Object** p)) { 656 INLINE(static void MarkObjectByPointer(Heap* heap, Object** p)) {
684 if (!(*p)->IsHeapObject()) return; 657 if (!(*p)->IsHeapObject()) return;
685 HeapObject* object = ShortCircuitConsString(p); 658 HeapObject* object = ShortCircuitConsString(p);
686 MarkBit mark = heap->marking()->MarkBitFrom(object); 659 MarkBit mark = heap->marking()->MarkBitFrom(object);
687 heap->mark_compact_collector()->MarkObject(object, mark); 660 heap->mark_compact_collector()->MarkObject(object, mark);
688 } 661 }
689 662
690 663
691 // Visit an unmarked object. 664 // Visit an unmarked object.
692 INLINE(static void VisitUnmarkedObject(MarkCompactCollector* collector, 665 INLINE(static void VisitUnmarkedObject(MarkCompactCollector* collector,
693 HeapObject* obj)) { 666 HeapObject* obj)) {
694 #ifdef DEBUG 667 #ifdef DEBUG
695 ASSERT(Isolate::Current()->heap()->Contains(obj)); 668 ASSERT(Isolate::Current()->heap()->Contains(obj));
696 ASSERT(!HEAP->mark_compact_collector()->IsMarked(obj)); 669 ASSERT(!HEAP->mark_compact_collector()->IsMarked(obj));
697 #endif 670 #endif
698 Map* map = obj->map(); 671 Map* map = obj->map();
699 Heap* heap = obj->GetHeap(); 672 Heap* heap = obj->GetHeap();
700 // TODO(gc) ISOLATES MERGE 673 // TODO(gc) ISOLATES MERGE
701 MarkBit mark = heap->marking()->MarkBitFrom(obj); 674 MarkBit mark = heap->marking()->MarkBitFrom(obj);
702 heap->mark_compact_collector()->SetMark(obj, mark); 675 heap->mark_compact_collector()->SetMark(obj, mark);
703 // Mark the map pointer and the body. 676 // Mark the map pointer and the body.
704 MarkBit map_mark = Marking::MarkBitFromOldSpace(map); 677 MarkBit map_mark = Marking::MarkBitFrom(map);
705 heap->mark_compact_collector()->MarkObject(map, map_mark); 678 heap->mark_compact_collector()->MarkObject(map, map_mark);
706 IterateBody(map, obj); 679 IterateBody(map, obj);
707 } 680 }
708 681
709 // Visit all unmarked objects pointed to by [start, end). 682 // Visit all unmarked objects pointed to by [start, end).
710 // Returns false if the operation fails (lack of stack space). 683 // Returns false if the operation fails (lack of stack space).
711 static inline bool VisitUnmarkedObjects(Heap* heap, 684 static inline bool VisitUnmarkedObjects(Heap* heap,
712 Object** start, 685 Object** start,
713 Object** end) { 686 Object** end) {
714 // Return false if we are close to the stack limit. 687 // Return false if we are close to the stack limit.
(...skipping 62 matching lines...)
777 return function->unchecked_code() != 750 return function->unchecked_code() !=
778 function->GetIsolate()->builtins()->builtin(Builtins::kLazyCompile); 751 function->GetIsolate()->builtins()->builtin(Builtins::kLazyCompile);
779 } 752 }
780 753
781 inline static bool IsFlushable(Heap* heap, JSFunction* function) { 754 inline static bool IsFlushable(Heap* heap, JSFunction* function) {
782 SharedFunctionInfo* shared_info = function->unchecked_shared(); 755 SharedFunctionInfo* shared_info = function->unchecked_shared();
783 756
784 // Code is either on stack, in compilation cache or referenced 757 // Code is either on stack, in compilation cache or referenced
785 // by optimized version of function. 758 // by optimized version of function.
786 MarkBit code_mark = 759 MarkBit code_mark =
787 Marking::MarkBitFromOldSpace(function->unchecked_code()); 760 Marking::MarkBitFrom(function->unchecked_code());
788 if (code_mark.Get()) { 761 if (code_mark.Get()) {
789 shared_info->set_code_age(0); 762 shared_info->set_code_age(0);
790 return false; 763 return false;
791 } 764 }
792 765
793 // We do not flush code for optimized functions. 766 // We do not flush code for optimized functions.
794 if (function->code() != shared_info->unchecked_code()) { 767 if (function->code() != shared_info->unchecked_code()) {
795 return false; 768 return false;
796 } 769 }
797 770
798 return IsFlushable(heap, shared_info); 771 return IsFlushable(heap, shared_info);
799 } 772 }
800 773
801 inline static bool IsFlushable(Heap* heap, SharedFunctionInfo* shared_info) { 774 inline static bool IsFlushable(Heap* heap, SharedFunctionInfo* shared_info) {
802 // Code is either on stack, in compilation cache or referenced 775 // Code is either on stack, in compilation cache or referenced
803 // by optimized version of function. 776 // by optimized version of function.
804 MarkBit code_mark = 777 MarkBit code_mark =
805 Marking::MarkBitFromOldSpace(shared_info->unchecked_code()); 778 Marking::MarkBitFrom(shared_info->unchecked_code());
806 if (code_mark.Get()) { 779 if (code_mark.Get()) {
807 shared_info->set_code_age(0); 780 shared_info->set_code_age(0);
808 return false; 781 return false;
809 } 782 }
810 783
811 // The function must be compiled and have the source code available, 784 // The function must be compiled and have the source code available,
812 // to be able to recompile it in case we need the function again. 785 // to be able to recompile it in case we need the function again.
813 if (!(shared_info->is_compiled() && HasSourceCode(heap, shared_info))) { 786 if (!(shared_info->is_compiled() && HasSourceCode(heap, shared_info))) {
814 return false; 787 return false;
815 } 788 }
(...skipping 131 matching lines...)
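The IsFlushable overloads above boil down to a few checks: code that is still marked stays live (and its age is reset), and code is only flushed when the function could be recompiled from source later; the JSFunction overload additionally refuses to flush optimized code. A condensed model of that decision, with a hypothetical SharedInfo struct standing in for SharedFunctionInfo:

#include <cstdio>

// Hypothetical condensed stand-in for SharedFunctionInfo.
struct SharedInfo {
  bool code_marked;  // code on stack, in a cache, or otherwise reachable
  bool compiled;
  bool has_source;
  int code_age;
};

bool IsFlushable(SharedInfo* s) {
  if (s->code_marked) {
    s->code_age = 0;   // live code: reset its age and keep it
    return false;
  }
  // Only flush if the function can be recompiled from source later.
  if (!s->compiled || !s->has_source) return false;
  return true;
}

int main() {
  SharedInfo live = { true, true, true, 5 };
  SharedInfo stale = { false, true, true, 5 };
  bool flush_live = IsFlushable(&live);    // false; also resets code_age
  bool flush_stale = IsFlushable(&stale);  // true
  std::printf("live: %d (age now %d), stale: %d\n",
              flush_live, live.code_age, flush_stale);
  return 0;
}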
947 920
948 JSFunction* jsfunction = reinterpret_cast<JSFunction*>(object); 921 JSFunction* jsfunction = reinterpret_cast<JSFunction*>(object);
949 // The function must have a valid context and not be a builtin. 922 // The function must have a valid context and not be a builtin.
950 bool flush_code_candidate = false; 923 bool flush_code_candidate = false;
951 if (IsValidNotBuiltinContext(jsfunction->unchecked_context())) { 924 if (IsValidNotBuiltinContext(jsfunction->unchecked_context())) {
952 flush_code_candidate = FlushCodeForFunction(heap, jsfunction); 925 flush_code_candidate = FlushCodeForFunction(heap, jsfunction);
953 } 926 }
954 927
955 if (!flush_code_candidate) { 928 if (!flush_code_candidate) {
956 Code* code = jsfunction->unchecked_shared()->unchecked_code(); 929 Code* code = jsfunction->unchecked_shared()->unchecked_code();
957 MarkBit code_mark = Marking::MarkBitFromOldSpace(code); 930 MarkBit code_mark = Marking::MarkBitFrom(code);
958 HEAP->mark_compact_collector()->MarkObject(code, code_mark); 931 HEAP->mark_compact_collector()->MarkObject(code, code_mark);
959 932
960 if (jsfunction->unchecked_code()->kind() == Code::OPTIMIZED_FUNCTION) { 933 if (jsfunction->unchecked_code()->kind() == Code::OPTIMIZED_FUNCTION) {
961 // For optimized functions we should retain both non-optimized version 934 // For optimized functions we should retain both non-optimized version
962 // of its code and non-optimized version of all inlined functions. 935 // of its code and non-optimized version of all inlined functions.
963 // This is required to support bailing out from inlined code. 936 // This is required to support bailing out from inlined code.
964 DeoptimizationInputData* data = 937 DeoptimizationInputData* data =
965 reinterpret_cast<DeoptimizationInputData*>( 938 reinterpret_cast<DeoptimizationInputData*>(
966 jsfunction->unchecked_code()->unchecked_deoptimization_data()); 939 jsfunction->unchecked_code()->unchecked_deoptimization_data());
967 940
968 FixedArray* literals = data->UncheckedLiteralArray(); 941 FixedArray* literals = data->UncheckedLiteralArray();
969 942
970 for (int i = 0, count = data->InlinedFunctionCount()->value(); 943 for (int i = 0, count = data->InlinedFunctionCount()->value();
971 i < count; 944 i < count;
972 i++) { 945 i++) {
973 JSFunction* inlined = reinterpret_cast<JSFunction*>(literals->get(i)); 946 JSFunction* inlined = reinterpret_cast<JSFunction*>(literals->get(i));
974 Code* inlined_code = inlined->unchecked_shared()->unchecked_code(); 947 Code* inlined_code = inlined->unchecked_shared()->unchecked_code();
975 MarkBit inlined_code_mark = 948 MarkBit inlined_code_mark =
976 Marking::MarkBitFromOldSpace(inlined_code); 949 Marking::MarkBitFrom(inlined_code);
977 HEAP->mark_compact_collector()->MarkObject( 950 HEAP->mark_compact_collector()->MarkObject(
978 inlined_code, inlined_code_mark); 951 inlined_code, inlined_code_mark);
979 } 952 }
980 } 953 }
981 } 954 }
982 955
983 VisitJSFunctionFields(map, 956 VisitJSFunctionFields(map,
984 reinterpret_cast<JSFunction*>(object), 957 reinterpret_cast<JSFunction*>(object),
985 flush_code_candidate); 958 flush_code_candidate);
986 } 959 }
(...skipping 24 matching lines...)
1011 } else { 984 } else {
1012 // Don't visit code object. 985 // Don't visit code object.
1013 986
1014 // Visit shared function info to avoid double checking of its 987 // Visit shared function info to avoid double checking of its
1015 // flushability. 988 // flushability.
1016 SharedFunctionInfo* shared_info = object->unchecked_shared(); 989 SharedFunctionInfo* shared_info = object->unchecked_shared();
1017 MarkBit shared_info_mark = heap->marking()->MarkBitFrom(shared_info); 990 MarkBit shared_info_mark = heap->marking()->MarkBitFrom(shared_info);
1018 if (!shared_info_mark.Get()) { 991 if (!shared_info_mark.Get()) {
1019 Map* shared_info_map = shared_info->map(); 992 Map* shared_info_map = shared_info->map();
1020 MarkBit shared_info_map_mark = 993 MarkBit shared_info_map_mark =
1021 Marking::MarkBitFromOldSpace(shared_info_map); 994 Marking::MarkBitFrom(shared_info_map);
1022 HEAP->mark_compact_collector()->SetMark(shared_info, shared_info_mark); 995 HEAP->mark_compact_collector()->SetMark(shared_info, shared_info_mark);
1023 HEAP->mark_compact_collector()->MarkObject(shared_info_map, 996 HEAP->mark_compact_collector()->MarkObject(shared_info_map,
1024 shared_info_map_mark); 997 shared_info_map_mark);
1025 VisitSharedFunctionInfoAndFlushCodeGeneric(shared_info_map, 998 VisitSharedFunctionInfoAndFlushCodeGeneric(shared_info_map,
1026 shared_info, 999 shared_info,
1027 true); 1000 true);
1028 } 1001 }
1029 } 1002 }
1030 1003
1031 VisitPointers(heap, 1004 VisitPointers(heap,
(...skipping 61 matching lines...)
1093 1066
1094 1067
1095 class CodeMarkingVisitor : public ThreadVisitor { 1068 class CodeMarkingVisitor : public ThreadVisitor {
1096 public: 1069 public:
1097 explicit CodeMarkingVisitor(MarkCompactCollector* collector) 1070 explicit CodeMarkingVisitor(MarkCompactCollector* collector)
1098 : collector_(collector) {} 1071 : collector_(collector) {}
1099 1072
1100 void VisitThread(Isolate* isolate, ThreadLocalTop* top) { 1073 void VisitThread(Isolate* isolate, ThreadLocalTop* top) {
1101 for (StackFrameIterator it(isolate, top); !it.done(); it.Advance()) { 1074 for (StackFrameIterator it(isolate, top); !it.done(); it.Advance()) {
1102 Code* code = it.frame()->unchecked_code(); 1075 Code* code = it.frame()->unchecked_code();
1103 MarkBit code_bit = Marking::MarkBitFromOldSpace(code); 1076 MarkBit code_bit = Marking::MarkBitFrom(code);
1104 HEAP->mark_compact_collector()->MarkObject( 1077 HEAP->mark_compact_collector()->MarkObject(
1105 it.frame()->unchecked_code(), code_bit); 1078 it.frame()->unchecked_code(), code_bit);
1106 } 1079 }
1107 } 1080 }
1108 1081
1109 private: 1082 private:
1110 MarkCompactCollector* collector_; 1083 MarkCompactCollector* collector_;
1111 }; 1084 };
1112 1085
1113 1086
1114 class SharedFunctionInfoMarkingVisitor : public ObjectVisitor { 1087 class SharedFunctionInfoMarkingVisitor : public ObjectVisitor {
1115 public: 1088 public:
1116 explicit SharedFunctionInfoMarkingVisitor(MarkCompactCollector* collector) 1089 explicit SharedFunctionInfoMarkingVisitor(MarkCompactCollector* collector)
1117 : collector_(collector) {} 1090 : collector_(collector) {}
1118 1091
1119 void VisitPointers(Object** start, Object** end) { 1092 void VisitPointers(Object** start, Object** end) {
1120 for (Object** p = start; p < end; p++) VisitPointer(p); 1093 for (Object** p = start; p < end; p++) VisitPointer(p);
1121 } 1094 }
1122 1095
1123 void VisitPointer(Object** slot) { 1096 void VisitPointer(Object** slot) {
1124 Object* obj = *slot; 1097 Object* obj = *slot;
1125 if (obj->IsSharedFunctionInfo()) { 1098 if (obj->IsSharedFunctionInfo()) {
1126 SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(obj); 1099 SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(obj);
1127 // TODO(gc) ISOLATES MERGE 1100 // TODO(gc) ISOLATES MERGE
1128 MarkBit shared_mark = HEAP->marking()->MarkBitFrom(shared); 1101 MarkBit shared_mark = HEAP->marking()->MarkBitFrom(shared);
1129 MarkBit code_mark = 1102 MarkBit code_mark =
1130 HEAP->marking()->MarkBitFromOldSpace(shared->unchecked_code()); 1103 HEAP->marking()->MarkBitFrom(shared->unchecked_code());
1131 HEAP->mark_compact_collector()->MarkObject(shared->unchecked_code(), 1104 HEAP->mark_compact_collector()->MarkObject(shared->unchecked_code(),
1132 code_mark); 1105 code_mark);
1133 HEAP->mark_compact_collector()->MarkObject(shared, shared_mark); 1106 HEAP->mark_compact_collector()->MarkObject(shared, shared_mark);
1134 } 1107 }
1135 } 1108 }
1136 1109
1137 private: 1110 private:
1138 MarkCompactCollector* collector_; 1111 MarkCompactCollector* collector_;
1139 }; 1112 };
1140 1113
(...skipping 20 matching lines...)
1161 HeapObject* descriptor_array = heap()->raw_unchecked_empty_descriptor_array(); 1134 HeapObject* descriptor_array = heap()->raw_unchecked_empty_descriptor_array();
1162 // TODO(gc) ISOLATES MERGE 1135 // TODO(gc) ISOLATES MERGE
1163 MarkBit descriptor_array_mark = 1136 MarkBit descriptor_array_mark =
1164 heap()->marking()->MarkBitFrom(descriptor_array); 1137 heap()->marking()->MarkBitFrom(descriptor_array);
1165 MarkObject(descriptor_array, descriptor_array_mark); 1138 MarkObject(descriptor_array, descriptor_array_mark);
1166 1139
1167 // Make sure we are not referencing the code from the stack. 1140 // Make sure we are not referencing the code from the stack.
1168 ASSERT(this == heap()->mark_compact_collector()); 1141 ASSERT(this == heap()->mark_compact_collector());
1169 for (StackFrameIterator it; !it.done(); it.Advance()) { 1142 for (StackFrameIterator it; !it.done(); it.Advance()) {
1170 Code* code = it.frame()->unchecked_code(); 1143 Code* code = it.frame()->unchecked_code();
1171 MarkBit code_mark = Marking::MarkBitFromOldSpace(code); 1144 MarkBit code_mark = Marking::MarkBitFrom(code);
1172 MarkObject(code, code_mark); 1145 MarkObject(code, code_mark);
1173 } 1146 }
1174 1147
1175 // Iterate the archived stacks in all threads to check if 1148 // Iterate the archived stacks in all threads to check if
1176 // the code is referenced. 1149 // the code is referenced.
1177 CodeMarkingVisitor code_marking_visitor(this); 1150 CodeMarkingVisitor code_marking_visitor(this);
1178 heap()->isolate()->thread_manager()->IterateArchivedThreads( 1151 heap()->isolate()->thread_manager()->IterateArchivedThreads(
1179 &code_marking_visitor); 1152 &code_marking_visitor);
1180 1153
1181 SharedFunctionInfoMarkingVisitor visitor(this); 1154 SharedFunctionInfoMarkingVisitor visitor(this);
(...skipping 26 matching lines...)
1208 HeapObject* object = ShortCircuitConsString(p); 1181 HeapObject* object = ShortCircuitConsString(p);
1209 // TODO(gc) ISOLATES MERGE 1182 // TODO(gc) ISOLATES MERGE
1210 MarkBit mark_bit = HEAP->marking()->MarkBitFrom(object); 1183 MarkBit mark_bit = HEAP->marking()->MarkBitFrom(object);
1211 if (mark_bit.Get()) return; 1184 if (mark_bit.Get()) return;
1212 1185
1213 Map* map = object->map(); 1186 Map* map = object->map();
1214 // Mark the object. 1187 // Mark the object.
1215 HEAP->mark_compact_collector()->SetMark(object, mark_bit); 1188 HEAP->mark_compact_collector()->SetMark(object, mark_bit);
1216 1189
1217 // Mark the map pointer and body, and push them on the marking stack. 1190 // Mark the map pointer and body, and push them on the marking stack.
1218 MarkBit map_mark = Marking::MarkBitFromOldSpace(map); 1191 MarkBit map_mark = Marking::MarkBitFrom(map);
1219 HEAP->mark_compact_collector()->MarkObject(map, map_mark); 1192 HEAP->mark_compact_collector()->MarkObject(map, map_mark);
1220 StaticMarkingVisitor::IterateBody(map, object); 1193 StaticMarkingVisitor::IterateBody(map, object);
1221 1194
1222 // Mark all the objects reachable from the map and body. May leave 1195 // Mark all the objects reachable from the map and body. May leave
1223 // overflowed objects in the heap. 1196 // overflowed objects in the heap.
1224 collector_->EmptyMarkingDeque(); 1197 collector_->EmptyMarkingDeque();
1225 } 1198 }
1226 1199
1227 MarkCompactCollector* collector_; 1200 MarkCompactCollector* collector_;
1228 }; 1201 };
(...skipping 315 matching lines...)
1544 // marking stack have been marked, or are overflowed in the heap. 1517 // marking stack have been marked, or are overflowed in the heap.
1545 void MarkCompactCollector::EmptyMarkingDeque() { 1518 void MarkCompactCollector::EmptyMarkingDeque() {
1546 while (!marking_deque_.IsEmpty()) { 1519 while (!marking_deque_.IsEmpty()) {
1547 HeapObject* object = marking_deque_.Pop(); 1520 HeapObject* object = marking_deque_.Pop();
1548 ASSERT(object->IsHeapObject()); 1521 ASSERT(object->IsHeapObject());
1549 ASSERT(heap()->Contains(object)); 1522 ASSERT(heap()->Contains(object));
1550 ASSERT(IsMarked(object)); 1523 ASSERT(IsMarked(object));
1551 ASSERT(!object->IsOverflowed()); 1524 ASSERT(!object->IsOverflowed());
1552 1525
1553 Map* map = object->map(); 1526 Map* map = object->map();
1554 MarkBit map_mark = Marking::MarkBitFromOldSpace(map); 1527 MarkBit map_mark = Marking::MarkBitFrom(map);
1555 MarkObject(map, map_mark); 1528 MarkObject(map, map_mark);
1556 1529
1557 StaticMarkingVisitor::IterateBody(map, object); 1530 StaticMarkingVisitor::IterateBody(map, object);
1558 } 1531 }
1559 } 1532 }
1560 1533
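EmptyMarkingDeque above is the standard transitive-marking worklist: pop a gray object, mark its map, visit its body, and push newly marked children. A self-contained sketch of the same loop, with maps elided and objects reduced to a child list:

#include <cstddef>
#include <cstdio>
#include <deque>
#include <vector>

// Simplified heap object: a mark flag plus outgoing references.
struct Obj {
  bool marked;
  std::vector<Obj*> children;
  Obj() : marked(false) {}
};

// Pop gray objects and visit their bodies until the deque is drained.
void EmptyMarkingDeque(std::deque<Obj*>* deque) {
  while (!deque->empty()) {
    Obj* object = deque->back();
    deque->pop_back();
    for (std::size_t i = 0; i < object->children.size(); i++) {
      Obj* child = object->children[i];
      if (!child->marked) {  // MarkObject: set the bit and push (gray)
        child->marked = true;
        deque->push_back(child);
      }
    }
  }
}

int main() {
  Obj root, a, b;
  root.children.push_back(&a);
  root.children.push_back(&b);
  a.children.push_back(&b);
  root.marked = true;
  std::deque<Obj*> deque;
  deque.push_back(&root);
  EmptyMarkingDeque(&deque);
  std::printf("a marked: %d, b marked: %d\n", a.marked, b.marked);
  return 0;
}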
1561 1534
1562 // Sweep the heap for overflowed objects, clear their overflow bits, and 1535 // Sweep the heap for overflowed objects, clear their overflow bits, and
1563 // push them on the marking stack. Stop early if the marking stack fills 1536 // push them on the marking stack. Stop early if the marking stack fills
1564 // before sweeping completes. If sweeping completes, there are no remaining 1537 // before sweeping completes. If sweeping completes, there are no remaining
(...skipping 184 matching lines...)
1749 // dropping the back pointers temporarily stored in the prototype field. 1722 // dropping the back pointers temporarily stored in the prototype field.
1750 // Setting the prototype field requires following the linked list of 1723 // Setting the prototype field requires following the linked list of
1751 // back pointers, reversing them all at once. This allows us to find 1724 // back pointers, reversing them all at once. This allows us to find
1752 // those maps with map transitions that need to be nulled, and only 1725 // those maps with map transitions that need to be nulled, and only
1753 // scan the descriptor arrays of those maps, not all maps. 1726 // scan the descriptor arrays of those maps, not all maps.
1754 // All of these actions are carried out only on maps of JSObjects 1727 // All of these actions are carried out only on maps of JSObjects
1755 // and related subtypes. 1728 // and related subtypes.
1756 for (HeapObject* obj = map_iterator.Next(); 1729 for (HeapObject* obj = map_iterator.Next();
1757 obj != NULL; obj = map_iterator.Next()) { 1730 obj != NULL; obj = map_iterator.Next()) {
1758 Map* map = reinterpret_cast<Map*>(obj); 1731 Map* map = reinterpret_cast<Map*>(obj);
1759 MarkBit map_mark = Marking::MarkBitFromOldSpace(map); 1732 MarkBit map_mark = Marking::MarkBitFrom(map);
1760 if (map->IsFreeSpace()) continue; 1733 if (map->IsFreeSpace()) continue;
1761 1734
1762 ASSERT(SafeIsMap(map)); 1735 ASSERT(SafeIsMap(map));
1763 // Only JSObject and subtypes have map transitions and back pointers. 1736 // Only JSObject and subtypes have map transitions and back pointers.
1764 if (map->instance_type() < FIRST_JS_OBJECT_TYPE) continue; 1737 if (map->instance_type() < FIRST_JS_OBJECT_TYPE) continue;
1765 if (map->instance_type() > JS_FUNCTION_TYPE) continue; 1738 if (map->instance_type() > JS_FUNCTION_TYPE) continue;
1766 1739
1767 if (map_mark.Get() && 1740 if (map_mark.Get() &&
1768 map->attached_to_shared_function_info()) { 1741 map->attached_to_shared_function_info()) {
1769 // This map is used for inobject slack tracking and has been detached 1742 // This map is used for inobject slack tracking and has been detached
1770 // from SharedFunctionInfo during the mark phase. 1743 // from SharedFunctionInfo during the mark phase.
1771 // Since it survived the GC, reattach it now. 1744 // Since it survived the GC, reattach it now.
1772 map->unchecked_constructor()->unchecked_shared()->AttachInitialMap(map); 1745 map->unchecked_constructor()->unchecked_shared()->AttachInitialMap(map);
1773 } 1746 }
1774 1747
1775 // Clear dead prototype transitions. 1748 // Clear dead prototype transitions.
1776 FixedArray* prototype_transitions = map->unchecked_prototype_transitions(); 1749 FixedArray* prototype_transitions = map->unchecked_prototype_transitions();
1777 if (prototype_transitions->length() > 0) { 1750 if (prototype_transitions->length() > 0) {
1778 int finger = Smi::cast(prototype_transitions->get(0))->value(); 1751 int finger = Smi::cast(prototype_transitions->get(0))->value();
1779 int new_finger = 1; 1752 int new_finger = 1;
1780 for (int i = 1; i < finger; i += 2) { 1753 for (int i = 1; i < finger; i += 2) {
1781 HeapObject* prototype = HeapObject::cast(prototype_transitions->get(i)); 1754 HeapObject* prototype = HeapObject::cast(prototype_transitions->get(i));
1782 Map* cached_map = Map::cast(prototype_transitions->get(i + 1)); 1755 Map* cached_map = Map::cast(prototype_transitions->get(i + 1));
1783 MarkBit prototype_mark = heap()->marking()->MarkBitFrom(prototype); 1756 MarkBit prototype_mark = heap()->marking()->MarkBitFrom(prototype);
1784 MarkBit cached_map_mark = Marking::MarkBitFromOldSpace(cached_map); 1757 MarkBit cached_map_mark = Marking::MarkBitFrom(cached_map);
1785 if (prototype_mark.Get() && cached_map_mark.Get()) { 1758 if (prototype_mark.Get() && cached_map_mark.Get()) {
1786 if (new_finger != i) { 1759 if (new_finger != i) {
1787 prototype_transitions->set_unchecked(heap_, 1760 prototype_transitions->set_unchecked(heap_,
1788 new_finger, 1761 new_finger,
1789 prototype, 1762 prototype,
1790 UPDATE_WRITE_BARRIER); 1763 UPDATE_WRITE_BARRIER);
1791 prototype_transitions->set_unchecked(heap_, 1764 prototype_transitions->set_unchecked(heap_,
1792 new_finger + 1, 1765 new_finger + 1,
1793 cached_map, 1766 cached_map,
1794 SKIP_WRITE_BARRIER); 1767 SKIP_WRITE_BARRIER);
(...skipping 22 matching lines...)
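The prototype-transition loop above compacts live (prototype, cached map) pairs toward the front of the array, tracking the write position in new_finger and storing it back into slot 0. A simplified runnable version of that compaction over plain ints, with an arbitrary liveness predicate standing in for the two mark-bit checks:

#include <cstdio>
#include <vector>

// Stand-in for "both the prototype and the cached map are marked".
bool IsLive(int v) { return v % 2 == 0; }

// Slot 0 holds the finger; (value, value) pairs follow. Live pairs are
// slid toward the front, mirroring the loop over the transitions array.
void CompactTransitions(std::vector<int>* arr) {
  int finger = (*arr)[0];
  int new_finger = 1;
  for (int i = 1; i < finger; i += 2) {
    if (IsLive((*arr)[i]) && IsLive((*arr)[i + 1])) {
      if (new_finger != i) {
        (*arr)[new_finger] = (*arr)[i];
        (*arr)[new_finger + 1] = (*arr)[i + 1];
      }
      new_finger += 2;
    }
  }
  (*arr)[0] = new_finger;  // entries past the finger are now dead
}

int main() {
  int raw[] = {7, 2, 4, 3, 5, 6, 8};  // finger = 7, then three pairs
  std::vector<int> arr(raw, raw + 7);
  CompactTransitions(&arr);
  std::printf("finger=%d, first pair=(%d, %d)\n", arr[0], arr[1], arr[2]);
  return 0;
}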
1817 Object* real_prototype = current; 1790 Object* real_prototype = current;
1818 1791
1819 // Follow back pointers, setting them to prototype, 1792 // Follow back pointers, setting them to prototype,
1820 // clearing map transitions when necessary. 1793 // clearing map transitions when necessary.
1821 current = map; 1794 current = map;
1822 bool on_dead_path = !map_mark.Get(); 1795 bool on_dead_path = !map_mark.Get();
1823 Object* next; 1796 Object* next;
1824 while (SafeIsMap(current)) { 1797 while (SafeIsMap(current)) {
1825 next = current->prototype(); 1798 next = current->prototype();
1826 // There should never be a dead map above a live map. 1799 // There should never be a dead map above a live map.
1827 MarkBit current_mark = Marking::MarkBitFromOldSpace(current); 1800 MarkBit current_mark = Marking::MarkBitFrom(current);
1828 ASSERT(on_dead_path || current_mark.Get()); 1801 ASSERT(on_dead_path || current_mark.Get());
1829 1802
1830 // A live map above a dead map indicates a dead transition. 1803 // A live map above a dead map indicates a dead transition.
1831 // This test will always be false on the first iteration. 1804 // This test will always be false on the first iteration.
1832 if (on_dead_path && current_mark.Get()) { 1805 if (on_dead_path && current_mark.Get()) {
1833 on_dead_path = false; 1806 on_dead_path = false;
1834 current->ClearNonLiveTransitions(heap(), real_prototype); 1807 current->ClearNonLiveTransitions(heap(), real_prototype);
1835 } 1808 }
1836 *HeapObject::RawField(current, Map::kPrototypeOffset) = 1809 *HeapObject::RawField(current, Map::kPrototypeOffset) =
1837 real_prototype; 1810 real_prototype;
(...skipping 172 matching lines...)
2010 int survivors_size = 0; 1983 int survivors_size = 0;
2011 1984
2012 // First pass: traverse all objects in inactive semispace, remove marks, 1985 // First pass: traverse all objects in inactive semispace, remove marks,
2013 // migrate live objects and write forwarding addresses. This stage puts 1986 // migrate live objects and write forwarding addresses. This stage puts
2014 // new entries in the store buffer and may cause some pages to be marked 1987 // new entries in the store buffer and may cause some pages to be marked
2015 // scan-on-scavenge. 1988 // scan-on-scavenge.
2016 for (Address current = from_bottom; current < from_top; current += size) { 1989 for (Address current = from_bottom; current < from_top; current += size) {
2017 HeapObject* object = HeapObject::FromAddress(current); 1990 HeapObject* object = HeapObject::FromAddress(current);
2018 1991
2019 1992
2020 MarkBit mark_bit = heap_->marking()->MarkBitFromNewSpace(object); 1993 MarkBit mark_bit = heap_->marking()->MarkBitFrom(object);
2021 if (mark_bit.Get()) { 1994 if (mark_bit.Get()) {
2022 mark_bit.Clear(); 1995 mark_bit.Clear();
2023 heap_->mark_compact_collector()->tracer()->decrement_marked_count(); 1996 heap_->mark_compact_collector()->tracer()->decrement_marked_count();
2024 1997
2025 size = object->Size(); 1998 size = object->Size();
2026 survivors_size += size; 1999 survivors_size += size;
2027 2000
2028 // Aggressively promote young survivors to the old space. 2001 // Aggressively promote young survivors to the old space.
2029 if (TryPromoteObject(heap_, object, size)) { 2002 if (TryPromoteObject(heap_, object, size)) {
2030 continue; 2003 continue;
(...skipping 75 matching lines...)
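The first pass above walks the inactive semispace linearly, clears each survivor's mark bit (decrementing the marked count), and accumulates survivors_size before promoting or migrating objects. A toy version that keeps just the measure-and-clear part; the promotion and forwarding-address machinery is elided:

#include <cstdio>

struct ToyObject { int size; bool marked; };

// Measure survivors and clear their marks; the real pass also promotes or
// migrates each live object and writes a forwarding address.
int MeasureSurvivors(ToyObject* objects, int count) {
  int survivors_size = 0;
  for (int i = 0; i < count; i++) {
    if (objects[i].marked) {
      objects[i].marked = false;  // marks must be clean for the next GC
      survivors_size += objects[i].size;
    }
  }
  return survivors_size;
}

int main() {
  ToyObject semispace[3] = { {16, true}, {24, false}, {8, true} };
  std::printf("survivors: %d bytes\n", MeasureSurvivors(semispace, 3));
  return 0;
}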
2106 uint32_t free_start, 2079 uint32_t free_start,
2107 uint32_t region_end, 2080 uint32_t region_end,
2108 uint32_t* cells)); 2081 uint32_t* cells));
2109 2082
2110 2083
2111 static uint32_t SweepFree(PagedSpace* space, 2084 static uint32_t SweepFree(PagedSpace* space,
2112 Page* p, 2085 Page* p,
2113 uint32_t free_start, 2086 uint32_t free_start,
2114 uint32_t region_end, 2087 uint32_t region_end,
2115 uint32_t* cells) { 2088 uint32_t* cells) {
2116 uint32_t free_cell_index = Page::MarkbitsBitmap::IndexToCell(free_start); 2089 uint32_t free_cell_index = Bitmap::IndexToCell(free_start);
2117 ASSERT(cells[free_cell_index] == 0); 2090 ASSERT(cells[free_cell_index] == 0);
2118 while (free_cell_index < region_end && cells[free_cell_index] == 0) { 2091 while (free_cell_index < region_end && cells[free_cell_index] == 0) {
2119 free_cell_index++; 2092 free_cell_index++;
2120 } 2093 }
2121 2094
2122 if (free_cell_index >= region_end) { 2095 if (free_cell_index >= region_end) {
2123 return free_cell_index; 2096 return free_cell_index;
2124 } 2097 }
2125 2098
2126 uint32_t free_end = Page::MarkbitsBitmap::CellToIndex(free_cell_index); 2099 uint32_t free_end = Bitmap::CellToIndex(free_cell_index);
2127 space->Free(p->MarkbitIndexToAddress(free_start), 2100 space->Free(p->MarkbitIndexToAddress(free_start),
2128 (free_end - free_start) << kPointerSizeLog2); 2101 (free_end - free_start) << kPointerSizeLog2);
2129 2102
2130 return free_cell_index; 2103 return free_cell_index;
2131 } 2104 }
2132 2105
2133 2106
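SweepFree above advances through whole bitmap cells while they are zero; a run of zero cells is a run of 32-word blocks containing no live object starts. The scan reduces to the following; the cell values are illustrative:

#include <cstdint>
#include <cstdio>

// Advance whole 32-bit cells while they are empty; the free region ends at
// the first cell that contains a live mark bit (or at the region end).
uint32_t ScanFreeCells(const uint32_t* cells, uint32_t cell, uint32_t end) {
  while (cell < end && cells[cell] == 0) cell++;
  return cell;
}

int main() {
  uint32_t cells[8] = { 0, 0, 0, 0x10, 0, 0, 0, 0 };
  uint32_t first_live = ScanFreeCells(cells, 0, 8);
  // Three empty cells = 3 * 32 pointer-sized words of free space.
  std::printf("first live cell: %u\n", first_live);
  return 0;
}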
2134 INLINE(static uint32_t NextCandidate(uint32_t cell_index, 2107 INLINE(static uint32_t NextCandidate(uint32_t cell_index,
2135 uint32_t last_cell_index, 2108 uint32_t last_cell_index,
2136 uint32_t* cells)); 2109 uint32_t* cells));
(...skipping 17 matching lines...)
2154 static int SizeOfPreviousObject(Page* p, 2127 static int SizeOfPreviousObject(Page* p,
2155 uint32_t cell_index, 2128 uint32_t cell_index,
2156 uint32_t* cells) { 2129 uint32_t* cells) {
2157 ASSERT(cells[cell_index] == 0); 2130 ASSERT(cells[cell_index] == 0);
2158 if (cells[cell_index - 1] == 0) return 0; 2131 if (cells[cell_index - 1] == 0) return 0;
2159 2132
2160 int leading_zeroes = 2133 int leading_zeroes =
2161 CompilerIntrinsics::CountLeadingZeros(cells[cell_index - 1]) + 1; 2134 CompilerIntrinsics::CountLeadingZeros(cells[cell_index - 1]) + 1;
2162 Address addr = 2135 Address addr =
2163 p->MarkbitIndexToAddress( 2136 p->MarkbitIndexToAddress(
2164 Page::MarkbitsBitmap::CellToIndex(cell_index) - leading_zeroes); 2137 Bitmap::CellToIndex(cell_index) - leading_zeroes);
2165 HeapObject* obj = HeapObject::FromAddress(addr); 2138 HeapObject* obj = HeapObject::FromAddress(addr);
2166 ASSERT(obj->map()->IsMap()); 2139 ASSERT(obj->map()->IsMap());
2167 return (obj->Size() >> kPointerSizeLog2) - leading_zeroes; 2140 return (obj->Size() >> kPointerSizeLog2) - leading_zeroes;
2168 } 2141 }
2169 2142
2170 2143
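SizeOfPreviousObject above relies on a leading-zeros trick: the highest set bit of the previous cell is the start of the last object in it, so CountLeadingZeros(cell) + 1 gives that object's distance, in words, back from the cell boundary. A stand-in assuming GCC/Clang's __builtin_clz (the real CompilerIntrinsics wrapper is not shown in this diff):

#include <cstdint>
#include <cstdio>

// __builtin_clz is undefined for 0, hence the guard.
int CountLeadingZeros(uint32_t x) { return x == 0 ? 32 : __builtin_clz(x); }

int main() {
  uint32_t prev_cell = 1u << 20;  // one object start at bit 20
  int leading_zeroes = CountLeadingZeros(prev_cell) + 1;  // 11 + 1 = 12
  // The previous object starts 12 mark bits (pointer-sized words) before
  // the boundary between this cell and the next one.
  std::printf("last object starts %d words before the cell end\n",
              leading_zeroes);
  return 0;
}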
2171 static const int kStartTableEntriesPerLine = 5; 2144 static const int kStartTableEntriesPerLine = 5;
2172 static const int kStartTableLines = 171; 2145 static const int kStartTableLines = 171;
2173 static const int kStartTableInvalidLine = 127; 2146 static const int kStartTableInvalidLine = 127;
2174 static const int kStartTableUnusedEntry = 126; 2147 static const int kStartTableUnusedEntry = 126;
(...skipping 274 matching lines...)
2449 int freed_bytes = 0; 2422 int freed_bytes = 0;
2450 2423
2451 MarkBit::CellType* cells = p->markbits()->cells(); 2424 MarkBit::CellType* cells = p->markbits()->cells();
2452 2425
2453 p->SetFlag(MemoryChunk::WAS_SWEPT_CONSERVATIVELY); 2426 p->SetFlag(MemoryChunk::WAS_SWEPT_CONSERVATIVELY);
2454 2427
2455 // This is the start of the 32 word block that we are currently looking at. 2428 // This is the start of the 32 word block that we are currently looking at.
2456 Address block_address = p->ObjectAreaStart(); 2429 Address block_address = p->ObjectAreaStart();
2457 2430
2458 int last_cell_index = 2431 int last_cell_index =
2459 Page::MarkbitsBitmap::IndexToCell( 2432 Bitmap::IndexToCell(
2460 Page::MarkbitsBitmap::CellAlignIndex( 2433 Bitmap::CellAlignIndex(
2461 p->AddressToMarkbitIndex(p->ObjectAreaEnd()))); 2434 p->AddressToMarkbitIndex(p->ObjectAreaEnd())));
2462 2435
2463 int cell_index = Page::kFirstUsedCell; 2436 int cell_index = Page::kFirstUsedCell;
2464 2437
2465 // Skip over all the dead objects at the start of the page and mark them free. 2438 // Skip over all the dead objects at the start of the page and mark them free.
2466 for (cell_index = Page::kFirstUsedCell; 2439 for (cell_index = Page::kFirstUsedCell;
2467 cell_index < last_cell_index; 2440 cell_index < last_cell_index;
2468 cell_index++, block_address += 32 * kPointerSize) { 2441 cell_index++, block_address += 32 * kPointerSize) {
2469 if (cells[cell_index] != 0) break; 2442 if (cells[cell_index] != 0) break;
2470 } 2443 }
(...skipping 14 matching lines...)
2485 // started. Unless we find a large free space in the bitmap we will not 2458 // started. Unless we find a large free space in the bitmap we will not
2486 // digest this pair into a real address. We start the iteration here at the 2459 // digest this pair into a real address. We start the iteration here at the
2487 // first word in the marking bit map that indicates a live object. 2460 // first word in the marking bit map that indicates a live object.
2488 Address free_start = block_address; 2461 Address free_start = block_address;
2489 uint32_t free_start_cell = cells[cell_index]; 2462 uint32_t free_start_cell = cells[cell_index];
2490 2463
2491 for ( ; 2464 for ( ;
2492 cell_index < last_cell_index; 2465 cell_index < last_cell_index;
2493 cell_index++, block_address += 32 * kPointerSize) { 2466 cell_index++, block_address += 32 * kPointerSize) {
2494 ASSERT((unsigned)cell_index == 2467 ASSERT((unsigned)cell_index ==
2495 Page::MarkbitsBitmap::IndexToCell( 2468 Bitmap::IndexToCell(
2496 Page::MarkbitsBitmap::CellAlignIndex( 2469 Bitmap::CellAlignIndex(
2497 p->AddressToMarkbitIndex(block_address)))); 2470 p->AddressToMarkbitIndex(block_address))));
2498 uint32_t cell = cells[cell_index]; 2471 uint32_t cell = cells[cell_index];
2499 if (cell != 0) { 2472 if (cell != 0) {
2500 // We have a live object. Check approximately whether it is more than 32 2473 // We have a live object. Check approximately whether it is more than 32
2501 // words since the last live object. 2474 // words since the last live object.
2502 if (block_address - free_start > 32 * kPointerSize) { 2475 if (block_address - free_start > 32 * kPointerSize) {
2503 free_start = DigestFreeStart(free_start, free_start_cell); 2476 free_start = DigestFreeStart(free_start, free_start_cell);
2504 if (block_address - free_start > 32 * kPointerSize) { 2477 if (block_address - free_start > 32 * kPointerSize) {
2505 // Now that we know the exact start of the free space it still looks 2478 // Now that we know the exact start of the free space it still looks
2506 // like we have a large enough free space to be worth bothering with. 2479 // like we have a large enough free space to be worth bothering with.
(...skipping 23 matching lines...)
2530 // be iterated precisely, hitting only the live objects. Code space 2503 // be iterated precisely, hitting only the live objects. Code space
2531 // is always swept precisely because we want to be able to iterate 2504 // is always swept precisely because we want to be able to iterate
2532 // over it. Map space is swept precisely, because it is not compacted. 2505 // over it. Map space is swept precisely, because it is not compacted.
2533 static void SweepPrecisely(PagedSpace* space, 2506 static void SweepPrecisely(PagedSpace* space,
2534 Page* p) { 2507 Page* p) {
2535 MarkBit::CellType* cells = p->markbits()->cells(); 2508 MarkBit::CellType* cells = p->markbits()->cells();
2536 2509
2537 p->ClearFlag(MemoryChunk::WAS_SWEPT_CONSERVATIVELY); 2510 p->ClearFlag(MemoryChunk::WAS_SWEPT_CONSERVATIVELY);
2538 2511
2539 int last_cell_index = 2512 int last_cell_index =
2540 Page::MarkbitsBitmap::IndexToCell( 2513 Bitmap::IndexToCell(
2541 Page::MarkbitsBitmap::CellAlignIndex( 2514 Bitmap::CellAlignIndex(
2542 p->AddressToMarkbitIndex(p->ObjectAreaEnd()))); 2515 p->AddressToMarkbitIndex(p->ObjectAreaEnd())));
2543 2516
2544 int cell_index = Page::kFirstUsedCell; 2517 int cell_index = Page::kFirstUsedCell;
2545 Address free_start = p->ObjectAreaStart(); 2518 Address free_start = p->ObjectAreaStart();
2546 ASSERT(reinterpret_cast<uint32_t>(free_start) % (32 * kPointerSize) == 0); 2519 ASSERT(reinterpret_cast<uint32_t>(free_start) % (32 * kPointerSize) == 0);
2547 Address object_address = p->ObjectAreaStart(); 2520 Address object_address = p->ObjectAreaStart();
2548 int offsets[16]; 2521 int offsets[16];
2549 2522
2550 for (cell_index = Page::kFirstUsedCell; 2523 for (cell_index = Page::kFirstUsedCell;
2551 cell_index < last_cell_index; 2524 cell_index < last_cell_index;
2552 cell_index++, object_address += 32 * kPointerSize) { 2525 cell_index++, object_address += 32 * kPointerSize) {
2553 ASSERT((unsigned)cell_index == 2526 ASSERT((unsigned)cell_index ==
2554 Page::MarkbitsBitmap::IndexToCell( 2527 Bitmap::IndexToCell(
2555 Page::MarkbitsBitmap::CellAlignIndex( 2528 Bitmap::CellAlignIndex(
2556 p->AddressToMarkbitIndex(object_address)))); 2529 p->AddressToMarkbitIndex(object_address))));
2557 int live_objects = MarkWordToObjectStarts(cells[cell_index], offsets); 2530 int live_objects = MarkWordToObjectStarts(cells[cell_index], offsets);
2558 int live_index = 0; 2531 int live_index = 0;
2559 for ( ; live_objects != 0; live_objects--) { 2532 for ( ; live_objects != 0; live_objects--) {
2560 Address free_end = object_address + offsets[live_index++] * kPointerSize; 2533 Address free_end = object_address + offsets[live_index++] * kPointerSize;
2561 if (free_end != free_start) { 2534 if (free_end != free_start) {
2562 space->Free(free_start, free_end - free_start); 2535 space->Free(free_start, free_end - free_start);
2563 } 2536 }
2564 HeapObject* live_object = HeapObject::FromAddress(free_end); 2537 HeapObject* live_object = HeapObject::FromAddress(free_end);
2565 free_start = free_end + live_object->Size(); 2538 free_start = free_end + live_object->Size();
(...skipping 160 matching lines...)
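SweepPrecisely above calls MarkWordToObjectStarts to decode one 32-bit mark word into the word offsets of the objects that start in it; the real implementation uses the start table declared earlier, and offsets[16] suffices because objects are at least two words. A simple bit-stripping stand-in (assuming GCC/Clang's __builtin_ctz) that yields the same offsets:

#include <cstdint>
#include <cstdio>

// Decode one mark word into word offsets of object starts by repeatedly
// taking the lowest set bit.
int MarkWordToObjectStarts(uint32_t cell, int* offsets) {
  int count = 0;
  while (cell != 0) {
    offsets[count++] = __builtin_ctz(cell);  // lowest set bit
    cell &= cell - 1;                        // strip it
  }
  return count;
}

int main() {
  int offsets[32];  // 32 always suffices; V8 uses 16 (objects are >= 2 words)
  int n = MarkWordToObjectStarts(0x80000005u, offsets);
  for (int i = 0; i < n; i++) {
    std::printf("object starts at word %d\n", offsets[i]);
  }
  return 0;
}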
2726 } 2699 }
2727 2700
2728 2701
2729 void MarkCompactCollector::Initialize() { 2702 void MarkCompactCollector::Initialize() {
2730 StaticPointersToNewGenUpdatingVisitor::Initialize(); 2703 StaticPointersToNewGenUpdatingVisitor::Initialize();
2731 StaticMarkingVisitor::Initialize(); 2704 StaticMarkingVisitor::Initialize();
2732 } 2705 }
2733 2706
2734 2707
2735 } } // namespace v8::internal 2708 } } // namespace v8::internal