Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2006-2008 the V8 project authors. All rights reserved. | 1 // Copyright 2006-2008 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 28 matching lines...) | |
| 39 namespace v8 { | 39 namespace v8 { |
| 40 namespace internal { | 40 namespace internal { |
| 41 | 41 |
| 42 // ------------------------------------------------------------------------- | 42 // ------------------------------------------------------------------------- |
| 43 // MarkCompactCollector | 43 // MarkCompactCollector |
| 44 | 44 |
| 45 bool MarkCompactCollector::force_compaction_ = false; | 45 bool MarkCompactCollector::force_compaction_ = false; |
| 46 bool MarkCompactCollector::compacting_collection_ = false; | 46 bool MarkCompactCollector::compacting_collection_ = false; |
| 47 bool MarkCompactCollector::compact_on_next_gc_ = false; | 47 bool MarkCompactCollector::compact_on_next_gc_ = false; |
| 48 | 48 |
| 49 int MarkCompactCollector::previous_marked_count_ = 0; | |
| 50 GCTracer* MarkCompactCollector::tracer_ = NULL; | 49 GCTracer* MarkCompactCollector::tracer_ = NULL; |
| 51 | 50 |
| 52 | |
| 53 #ifdef DEBUG | 51 #ifdef DEBUG |
| 54 MarkCompactCollector::CollectorState MarkCompactCollector::state_ = IDLE; | 52 MarkCompactCollector::CollectorState MarkCompactCollector::state_ = IDLE; |
| 55 | 53 |
| 56 // Counters used for debugging the marking phase of mark-compact or mark-sweep | 54 // Counters used for debugging the marking phase of mark-compact or mark-sweep |
| 57 // collection. | 55 // collection. |
| 58 int MarkCompactCollector::live_bytes_ = 0; | 56 int MarkCompactCollector::live_bytes_ = 0; |
| 59 int MarkCompactCollector::live_young_objects_size_ = 0; | 57 int MarkCompactCollector::live_young_objects_size_ = 0; |
| 60 int MarkCompactCollector::live_old_data_objects_size_ = 0; | 58 int MarkCompactCollector::live_old_data_objects_size_ = 0; |
| 61 int MarkCompactCollector::live_old_pointer_objects_size_ = 0; | 59 int MarkCompactCollector::live_old_pointer_objects_size_ = 0; |
| 62 int MarkCompactCollector::live_code_objects_size_ = 0; | 60 int MarkCompactCollector::live_code_objects_size_ = 0; |
| 63 int MarkCompactCollector::live_map_objects_size_ = 0; | 61 int MarkCompactCollector::live_map_objects_size_ = 0; |
| 64 int MarkCompactCollector::live_cell_objects_size_ = 0; | 62 int MarkCompactCollector::live_cell_objects_size_ = 0; |
| 65 int MarkCompactCollector::live_lo_objects_size_ = 0; | 63 int MarkCompactCollector::live_lo_objects_size_ = 0; |
| 66 #endif | 64 #endif |
| 67 | 65 |
| 68 | 66 |
| 67 Marking::NewSpaceMarkbitsBitmap* Marking::new_space_bitmap_ = NULL; | |
| 68 | |
| 69 | |
| 70 bool Marking::Setup() { | |
| 71 if (new_space_bitmap_ == NULL) { | |
| 72 int markbits_per_newspace = | |
| 73 (2*Heap::MaxSemiSpaceSize()) >> kPointerSizeLog2; | |
| 74 | |
| 75 new_space_bitmap_ = | |
| 76 BitmapStorageDescriptor::Allocate( | |
| 77 NewSpaceMarkbitsBitmap::CellsForLength(markbits_per_newspace)); | |
| 78 } | |
| 79 return new_space_bitmap_ != NULL; | |
| 80 } | |
| 81 | |
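To make the sizing above concrete: Setup() reserves one mark bit per pointer-aligned word across both semispaces. A worked example of that arithmetic, assuming a hypothetical 8 MB max semispace, a 64-bit build, and 32-bit bitmap cells (the real constants live in the V8 headers):

```cpp
#include <cstdint>
#include <cstdio>

int main() {
  const int kPointerSizeLog2 = 3;                  // 64-bit build: 8-byte words.
  const int max_semispace_size = 8 * 1024 * 1024;  // Hypothetical 8 MB.

  // One mark bit per pointer-aligned word, covering both semispaces.
  int markbits = (2 * max_semispace_size) >> kPointerSizeLog2;  // 2M bits.

  // With 32-bit cells, CellsForLength would round up roughly like this.
  const int kBitsPerCell = 32;
  int cells = (markbits + kBitsPerCell - 1) / kBitsPerCell;     // 65536 cells.

  printf("%d mark bits -> %d cells (%zu bytes)\n",
         markbits, cells, cells * sizeof(uint32_t));            // 256 KB.
  return 0;
}
```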
| 82 | |
| 83 void Marking::TearDown() { | |
| 84 if (new_space_bitmap_ != NULL) { | |
| 85 BitmapStorageDescriptor::Free(new_space_bitmap_); | |
| 86 new_space_bitmap_ = NULL; | |
| 87 } | |
| 88 } | |
| 89 | |
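Throughout the rest of the patch, map-word mark bits (object->IsMarked(), map_word.ClearMark()) give way to queries against this external bitmap (Marking::IsMarked(object)). A minimal sketch of how a side bitmap can map an address to a mark bit; the class shape, cell width, and word size here are illustrative assumptions, not the actual V8 layout:

```cpp
#include <cstddef>
#include <cstdint>
#include <vector>

// Toy side bitmap: one mark bit per pointer-aligned word of [base, base+size).
class MarkBitmap {
 public:
  MarkBitmap(uintptr_t base, size_t size_in_bytes)
      : base_(base),
        cells_((size_in_bytes >> kPointerSizeLog2) / kBitsPerCell + 1, 0) {}

  bool IsMarked(uintptr_t addr) const {
    size_t bit = BitIndex(addr);
    return (cells_[bit / kBitsPerCell] >> (bit % kBitsPerCell)) & 1;
  }

  void SetMark(uintptr_t addr) {
    size_t bit = BitIndex(addr);
    cells_[bit / kBitsPerCell] |= 1u << (bit % kBitsPerCell);
  }

  void ClearMark(uintptr_t addr) {
    size_t bit = BitIndex(addr);
    cells_[bit / kBitsPerCell] &= ~(1u << (bit % kBitsPerCell));
  }

  // Clear the bits covering size_in_bytes bytes starting at addr. Done bit by
  // bit here; a real implementation can blank whole cells at a time.
  void ClearRange(uintptr_t addr, int size_in_bytes) {
    for (int off = 0; off < size_in_bytes; off += 1 << kPointerSizeLog2) {
      ClearMark(addr + off);
    }
  }

 private:
  static const int kPointerSizeLog2 = 3;  // Assumes a 64-bit build.
  static const int kBitsPerCell = 32;

  size_t BitIndex(uintptr_t addr) const {
    return (addr - base_) >> kPointerSizeLog2;
  }

  uintptr_t base_;
  std::vector<uint32_t> cells_;
};
```

Under this reading, the Marking::ClearRange(bottom, top - bottom) call added to Prepare() below simply blanks the bits covering the used part of the new space.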
| 90 | |
| 69 void MarkCompactCollector::CollectGarbage() { | 91 void MarkCompactCollector::CollectGarbage() { |
| 70 // Make sure that Prepare() has been called. The individual steps below will | 92 // Make sure that Prepare() has been called. The individual steps below will |
| 71 // update the state as they proceed. | 93 // update the state as they proceed. |
| 72 ASSERT(state_ == PREPARE_GC); | 94 ASSERT(state_ == PREPARE_GC); |
| 73 | 95 |
| 74 // Prepare has selected whether to compact the old generation or not. | 96 // Prepare has selected whether to compact the old generation or not. |
| 75 // Tell the tracer. | 97 // Tell the tracer. |
| 76 if (IsCompacting()) tracer_->set_is_compacting(); | 98 if (IsCompacting()) tracer_->set_is_compacting(); |
| 77 | 99 |
| 78 MarkLiveObjects(); | 100 MarkLiveObjects(); |
| 79 | 101 |
| 80 if (FLAG_collect_maps) ClearNonLiveTransitions(); | 102 if (FLAG_collect_maps) ClearNonLiveTransitions(); |
| 81 | 103 |
| 82 SweepLargeObjectSpace(); | 104 SweepLargeObjectSpace(); |
| 83 | 105 |
| 84 SweepSpaces(); | 106 SweepSpaces(); |
| 85 PcToCodeCache::FlushPcToCodeCache(); | 107 PcToCodeCache::FlushPcToCodeCache(); |
| 86 | 108 |
| 87 Finish(); | 109 Finish(); |
| 88 | 110 |
| 89 // Save the count of marked objects remaining after the collection and | 111 // Check that swept all marked objects and |
Erik Corry (2011/01/07 12:13:21):
that swept -> that we swept
Also formatting is off
| 90 // null out the GC tracer. | 112 // null out the GC tracer. |
| 91 previous_marked_count_ = tracer_->marked_count(); | 113 ASSERT(tracer_->marked_count() == 0); |
| 92 ASSERT(previous_marked_count_ == 0); | |
| 93 tracer_ = NULL; | 114 tracer_ = NULL; |
| 94 } | 115 } |
| 95 | 116 |
| 96 | 117 |
| 118 #ifdef DEBUG | |
| 119 static void VerifyMarkbitsAreClean(PagedSpace* space) { | |
| 120 PageIterator it(space, PageIterator::PAGES_IN_USE); | |
| 121 | |
| 122 while (it.has_next()) { | |
| 123 Page* p = it.next(); | |
| 124 ASSERT(p->markbits()->IsClean()); | |
| 125 } | |
| 126 } | |
| 127 | |
| 128 static void VerifyMarkbitsAreClean() { | |
| 129 VerifyMarkbitsAreClean(Heap::old_pointer_space()); | |
| 130 VerifyMarkbitsAreClean(Heap::old_data_space()); | |
| 131 VerifyMarkbitsAreClean(Heap::code_space()); | |
| 132 VerifyMarkbitsAreClean(Heap::cell_space()); | |
| 133 VerifyMarkbitsAreClean(Heap::map_space()); | |
| 134 } | |
| 135 #endif | |
| 136 | |
| 137 | |
| 97 void MarkCompactCollector::Prepare(GCTracer* tracer) { | 138 void MarkCompactCollector::Prepare(GCTracer* tracer) { |
| 98 FLAG_flush_code = false; | 139 FLAG_flush_code = false; |
| 99 FLAG_always_compact = false; | 140 FLAG_always_compact = false; |
| 100 FLAG_never_compact = true; | 141 FLAG_never_compact = true; |
| 101 | 142 |
| 102 // Rather than passing the tracer around we stash it in a static member | 143 // Rather than passing the tracer around we stash it in a static member |
| 103 // variable. | 144 // variable. |
| 104 tracer_ = tracer; | 145 tracer_ = tracer; |
| 105 | 146 |
| 106 #ifdef DEBUG | 147 #ifdef DEBUG |
| (...skipping 10 matching lines...) | |
| 117 if (!Heap::map_space()->MapPointersEncodable()) | 158 if (!Heap::map_space()->MapPointersEncodable()) |
| 118 compacting_collection_ = false; | 159 compacting_collection_ = false; |
| 119 if (FLAG_collect_maps) CreateBackPointers(); | 160 if (FLAG_collect_maps) CreateBackPointers(); |
| 120 | 161 |
| 121 PagedSpaces spaces; | 162 PagedSpaces spaces; |
| 122 for (PagedSpace* space = spaces.next(); | 163 for (PagedSpace* space = spaces.next(); |
| 123 space != NULL; space = spaces.next()) { | 164 space != NULL; space = spaces.next()) { |
| 124 space->PrepareForMarkCompact(compacting_collection_); | 165 space->PrepareForMarkCompact(compacting_collection_); |
| 125 } | 166 } |
| 126 | 167 |
| 168 Address new_space_top = Heap::new_space()->top(); | |
| 169 Address new_space_bottom = Heap::new_space()->bottom(); | |
| 170 Marking::ClearRange(new_space_bottom, | |
| 171 static_cast<int>(new_space_top - new_space_bottom)); | |
| 172 | |
| 127 #ifdef DEBUG | 173 #ifdef DEBUG |
| 174 VerifyMarkbitsAreClean(); | |
| 175 | |
| 128 live_bytes_ = 0; | 176 live_bytes_ = 0; |
| 129 live_young_objects_size_ = 0; | 177 live_young_objects_size_ = 0; |
| 130 live_old_pointer_objects_size_ = 0; | 178 live_old_pointer_objects_size_ = 0; |
| 131 live_old_data_objects_size_ = 0; | 179 live_old_data_objects_size_ = 0; |
| 132 live_code_objects_size_ = 0; | 180 live_code_objects_size_ = 0; |
| 133 live_map_objects_size_ = 0; | 181 live_map_objects_size_ = 0; |
| 134 live_cell_objects_size_ = 0; | 182 live_cell_objects_size_ = 0; |
| 135 live_lo_objects_size_ = 0; | 183 live_lo_objects_size_ = 0; |
| 136 #endif | 184 #endif |
| 137 } | 185 } |
| (...skipping 96 matching lines...) | |
| 234 Code* lazy_compile = Builtins::builtin(Builtins::LazyCompile); | 282 Code* lazy_compile = Builtins::builtin(Builtins::LazyCompile); |
| 235 | 283 |
| 236 JSFunction* candidate = jsfunction_candidates_head_; | 284 JSFunction* candidate = jsfunction_candidates_head_; |
| 237 JSFunction* next_candidate; | 285 JSFunction* next_candidate; |
| 238 while (candidate != NULL) { | 286 while (candidate != NULL) { |
| 239 next_candidate = GetNextCandidate(candidate); | 287 next_candidate = GetNextCandidate(candidate); |
| 240 | 288 |
| 241 SharedFunctionInfo* shared = candidate->unchecked_shared(); | 289 SharedFunctionInfo* shared = candidate->unchecked_shared(); |
| 242 | 290 |
| 243 Code* code = shared->unchecked_code(); | 291 Code* code = shared->unchecked_code(); |
| 244 if (!code->IsMarked()) { | 292 if (!Marking::IsMarked(code->address())) { |
| 245 shared->set_code(lazy_compile); | 293 shared->set_code(lazy_compile); |
| 246 candidate->set_code(lazy_compile); | 294 candidate->set_code(lazy_compile); |
| 247 } else { | 295 } else { |
| 248 candidate->set_code(shared->unchecked_code()); | 296 candidate->set_code(shared->unchecked_code()); |
| 249 } | 297 } |
| 250 | 298 |
| 251 candidate = next_candidate; | 299 candidate = next_candidate; |
| 252 } | 300 } |
| 253 | 301 |
| 254 jsfunction_candidates_head_ = NULL; | 302 jsfunction_candidates_head_ = NULL; |
| 255 } | 303 } |
| 256 | 304 |
| 257 | 305 |
| 258 static void ProcessSharedFunctionInfoCandidates() { | 306 static void ProcessSharedFunctionInfoCandidates() { |
| 259 Code* lazy_compile = Builtins::builtin(Builtins::LazyCompile); | 307 Code* lazy_compile = Builtins::builtin(Builtins::LazyCompile); |
| 260 | 308 |
| 261 SharedFunctionInfo* candidate = shared_function_info_candidates_head_; | 309 SharedFunctionInfo* candidate = shared_function_info_candidates_head_; |
| 262 SharedFunctionInfo* next_candidate; | 310 SharedFunctionInfo* next_candidate; |
| 263 while (candidate != NULL) { | 311 while (candidate != NULL) { |
| 264 next_candidate = GetNextCandidate(candidate); | 312 next_candidate = GetNextCandidate(candidate); |
| 265 SetNextCandidate(candidate, NULL); | 313 SetNextCandidate(candidate, NULL); |
| 266 | 314 |
| 267 Code* code = candidate->unchecked_code(); | 315 Code* code = candidate->unchecked_code(); |
| 268 if (!code->IsMarked()) { | 316 if (!Marking::IsMarked(code->address())) { |
| 269 candidate->set_code(lazy_compile); | 317 candidate->set_code(lazy_compile); |
| 270 } | 318 } |
| 271 | 319 |
| 272 candidate = next_candidate; | 320 candidate = next_candidate; |
| 273 } | 321 } |
| 274 | 322 |
| 275 shared_function_info_candidates_head_ = NULL; | 323 shared_function_info_candidates_head_ = NULL; |
| 276 } | 324 } |
| 277 | 325 |
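Both Process*Candidates loops above walk an intrusive list: AddCandidate, GetNextCandidate and SetNextCandidate (defined outside this hunk) thread the next pointer through a field of the candidate object itself, so gathering candidates allocates nothing during GC. A simplified sketch of the pattern, with hypothetical types standing in for JSFunction/SharedFunctionInfo:

```cpp
// Toy intrusive candidate list (hypothetical types; in the collector the link
// is stashed in an otherwise-unused field of the heap object).
struct Candidate {
  Candidate* next_candidate;  // Field reused as the list link during GC.
  bool code_is_marked;        // Stands in for Marking::IsMarked(code).
};

static Candidate* candidates_head = nullptr;

static void AddCandidate(Candidate* c) {
  c->next_candidate = candidates_head;
  candidates_head = c;
}

static void ProcessCandidates() {
  Candidate* candidate = candidates_head;
  while (candidate != nullptr) {
    Candidate* next = candidate->next_candidate;  // Grab before unlinking.
    if (!candidate->code_is_marked) {
      // Flush: the real code points the function back at the LazyCompile stub.
    }
    candidate = next;
  }
  candidates_head = nullptr;  // The list only lives for one GC cycle.
}
```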
| 278 | 326 |
| (...skipping 51 matching lines...) | |
| 330 // | 378 // |
| 331 // Here we assume that if we change *p, we replace it with a heap object | 379 // Here we assume that if we change *p, we replace it with a heap object |
| 332 // (ie, the left substring of a cons string is always a heap object). | 380 // (ie, the left substring of a cons string is always a heap object). |
| 333 // | 381 // |
| 334 // The check performed is: | 382 // The check performed is: |
| 335 // object->IsConsString() && !object->IsSymbol() && | 383 // object->IsConsString() && !object->IsSymbol() && |
| 336 // (ConsString::cast(object)->second() == Heap::empty_string()) | 384 // (ConsString::cast(object)->second() == Heap::empty_string()) |
| 337 // except the maps for the object and its possible substrings might be | 385 // except the maps for the object and its possible substrings might be |
| 338 // marked. | 386 // marked. |
| 339 HeapObject* object = HeapObject::cast(*p); | 387 HeapObject* object = HeapObject::cast(*p); |
| 340 MapWord map_word = object->map_word(); | 388 InstanceType type = object->map()->instance_type(); |
| 341 map_word.ClearMark(); | |
| 342 InstanceType type = map_word.ToMap()->instance_type(); | |
| 343 if ((type & kShortcutTypeMask) != kShortcutTypeTag) return object; | 389 if ((type & kShortcutTypeMask) != kShortcutTypeTag) return object; |
| 344 | 390 |
| 345 Object* second = reinterpret_cast<ConsString*>(object)->unchecked_second(); | 391 Object* second = reinterpret_cast<ConsString*>(object)->unchecked_second(); |
| 346 if (second != Heap::raw_unchecked_empty_string()) { | 392 if (second != Heap::raw_unchecked_empty_string()) { |
| 347 return object; | 393 return object; |
| 348 } | 394 } |
| 349 | 395 |
| 350 // Since we don't have the object's start, it is impossible to update the | 396 // Since we don't have the object's start, it is impossible to update the |
| 351 // page dirty marks. Therefore, we only replace the string with its left | 397 // page dirty marks. Therefore, we only replace the string with its left |
| 352 // substring when page dirty marks do not change. | 398 // substring when page dirty marks do not change. |
| (...skipping 134 matching lines...) | |
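The shortcut is easy to see on a toy model: a cons string whose right child is the empty string holds the same characters as its left child, so a reference to the cons node can simply be redirected. A simplified stand-in (hypothetical types; the real code works on tagged heap pointers and must also respect symbols and page dirty marks, as the comments above note):

```cpp
#include <string>

// Toy cons string: conceptually the concatenation first + second.
struct ToyCons {
  std::string first;
  std::string second;
};

// If the right child is empty, the node is equivalent to its left child, so a
// reference to it can be redirected without copying any characters.
static const std::string* ShortCircuit(const ToyCons& cons) {
  if (!cons.second.empty()) return nullptr;  // Not flattenable; keep the node.
  return &cons.first;
}
```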
| 487 INLINE(static void MarkObjectByPointer(Object** p)) { | 533 INLINE(static void MarkObjectByPointer(Object** p)) { |
| 488 if (!(*p)->IsHeapObject()) return; | 534 if (!(*p)->IsHeapObject()) return; |
| 489 HeapObject* object = ShortCircuitConsString(p); | 535 HeapObject* object = ShortCircuitConsString(p); |
| 490 MarkCompactCollector::MarkObject(object); | 536 MarkCompactCollector::MarkObject(object); |
| 491 } | 537 } |
| 492 | 538 |
| 493 // Visit an unmarked object. | 539 // Visit an unmarked object. |
| 494 static inline void VisitUnmarkedObject(HeapObject* obj) { | 540 static inline void VisitUnmarkedObject(HeapObject* obj) { |
| 495 #ifdef DEBUG | 541 #ifdef DEBUG |
| 496 ASSERT(Heap::Contains(obj)); | 542 ASSERT(Heap::Contains(obj)); |
| 497 ASSERT(!obj->IsMarked()); | 543 ASSERT(!Marking::IsMarked(obj->address())); |
Erik Corry (2011/01/07 12:13:21):
I wonder whether the compiler can work out that th
| 498 #endif | 544 #endif |
| 499 Map* map = obj->map(); | 545 Map* map = obj->map(); |
| 500 MarkCompactCollector::SetMark(obj); | 546 MarkCompactCollector::SetMark(obj); |
| 501 // Mark the map pointer and the body. | 547 // Mark the map pointer and the body. |
| 502 MarkCompactCollector::MarkObject(map); | 548 MarkCompactCollector::MarkObject(map); |
| 503 IterateBody(map, obj); | 549 IterateBody(map, obj); |
| 504 } | 550 } |
| 505 | 551 |
| 506 // Visit all unmarked objects pointed to by [start, end). | 552 // Visit all unmarked objects pointed to by [start, end). |
| 507 // Returns false if the operation fails (lack of stack space). | 553 // Returns false if the operation fails (lack of stack space). |
| 508 static inline bool VisitUnmarkedObjects(Object** start, Object** end) { | 554 static inline bool VisitUnmarkedObjects(Object** start, Object** end) { |
Erik Corry (2011/01/07 12:13:21):
It would be nice to disassemble this function to s
| 509 // Return false if we are close to the stack limit. | 555 // Return false if we are close to the stack limit. |
| 510 StackLimitCheck check; | 556 StackLimitCheck check; |
| 511 if (check.HasOverflowed()) return false; | 557 if (check.HasOverflowed()) return false; |
| 512 | 558 |
| 513 // Visit the unmarked objects. | 559 // Visit the unmarked objects. |
| 514 for (Object** p = start; p < end; p++) { | 560 for (Object** p = start; p < end; p++) { |
| 515 if (!(*p)->IsHeapObject()) continue; | 561 if (!(*p)->IsHeapObject()) continue; |
| 516 HeapObject* obj = HeapObject::cast(*p); | 562 HeapObject* obj = HeapObject::cast(*p); |
| 517 if (obj->IsMarked()) continue; | 563 if (Marking::IsMarked(obj)) continue; |
| 518 VisitUnmarkedObject(obj); | 564 VisitUnmarkedObject(obj); |
| 519 } | 565 } |
| 520 return true; | 566 return true; |
| 521 } | 567 } |
| 522 | 568 |
| 523 static inline void VisitExternalReference(Address* p) { } | 569 static inline void VisitExternalReference(Address* p) { } |
| 524 static inline void VisitRuntimeEntry(RelocInfo* rinfo) { } | 570 static inline void VisitRuntimeEntry(RelocInfo* rinfo) { } |
| 525 | 571 |
| 526 private: | 572 private: |
| 527 class DataObjectVisitor { | 573 class DataObjectVisitor { |
| (...skipping 40 matching lines...) | |
| 568 inline static bool IsCompiled(SharedFunctionInfo* function) { | 614 inline static bool IsCompiled(SharedFunctionInfo* function) { |
| 569 return | 615 return |
| 570 function->unchecked_code() != Builtins::builtin(Builtins::LazyCompile); | 616 function->unchecked_code() != Builtins::builtin(Builtins::LazyCompile); |
| 571 } | 617 } |
| 572 | 618 |
| 573 inline static bool IsFlushable(JSFunction* function) { | 619 inline static bool IsFlushable(JSFunction* function) { |
| 574 SharedFunctionInfo* shared_info = function->unchecked_shared(); | 620 SharedFunctionInfo* shared_info = function->unchecked_shared(); |
| 575 | 621 |
| 576 // Code is either on stack, in compilation cache or referenced | 622 // Code is either on stack, in compilation cache or referenced |
| 577 // by optimized version of function. | 623 // by optimized version of function. |
| 578 if (function->unchecked_code()->IsMarked()) { | 624 if (Marking::IsMarked(function->unchecked_code())) { |
| 579 shared_info->set_code_age(0); | 625 shared_info->set_code_age(0); |
| 580 return false; | 626 return false; |
| 581 } | 627 } |
| 582 | 628 |
| 583 // We do not flush code for optimized functions. | 629 // We do not flush code for optimized functions. |
| 584 if (function->code() != shared_info->unchecked_code()) { | 630 if (function->code() != shared_info->unchecked_code()) { |
| 585 return false; | 631 return false; |
| 586 } | 632 } |
| 587 | 633 |
| 588 return IsFlushable(shared_info); | 634 return IsFlushable(shared_info); |
| 589 } | 635 } |
| 590 | 636 |
| 591 inline static bool IsFlushable(SharedFunctionInfo* shared_info) { | 637 inline static bool IsFlushable(SharedFunctionInfo* shared_info) { |
| 592 // Code is either on stack, in compilation cache or referenced | 638 // Code is either on stack, in compilation cache or referenced |
| 593 // by optimized version of function. | 639 // by optimized version of function. |
| 594 if (shared_info->unchecked_code()->IsMarked()) { | 640 if (Marking::IsMarked(shared_info->unchecked_code())) { |
| 595 shared_info->set_code_age(0); | 641 shared_info->set_code_age(0); |
| 596 return false; | 642 return false; |
| 597 } | 643 } |
| 598 | 644 |
| 599 // The function must be compiled and have the source code available, | 645 // The function must be compiled and have the source code available, |
| 600 // to be able to recompile it in case we need the function again. | 646 // to be able to recompile it in case we need the function again. |
| 601 if (!(shared_info->is_compiled() && HasSourceCode(shared_info))) { | 647 if (!(shared_info->is_compiled() && HasSourceCode(shared_info))) { |
| 602 return false; | 648 return false; |
| 603 } | 649 } |
| 604 | 650 |
| (...skipping 30 matching lines...) | |
| 635 // This function's code looks flushable. But we have to postpone the | 681 // This function's code looks flushable. But we have to postpone the |
| 636 // decision until we see all functions that point to the same | 682 // decision until we see all functions that point to the same |
| 637 // SharedFunctionInfo because some of them might be optimized. | 683 // SharedFunctionInfo because some of them might be optimized. |
| 638 // That would make the nonoptimized version of the code nonflushable, | 684 // That would make the nonoptimized version of the code nonflushable, |
| 639 // because it is required for bailing out from optimized code. | 685 // because it is required for bailing out from optimized code. |
| 640 FlushCode::AddCandidate(function); | 686 FlushCode::AddCandidate(function); |
| 641 return true; | 687 return true; |
| 642 } | 688 } |
| 643 | 689 |
| 644 | 690 |
| 645 static inline Map* SafeMap(Object* obj) { | 691 static inline Map* SafeMap(Object* obj) { |
Erik Corry (2011/01/07 12:13:21):
Get rid of this function?
| 646 MapWord map_word = HeapObject::cast(obj)->map_word(); | 692 return HeapObject::cast(obj)->map(); |
| 647 map_word.ClearMark(); | |
| 648 map_word.ClearOverflow(); | |
| 649 return map_word.ToMap(); | |
| 650 } | 693 } |
| 651 | 694 |
| 652 | 695 |
| 653 static inline bool IsJSBuiltinsObject(Object* obj) { | 696 static inline bool IsJSBuiltinsObject(Object* obj) { |
| 654 return obj->IsHeapObject() && | 697 return obj->IsHeapObject() && |
| 655 (SafeMap(obj)->instance_type() == JS_BUILTINS_OBJECT_TYPE); | 698 (SafeMap(obj)->instance_type() == JS_BUILTINS_OBJECT_TYPE); |
| 656 } | 699 } |
| 657 | 700 |
| 658 | 701 |
| 659 static inline bool IsValidNotBuiltinContext(Object* ctx) { | 702 static inline bool IsValidNotBuiltinContext(Object* ctx) { |
| (...skipping 115 matching lines...) | |
| 775 SLOT_ADDR(object, JSFunction::kCodeEntryOffset)); | 818 SLOT_ADDR(object, JSFunction::kCodeEntryOffset)); |
| 776 | 819 |
| 777 if (!flush_code_candidate) { | 820 if (!flush_code_candidate) { |
| 778 VisitCodeEntry(object->address() + JSFunction::kCodeEntryOffset); | 821 VisitCodeEntry(object->address() + JSFunction::kCodeEntryOffset); |
| 779 } else { | 822 } else { |
| 780 // Don't visit code object. | 823 // Don't visit code object. |
| 781 | 824 |
| 782 // Visit shared function info to avoid double checking of it's | 825 // Visit shared function info to avoid double checking of it's |
| 783 // flushability. | 826 // flushability. |
| 784 SharedFunctionInfo* shared_info = object->unchecked_shared(); | 827 SharedFunctionInfo* shared_info = object->unchecked_shared(); |
| 785 if (!shared_info->IsMarked()) { | 828 if (!Marking::IsMarked(shared_info)) { |
| 786 Map* shared_info_map = shared_info->map(); | 829 Map* shared_info_map = shared_info->map(); |
| 787 MarkCompactCollector::SetMark(shared_info); | 830 MarkCompactCollector::SetMark(shared_info); |
| 788 MarkCompactCollector::MarkObject(shared_info_map); | 831 MarkCompactCollector::MarkObject(shared_info_map); |
| 789 VisitSharedFunctionInfoAndFlushCodeGeneric(shared_info_map, | 832 VisitSharedFunctionInfoAndFlushCodeGeneric(shared_info_map, |
| 790 shared_info, | 833 shared_info, |
| 791 true); | 834 true); |
| 792 } | 835 } |
| 793 } | 836 } |
| 794 | 837 |
| 795 VisitPointers(SLOT_ADDR(object, | 838 VisitPointers(SLOT_ADDR(object, |
| (...skipping 125 matching lines...) | |
| 921 void VisitPointers(Object** start, Object** end) { | 964 void VisitPointers(Object** start, Object** end) { |
| 922 for (Object** p = start; p < end; p++) MarkObjectByPointer(p); | 965 for (Object** p = start; p < end; p++) MarkObjectByPointer(p); |
| 923 } | 966 } |
| 924 | 967 |
| 925 private: | 968 private: |
| 926 void MarkObjectByPointer(Object** p) { | 969 void MarkObjectByPointer(Object** p) { |
| 927 if (!(*p)->IsHeapObject()) return; | 970 if (!(*p)->IsHeapObject()) return; |
| 928 | 971 |
| 929 // Replace flat cons strings in place. | 972 // Replace flat cons strings in place. |
| 930 HeapObject* object = ShortCircuitConsString(p); | 973 HeapObject* object = ShortCircuitConsString(p); |
| 931 if (object->IsMarked()) return; | 974 if (Marking::IsMarked(object)) return; |
| 932 | 975 |
| 933 Map* map = object->map(); | 976 Map* map = object->map(); |
| 934 // Mark the object. | 977 // Mark the object. |
| 935 MarkCompactCollector::SetMark(object); | 978 MarkCompactCollector::SetMark(object); |
| 936 | 979 |
| 937 // Mark the map pointer and body, and push them on the marking stack. | 980 // Mark the map pointer and body, and push them on the marking stack. |
| 938 MarkCompactCollector::MarkObject(map); | 981 MarkCompactCollector::MarkObject(map); |
| 939 StaticMarkingVisitor::IterateBody(map, object); | 982 StaticMarkingVisitor::IterateBody(map, object); |
| 940 | 983 |
| 941 // Mark all the objects reachable from the map and body. May leave | 984 // Mark all the objects reachable from the map and body. May leave |
| 942 // overflowed objects in the heap. | 985 // overflowed objects in the heap. |
| 943 MarkCompactCollector::EmptyMarkingStack(); | 986 MarkCompactCollector::EmptyMarkingStack(); |
| 944 } | 987 } |
| 945 }; | 988 }; |
| 946 | 989 |
| 947 | 990 |
| 948 // Helper class for pruning the symbol table. | 991 // Helper class for pruning the symbol table. |
| 949 class SymbolTableCleaner : public ObjectVisitor { | 992 class SymbolTableCleaner : public ObjectVisitor { |
| 950 public: | 993 public: |
| 951 SymbolTableCleaner() : pointers_removed_(0) { } | 994 SymbolTableCleaner() : pointers_removed_(0) { } |
| 952 | 995 |
| 953 virtual void VisitPointers(Object** start, Object** end) { | 996 virtual void VisitPointers(Object** start, Object** end) { |
| 954 // Visit all HeapObject pointers in [start, end). | 997 // Visit all HeapObject pointers in [start, end). |
| 955 for (Object** p = start; p < end; p++) { | 998 for (Object** p = start; p < end; p++) { |
| 956 if ((*p)->IsHeapObject() && !HeapObject::cast(*p)->IsMarked()) { | 999 if ((*p)->IsHeapObject() && |
| 1000 !Marking::IsMarked(HeapObject::cast(*p))) { | |
| 957 // Check if the symbol being pruned is an external symbol. We need to | 1001 // Check if the symbol being pruned is an external symbol. We need to |
| 958 // delete the associated external data as this symbol is going away. | 1002 // delete the associated external data as this symbol is going away. |
| 959 | 1003 |
| 960 // Since no objects have yet been moved we can safely access the map of | 1004 // Since no objects have yet been moved we can safely access the map of |
| 961 // the object. | 1005 // the object. |
| 962 if ((*p)->IsExternalString()) { | 1006 if ((*p)->IsExternalString()) { |
| 963 Heap::FinalizeExternalString(String::cast(*p)); | 1007 Heap::FinalizeExternalString(String::cast(*p)); |
| 964 } | 1008 } |
| 965 // Set the entry to null_value (as deleted). | 1009 // Set the entry to null_value (as deleted). |
| 966 *p = Heap::raw_unchecked_null_value(); | 1010 *p = Heap::raw_unchecked_null_value(); |
| 967 pointers_removed_++; | 1011 pointers_removed_++; |
| 968 } | 1012 } |
| 969 } | 1013 } |
| 970 } | 1014 } |
| 971 | 1015 |
| 972 int PointersRemoved() { | 1016 int PointersRemoved() { |
| 973 return pointers_removed_; | 1017 return pointers_removed_; |
| 974 } | 1018 } |
| 975 private: | 1019 private: |
| 976 int pointers_removed_; | 1020 int pointers_removed_; |
| 977 }; | 1021 }; |
| 978 | 1022 |
| 979 | 1023 |
| 980 // Implementation of WeakObjectRetainer for mark compact GCs. All marked objects | 1024 // Implementation of WeakObjectRetainer for mark compact GCs. All marked objects |
| 981 // are retained. | 1025 // are retained. |
| 982 class MarkCompactWeakObjectRetainer : public WeakObjectRetainer { | 1026 class MarkCompactWeakObjectRetainer : public WeakObjectRetainer { |
| 983 public: | 1027 public: |
| 984 virtual Object* RetainAs(Object* object) { | 1028 virtual Object* RetainAs(Object* object) { |
| 985 MapWord first_word = HeapObject::cast(object)->map_word(); | 1029 if (Marking::IsMarked(HeapObject::cast(object))) { |
| 986 if (first_word.IsMarked()) { | |
| 987 return object; | 1030 return object; |
| 988 } else { | 1031 } else { |
| 989 return NULL; | 1032 return NULL; |
| 990 } | 1033 } |
| 991 } | 1034 } |
| 992 }; | 1035 }; |
| 993 | 1036 |
| 994 | 1037 |
| 995 void MarkCompactCollector::MarkUnmarkedObject(HeapObject* object) { | 1038 void MarkCompactCollector::ProcessNewlyMarkedObject(HeapObject* object) { |
| 996 ASSERT(!object->IsMarked()); | 1039 ASSERT(Marking::IsMarked(object)); |
| 997 ASSERT(Heap::Contains(object)); | 1040 ASSERT(Heap::Contains(object)); |
| 998 if (object->IsMap()) { | 1041 if (object->IsMap()) { |
| 999 Map* map = Map::cast(object); | 1042 Map* map = Map::cast(object); |
| 1000 if (FLAG_cleanup_caches_in_maps_at_gc) { | 1043 if (FLAG_cleanup_caches_in_maps_at_gc) { |
| 1001 map->ClearCodeCache(); | 1044 map->ClearCodeCache(); |
| 1002 } | 1045 } |
| 1003 SetMark(map); | |
| 1004 if (FLAG_collect_maps && | 1046 if (FLAG_collect_maps && |
| 1005 map->instance_type() >= FIRST_JS_OBJECT_TYPE && | 1047 map->instance_type() >= FIRST_JS_OBJECT_TYPE && |
| 1006 map->instance_type() <= JS_FUNCTION_TYPE) { | 1048 map->instance_type() <= JS_FUNCTION_TYPE) { |
| 1007 MarkMapContents(map); | 1049 MarkMapContents(map); |
| 1008 } else { | 1050 } else { |
| 1009 marking_stack.Push(map); | 1051 marking_stack.Push(map); |
| 1010 } | 1052 } |
| 1011 } else { | 1053 } else { |
| 1012 SetMark(object); | |
| 1013 marking_stack.Push(object); | 1054 marking_stack.Push(object); |
| 1014 } | 1055 } |
| 1015 } | 1056 } |
| 1016 | 1057 |
| 1017 | 1058 |
| 1018 void MarkCompactCollector::MarkMapContents(Map* map) { | 1059 void MarkCompactCollector::MarkMapContents(Map* map) { |
| 1019 MarkDescriptorArray(reinterpret_cast<DescriptorArray*>( | 1060 MarkDescriptorArray(reinterpret_cast<DescriptorArray*>( |
| 1020 *HeapObject::RawField(map, Map::kInstanceDescriptorsOffset))); | 1061 *HeapObject::RawField(map, Map::kInstanceDescriptorsOffset))); |
| 1021 | 1062 |
| 1022 // Mark the Object* fields of the Map. | 1063 // Mark the Object* fields of the Map. |
| 1023 // Since the descriptor array has been marked already, it is fine | 1064 // Since the descriptor array has been marked already, it is fine |
| 1024 // that one of these fields contains a pointer to it. | 1065 // that one of these fields contains a pointer to it. |
| 1025 Object** start_slot = HeapObject::RawField(map, | 1066 Object** start_slot = HeapObject::RawField(map, |
| 1026 Map::kPointerFieldsBeginOffset); | 1067 Map::kPointerFieldsBeginOffset); |
| 1027 | 1068 |
| 1028 Object** end_slot = HeapObject::RawField(map, Map::kPointerFieldsEndOffset); | 1069 Object** end_slot = HeapObject::RawField(map, Map::kPointerFieldsEndOffset); |
| 1029 | 1070 |
| 1030 StaticMarkingVisitor::VisitPointers(start_slot, end_slot); | 1071 StaticMarkingVisitor::VisitPointers(start_slot, end_slot); |
| 1031 } | 1072 } |
| 1032 | 1073 |
| 1033 | 1074 |
| 1034 void MarkCompactCollector::MarkDescriptorArray( | 1075 void MarkCompactCollector::MarkDescriptorArray( |
| 1035 DescriptorArray* descriptors) { | 1076 DescriptorArray* descriptors) { |
| 1036 if (descriptors->IsMarked()) return; | 1077 if (Marking::IsMarked(descriptors)) return; |
| 1037 // Empty descriptor array is marked as a root before any maps are marked. | 1078 // Empty descriptor array is marked as a root before any maps are marked. |
| 1038 ASSERT(descriptors != Heap::raw_unchecked_empty_descriptor_array()); | 1079 ASSERT(descriptors != Heap::raw_unchecked_empty_descriptor_array()); |
| 1039 SetMark(descriptors); | 1080 SetMark(descriptors); |
| 1040 | 1081 |
| 1041 FixedArray* contents = reinterpret_cast<FixedArray*>( | 1082 FixedArray* contents = reinterpret_cast<FixedArray*>( |
| 1042 descriptors->get(DescriptorArray::kContentArrayIndex)); | 1083 descriptors->get(DescriptorArray::kContentArrayIndex)); |
| 1043 ASSERT(contents->IsHeapObject()); | 1084 ASSERT(contents->IsHeapObject()); |
| 1044 ASSERT(!contents->IsMarked()); | 1085 ASSERT(!Marking::IsMarked(contents)); |
| 1045 ASSERT(contents->IsFixedArray()); | 1086 ASSERT(contents->IsFixedArray()); |
| 1046 ASSERT(contents->length() >= 2); | 1087 ASSERT(contents->length() >= 2); |
| 1047 SetMark(contents); | 1088 SetMark(contents); |
| 1048 // Contents contains (value, details) pairs. If the details say that | 1089 // Contents contains (value, details) pairs. If the details say that |
| 1049 // the type of descriptor is MAP_TRANSITION, CONSTANT_TRANSITION, or | 1090 // the type of descriptor is MAP_TRANSITION, CONSTANT_TRANSITION, or |
| 1050 // NULL_DESCRIPTOR, we don't mark the value as live. Only for | 1091 // NULL_DESCRIPTOR, we don't mark the value as live. Only for |
| 1051 // MAP_TRANSITION and CONSTANT_TRANSITION is the value an Object* (a | 1092 // MAP_TRANSITION and CONSTANT_TRANSITION is the value an Object* (a |
| 1052 // Map*). | 1093 // Map*). |
| 1053 for (int i = 0; i < contents->length(); i += 2) { | 1094 for (int i = 0; i < contents->length(); i += 2) { |
| 1054 // If the pair (value, details) at index i, i+1 is not | 1095 // If the pair (value, details) at index i, i+1 is not |
| 1055 // a transition or null descriptor, mark the value. | 1096 // a transition or null descriptor, mark the value. |
| 1056 PropertyDetails details(Smi::cast(contents->get(i + 1))); | 1097 PropertyDetails details(Smi::cast(contents->get(i + 1))); |
| 1057 if (details.type() < FIRST_PHANTOM_PROPERTY_TYPE) { | 1098 if (details.type() < FIRST_PHANTOM_PROPERTY_TYPE) { |
| 1058 HeapObject* object = reinterpret_cast<HeapObject*>(contents->get(i)); | 1099 HeapObject* object = reinterpret_cast<HeapObject*>(contents->get(i)); |
| 1059 if (object->IsHeapObject() && !object->IsMarked()) { | 1100 if (object->IsHeapObject() && !Marking::IsMarked(object)) { |
| 1060 SetMark(object); | 1101 SetMark(object); |
| 1061 marking_stack.Push(object); | 1102 marking_stack.Push(object); |
| 1062 } | 1103 } |
| 1063 } | 1104 } |
| 1064 } | 1105 } |
| 1065 // The DescriptorArray descriptors contains a pointer to its contents array, | 1106 // The DescriptorArray descriptors contains a pointer to its contents array, |
| 1066 // but the contents array is already marked. | 1107 // but the contents array is already marked. |
| 1067 marking_stack.Push(descriptors); | 1108 marking_stack.Push(descriptors); |
| 1068 } | 1109 } |
| 1069 | 1110 |
| (...skipping 11 matching lines...) | |
| 1081 ASSERT(map->instance_descriptors() == Heap::empty_descriptor_array()); | 1122 ASSERT(map->instance_descriptors() == Heap::empty_descriptor_array()); |
| 1082 } | 1123 } |
| 1083 } | 1124 } |
| 1084 } | 1125 } |
| 1085 } | 1126 } |
| 1086 | 1127 |
| 1087 | 1128 |
| 1088 static int OverflowObjectSize(HeapObject* obj) { | 1129 static int OverflowObjectSize(HeapObject* obj) { |
| 1089 // Recover the normal map pointer, it might be marked as live and | 1130 // Recover the normal map pointer, it might be marked as live and |
| 1090 // overflowed. | 1131 // overflowed. |
| 1091 MapWord map_word = obj->map_word(); | 1132 return obj->Size(); |
| 1092 map_word.ClearMark(); | |
| 1093 map_word.ClearOverflow(); | |
| 1094 return obj->SizeFromMap(map_word.ToMap()); | |
| 1095 } | 1133 } |
| 1096 | 1134 |
| 1097 | 1135 |
| 1098 // Fill the marking stack with overflowed objects returned by the given | 1136 // Fill the marking stack with overflowed objects returned by the given |
| 1099 // iterator. Stop when the marking stack is filled or the end of the space | 1137 // iterator. Stop when the marking stack is filled or the end of the space |
| 1100 // is reached, whichever comes first. | 1138 // is reached, whichever comes first. |
| 1101 template<class T> | 1139 template<class T> |
| 1102 static void ScanOverflowedObjects(T* it) { | 1140 static void ScanOverflowedObjects(T* it) { |
| 1141 #if 0 | |
| 1103 // The caller should ensure that the marking stack is initially not full, | 1142 // The caller should ensure that the marking stack is initially not full, |
| 1104 // so that we don't waste effort pointlessly scanning for objects. | 1143 // so that we don't waste effort pointlessly scanning for objects. |
| 1105 ASSERT(!marking_stack.is_full()); | 1144 ASSERT(!marking_stack.is_full()); |
| 1106 | 1145 |
| 1107 for (HeapObject* object = it->next(); object != NULL; object = it->next()) { | 1146 for (HeapObject* object = it->next(); object != NULL; object = it->next()) { |
| 1108 if (object->IsOverflowed()) { | 1147 if (object->IsOverflowed()) { |
| 1109 object->ClearOverflow(); | 1148 object->ClearOverflow(); |
| 1110 ASSERT(object->IsMarked()); | 1149 ASSERT(Marking::IsMarked(object)); |
| 1111 ASSERT(Heap::Contains(object)); | 1150 ASSERT(Heap::Contains(object)); |
| 1112 marking_stack.Push(object); | 1151 marking_stack.Push(object); |
| 1113 if (marking_stack.is_full()) return; | 1152 if (marking_stack.is_full()) return; |
| 1114 } | 1153 } |
| 1115 } | 1154 } |
| 1155 #endif | |
| 1156 UNREACHABLE(); | |
| 1116 } | 1157 } |
| 1117 | 1158 |
| 1118 | 1159 |
| 1119 bool MarkCompactCollector::IsUnmarkedHeapObject(Object** p) { | 1160 bool MarkCompactCollector::IsUnmarkedHeapObject(Object** p) { |
| 1120 return (*p)->IsHeapObject() && !HeapObject::cast(*p)->IsMarked(); | 1161 return (*p)->IsHeapObject() && |
| 1162 !Marking::IsMarked(HeapObject::cast(*p)); | |
| 1121 } | 1163 } |
| 1122 | 1164 |
| 1123 | 1165 |
| 1124 void MarkCompactCollector::MarkSymbolTable() { | 1166 void MarkCompactCollector::MarkSymbolTable() { |
| 1125 SymbolTable* symbol_table = Heap::raw_unchecked_symbol_table(); | 1167 SymbolTable* symbol_table = Heap::raw_unchecked_symbol_table(); |
| 1126 // Mark the symbol table itself. | 1168 // Mark the symbol table itself. |
| 1127 SetMark(symbol_table); | 1169 SetMark(symbol_table); |
| 1128 // Explicitly mark the prefix. | 1170 // Explicitly mark the prefix. |
| 1129 MarkingVisitor marker; | 1171 MarkingVisitor marker; |
| 1130 symbol_table->IteratePrefix(&marker); | 1172 symbol_table->IteratePrefix(&marker); |
| (...skipping 21 matching lines...) | |
| 1152 List<ObjectGroup*>* object_groups = GlobalHandles::ObjectGroups(); | 1194 List<ObjectGroup*>* object_groups = GlobalHandles::ObjectGroups(); |
| 1153 | 1195 |
| 1154 for (int i = 0; i < object_groups->length(); i++) { | 1196 for (int i = 0; i < object_groups->length(); i++) { |
| 1155 ObjectGroup* entry = object_groups->at(i); | 1197 ObjectGroup* entry = object_groups->at(i); |
| 1156 if (entry == NULL) continue; | 1198 if (entry == NULL) continue; |
| 1157 | 1199 |
| 1158 List<Object**>& objects = entry->objects_; | 1200 List<Object**>& objects = entry->objects_; |
| 1159 bool group_marked = false; | 1201 bool group_marked = false; |
| 1160 for (int j = 0; j < objects.length(); j++) { | 1202 for (int j = 0; j < objects.length(); j++) { |
| 1161 Object* object = *objects[j]; | 1203 Object* object = *objects[j]; |
| 1162 if (object->IsHeapObject() && HeapObject::cast(object)->IsMarked()) { | 1204 if (object->IsHeapObject() && Marking::IsMarked(HeapObject::cast(object))) { |
| 1163 group_marked = true; | 1205 group_marked = true; |
| 1164 break; | 1206 break; |
| 1165 } | 1207 } |
| 1166 } | 1208 } |
| 1167 | 1209 |
| 1168 if (!group_marked) continue; | 1210 if (!group_marked) continue; |
| 1169 | 1211 |
| 1170 // An object in the group is marked, so mark as gray all white heap | 1212 // An object in the group is marked, so mark as gray all white heap |
| 1171 // objects in the group. | 1213 // objects in the group. |
| 1172 for (int j = 0; j < objects.length(); ++j) { | 1214 for (int j = 0; j < objects.length(); ++j) { |
| (...skipping 11 matching lines...) | |
| 1184 | 1226 |
| 1185 // Mark all objects reachable from the objects on the marking stack. | 1227 // Mark all objects reachable from the objects on the marking stack. |
| 1186 // Before: the marking stack contains zero or more heap object pointers. | 1228 // Before: the marking stack contains zero or more heap object pointers. |
| 1187 // After: the marking stack is empty, and all objects reachable from the | 1229 // After: the marking stack is empty, and all objects reachable from the |
| 1188 // marking stack have been marked, or are overflowed in the heap. | 1230 // marking stack have been marked, or are overflowed in the heap. |
| 1189 void MarkCompactCollector::EmptyMarkingStack() { | 1231 void MarkCompactCollector::EmptyMarkingStack() { |
| 1190 while (!marking_stack.is_empty()) { | 1232 while (!marking_stack.is_empty()) { |
| 1191 HeapObject* object = marking_stack.Pop(); | 1233 HeapObject* object = marking_stack.Pop(); |
| 1192 ASSERT(object->IsHeapObject()); | 1234 ASSERT(object->IsHeapObject()); |
| 1193 ASSERT(Heap::Contains(object)); | 1235 ASSERT(Heap::Contains(object)); |
| 1194 ASSERT(object->IsMarked()); | 1236 ASSERT(Marking::IsMarked(object)); |
| 1195 ASSERT(!object->IsOverflowed()); | 1237 ASSERT(!object->IsOverflowed()); |
| 1196 | 1238 |
| 1197 // Because the object is marked, we have to recover the original map | 1239 // Because the object is marked, we have to recover the original map |
Erik Corry (2011/01/07 12:13:21):
comment out of date
| 1198 // pointer and use it to mark the object's body. | 1240 // pointer and use it to mark the object's body. |
| 1199 MapWord map_word = object->map_word(); | 1241 Map* map = object->map(); |
| 1200 map_word.ClearMark(); | |
| 1201 Map* map = map_word.ToMap(); | |
| 1202 MarkObject(map); | 1242 MarkObject(map); |
| 1203 | 1243 |
| 1204 StaticMarkingVisitor::IterateBody(map, object); | 1244 StaticMarkingVisitor::IterateBody(map, object); |
| 1205 } | 1245 } |
| 1206 } | 1246 } |
| 1207 | 1247 |
| 1208 | 1248 |
| 1209 // Sweep the heap for overflowed objects, clear their overflow bits, and | 1249 // Sweep the heap for overflowed objects, clear their overflow bits, and |
| 1210 // push them on the marking stack. Stop early if the marking stack fills | 1250 // push them on the marking stack. Stop early if the marking stack fills |
| 1211 // before sweeping completes. If sweeping completes, there are no remaining | 1251 // before sweeping completes. If sweeping completes, there are no remaining |
| (...skipping 159 matching lines...) | |
| 1371 ASSERT(state_ == MARK_LIVE_OBJECTS); | 1411 ASSERT(state_ == MARK_LIVE_OBJECTS); |
| 1372 state_ = | 1412 state_ = |
| 1373 compacting_collection_ ? ENCODE_FORWARDING_ADDRESSES : SWEEP_SPACES; | 1413 compacting_collection_ ? ENCODE_FORWARDING_ADDRESSES : SWEEP_SPACES; |
| 1374 #endif | 1414 #endif |
| 1375 // Deallocate unmarked objects and clear marked bits for marked objects. | 1415 // Deallocate unmarked objects and clear marked bits for marked objects. |
| 1376 Heap::lo_space()->FreeUnmarkedObjects(); | 1416 Heap::lo_space()->FreeUnmarkedObjects(); |
| 1377 } | 1417 } |
| 1378 | 1418 |
| 1379 | 1419 |
| 1380 // Safe to use during marking phase only. | 1420 // Safe to use during marking phase only. |
| 1381 bool MarkCompactCollector::SafeIsMap(HeapObject* object) { | 1421 bool MarkCompactCollector::SafeIsMap(HeapObject* object) { |
Erik Corry (2011/01/07 12:13:21):
Remove or rename?
| 1382 MapWord metamap = object->map_word(); | 1422 return object->map()->instance_type() == MAP_TYPE; |
| 1383 metamap.ClearMark(); | |
| 1384 return metamap.ToMap()->instance_type() == MAP_TYPE; | |
| 1385 } | 1423 } |
| 1386 | 1424 |
| 1387 | 1425 |
| 1388 void MarkCompactCollector::ClearNonLiveTransitions() { | 1426 void MarkCompactCollector::ClearNonLiveTransitions() { |
| 1389 HeapObjectIterator map_iterator(Heap::map_space(), &SizeOfMarkedObject); | 1427 HeapObjectIterator map_iterator(Heap::map_space(), &SizeOfMarkedObject); |
| 1390 // Iterate over the map space, setting map transitions that go from | 1428 // Iterate over the map space, setting map transitions that go from |
| 1391 // a marked map to an unmarked map to null transitions. At the same time, | 1429 // a marked map to an unmarked map to null transitions. At the same time, |
| 1392 // set all the prototype fields of maps back to their original value, | 1430 // set all the prototype fields of maps back to their original value, |
| 1393 // dropping the back pointers temporarily stored in the prototype field. | 1431 // dropping the back pointers temporarily stored in the prototype field. |
| 1394 // Setting the prototype field requires following the linked list of | 1432 // Setting the prototype field requires following the linked list of |
| 1395 // back pointers, reversing them all at once. This allows us to find | 1433 // back pointers, reversing them all at once. This allows us to find |
| 1396 // those maps with map transitions that need to be nulled, and only | 1434 // those maps with map transitions that need to be nulled, and only |
| 1397 // scan the descriptor arrays of those maps, not all maps. | 1435 // scan the descriptor arrays of those maps, not all maps. |
| 1398 // All of these actions are carried out only on maps of JSObjects | 1436 // All of these actions are carried out only on maps of JSObjects |
| 1399 // and related subtypes. | 1437 // and related subtypes. |
| 1400 for (HeapObject* obj = map_iterator.next(); | 1438 for (HeapObject* obj = map_iterator.next(); |
| 1401 obj != NULL; obj = map_iterator.next()) { | 1439 obj != NULL; obj = map_iterator.next()) { |
| 1402 Map* map = reinterpret_cast<Map*>(obj); | 1440 Map* map = reinterpret_cast<Map*>(obj); |
| 1403 if (!map->IsMarked() && map->IsByteArray()) continue; | 1441 if (!Marking::IsMarked(map) && map->IsByteArray()) continue; |
| 1404 | 1442 |
| 1405 ASSERT(SafeIsMap(map)); | 1443 ASSERT(SafeIsMap(map)); |
| 1406 // Only JSObject and subtypes have map transitions and back pointers. | 1444 // Only JSObject and subtypes have map transitions and back pointers. |
| 1407 if (map->instance_type() < FIRST_JS_OBJECT_TYPE) continue; | 1445 if (map->instance_type() < FIRST_JS_OBJECT_TYPE) continue; |
| 1408 if (map->instance_type() > JS_FUNCTION_TYPE) continue; | 1446 if (map->instance_type() > JS_FUNCTION_TYPE) continue; |
| 1409 | 1447 |
| 1410 if (map->IsMarked() && map->attached_to_shared_function_info()) { | 1448 if (Marking::IsMarked(map) && |
| 1449 map->attached_to_shared_function_info()) { | |
| 1411 // This map is used for inobject slack tracking and has been detached | 1450 // This map is used for inobject slack tracking and has been detached |
| 1412 // from SharedFunctionInfo during the mark phase. | 1451 // from SharedFunctionInfo during the mark phase. |
| 1413 // Since it survived the GC, reattach it now. | 1452 // Since it survived the GC, reattach it now. |
| 1414 map->unchecked_constructor()->unchecked_shared()->AttachInitialMap(map); | 1453 map->unchecked_constructor()->unchecked_shared()->AttachInitialMap(map); |
| 1415 } | 1454 } |
| 1416 | 1455 |
| 1417 // Follow the chain of back pointers to find the prototype. | 1456 // Follow the chain of back pointers to find the prototype. |
| 1418 Map* current = map; | 1457 Map* current = map; |
| 1419 while (SafeIsMap(current)) { | 1458 while (SafeIsMap(current)) { |
| 1420 current = reinterpret_cast<Map*>(current->prototype()); | 1459 current = reinterpret_cast<Map*>(current->prototype()); |
| 1421 ASSERT(current->IsHeapObject()); | 1460 ASSERT(current->IsHeapObject()); |
| 1422 } | 1461 } |
| 1423 Object* real_prototype = current; | 1462 Object* real_prototype = current; |
| 1424 | 1463 |
| 1425 // Follow back pointers, setting them to prototype, | 1464 // Follow back pointers, setting them to prototype, |
| 1426 // clearing map transitions when necessary. | 1465 // clearing map transitions when necessary. |
| 1427 current = map; | 1466 current = map; |
| 1428 bool on_dead_path = !current->IsMarked(); | 1467 bool on_dead_path = !Marking::IsMarked(current); |
| 1429 Object* next; | 1468 Object* next; |
| 1430 while (SafeIsMap(current)) { | 1469 while (SafeIsMap(current)) { |
| 1431 next = current->prototype(); | 1470 next = current->prototype(); |
| 1432 // There should never be a dead map above a live map. | 1471 // There should never be a dead map above a live map. |
| 1433 ASSERT(on_dead_path || current->IsMarked()); | 1472 ASSERT(on_dead_path || Marking::IsMarked(current)); |
| 1434 | 1473 |
| 1435 // A live map above a dead map indicates a dead transition. | 1474 // A live map above a dead map indicates a dead transition. |
| 1436 // This test will always be false on the first iteration. | 1475 // This test will always be false on the first iteration. |
| 1437 if (on_dead_path && current->IsMarked()) { | 1476 if (on_dead_path && Marking::IsMarked(current)) { |
| 1438 on_dead_path = false; | 1477 on_dead_path = false; |
| 1439 current->ClearNonLiveTransitions(real_prototype); | 1478 current->ClearNonLiveTransitions(real_prototype); |
| 1440 } | 1479 } |
| 1441 *HeapObject::RawField(current, Map::kPrototypeOffset) = | 1480 *HeapObject::RawField(current, Map::kPrototypeOffset) = |
| 1442 real_prototype; | 1481 real_prototype; |
| 1443 current = reinterpret_cast<Map*>(next); | 1482 current = reinterpret_cast<Map*>(next); |
| 1444 } | 1483 } |
| 1445 } | 1484 } |
| 1446 } | 1485 } |
| 1447 | 1486 |
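The two loops above are a compact pointer-chain trick: the prototype fields temporarily hold back pointers (installed by CreateBackPointers), so the first walk finds the real prototype at the end of the chain and the second walk restores every field on the way back. A simplified model with hypothetical types (the ClearNonLiveTransitions step at dead/live boundaries is elided):

```cpp
// Simplified model of the back-pointer walk (hypothetical types).
struct ToyMap {
  bool is_map;        // Stand-in for SafeIsMap(): false for the real prototype.
  ToyMap* prototype;  // During GC this field holds the back pointer.
};

static void RestorePrototypes(ToyMap* map) {
  // Pass 1: follow back pointers until something that is not a map appears.
  ToyMap* current = map;
  while (current->is_map) current = current->prototype;
  ToyMap* real_prototype = current;

  // Pass 2: rewrite every back pointer on the chain to the real prototype.
  current = map;
  while (current->is_map) {
    ToyMap* next = current->prototype;
    current->prototype = real_prototype;
    current = next;
  }
}
```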
| (...skipping 128 matching lines...) | |
| 1576 MarkCompactCollector::tracer()-> | 1615 MarkCompactCollector::tracer()-> |
| 1577 increment_promoted_objects_size(object_size); | 1616 increment_promoted_objects_size(object_size); |
| 1578 return true; | 1617 return true; |
| 1579 } | 1618 } |
| 1580 } | 1619 } |
| 1581 | 1620 |
| 1582 return false; | 1621 return false; |
| 1583 } | 1622 } |
| 1584 | 1623 |
| 1585 | 1624 |
| 1586 static void SweepNewSpace(NewSpace* space) { | 1625 void MarkCompactCollector::SweepNewSpace(NewSpace* space) { |
| 1587 Heap::CheckNewSpaceExpansionCriteria(); | 1626 Heap::CheckNewSpaceExpansionCriteria(); |
| 1588 | 1627 |
| 1589 Address from_bottom = space->bottom(); | 1628 Address from_bottom = space->bottom(); |
| 1590 Address from_top = space->top(); | 1629 Address from_top = space->top(); |
| 1591 | 1630 |
| 1592 // Flip the semispaces. After flipping, to space is empty, from space has | 1631 // Flip the semispaces. After flipping, to space is empty, from space has |
| 1593 // live objects. | 1632 // live objects. |
| 1594 space->Flip(); | 1633 space->Flip(); |
| 1595 space->ResetAllocationInfo(); | 1634 space->ResetAllocationInfo(); |
| 1596 | 1635 |
| 1597 int size = 0; | 1636 int size = 0; |
| 1598 int survivors_size = 0; | 1637 int survivors_size = 0; |
| 1599 | 1638 |
| 1600 // First pass: traverse all objects in inactive semispace, remove marks, | 1639 // First pass: traverse all objects in inactive semispace, remove marks, |
| 1601 // migrate live objects and write forwarding addresses. | 1640 // migrate live objects and write forwarding addresses. |
| 1602 for (Address current = from_bottom; current < from_top; current += size) { | 1641 for (Address current = from_bottom; current < from_top; current += size) { |
| 1603 HeapObject* object = HeapObject::FromAddress(current); | 1642 HeapObject* object = HeapObject::FromAddress(current); |
| 1604 | 1643 |
| 1605 if (object->IsMarked()) { | 1644 if (Marking::IsMarked(object)) { |
| 1606 object->ClearMark(); | 1645 Marking::ClearMark(object); |
| 1607 MarkCompactCollector::tracer()->decrement_marked_count(); | 1646 MarkCompactCollector::tracer()->decrement_marked_count(); |
| 1608 | 1647 |
| 1609 size = object->Size(); | 1648 size = object->Size(); |
| 1610 survivors_size += size; | 1649 survivors_size += size; |
| 1611 | 1650 |
| 1612 // Aggressively promote young survivors to the old space. | 1651 // Aggressively promote young survivors to the old space. |
| 1613 if (TryPromoteObject(object, size)) { | 1652 if (TryPromoteObject(object, size)) { |
| 1614 continue; | 1653 continue; |
| 1615 } | 1654 } |
| 1616 | 1655 |
| (...skipping 53 matching lines...) | |
| 1670 // Update pointers from external string table. | 1709 // Update pointers from external string table. |
| 1671 Heap::UpdateNewSpaceReferencesInExternalStringTable( | 1710 Heap::UpdateNewSpaceReferencesInExternalStringTable( |
| 1672 &UpdateNewSpaceReferenceInExternalStringTableEntry); | 1711 &UpdateNewSpaceReferenceInExternalStringTableEntry); |
| 1673 | 1712 |
| 1674 // All pointers were updated. Update auxiliary allocation info. | 1713 // All pointers were updated. Update auxiliary allocation info. |
| 1675 Heap::IncrementYoungSurvivorsCounter(survivors_size); | 1714 Heap::IncrementYoungSurvivorsCounter(survivors_size); |
| 1676 space->set_age_mark(space->top()); | 1715 space->set_age_mark(space->top()); |
| 1677 } | 1716 } |
| 1678 | 1717 |
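The lines elided above presumably hold the second pass: the first pass records where each survivor was moved, and the skipped code then rewrites every slot that still points into from-space. A toy model of the forwarding idea (a hash map purely for illustration; the collector encodes forwarding addresses far more compactly):

```cpp
#include <cstdint>
#include <unordered_map>

// Pass 1 records where each surviving from-space object was copied to.
static std::unordered_map<uintptr_t, uintptr_t> forwarding;

// Pass 2 visits every slot and redirects it if it points at a moved object.
static void UpdateSlot(uintptr_t* slot) {
  auto it = forwarding.find(*slot);
  if (it != forwarding.end()) *slot = it->second;
}
```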
| 1679 | 1718 |
| 1680 static void SweepSpace(PagedSpace* space) { | 1719 void MarkCompactCollector::SweepSpace(PagedSpace* space) { |
| 1681 PageIterator it(space, PageIterator::PAGES_IN_USE); | 1720 PageIterator it(space, PageIterator::PAGES_IN_USE); |
| 1682 | 1721 |
| 1683 // During sweeping of paged space we are trying to find longest sequences | 1722 // During sweeping of paged space we are trying to find longest sequences |
| 1684 // of pages without live objects and free them (instead of putting them on | 1723 // of pages without live objects and free them (instead of putting them on |
| 1685 // the free list). | 1724 // the free list). |
| 1686 | 1725 |
| 1687 // Page preceding current. | 1726 // Page preceding current. |
| 1688 Page* prev = Page::FromAddress(NULL); | 1727 Page* prev = Page::FromAddress(NULL); |
| 1689 | 1728 |
| 1690 // First empty page in a sequence. | 1729 // First empty page in a sequence. |
| (...skipping 15 matching lines...) | |
| 1706 Page* p = it.next(); | 1745 Page* p = it.next(); |
| 1707 | 1746 |
| 1708 bool is_previous_alive = true; | 1747 bool is_previous_alive = true; |
| 1709 Address free_start = NULL; | 1748 Address free_start = NULL; |
| 1710 HeapObject* object; | 1749 HeapObject* object; |
| 1711 | 1750 |
| 1712 for (Address current = p->ObjectAreaStart(); | 1751 for (Address current = p->ObjectAreaStart(); |
| 1713 current < p->AllocationTop(); | 1752 current < p->AllocationTop(); |
| 1714 current += object->Size()) { | 1753 current += object->Size()) { |
| 1715 object = HeapObject::FromAddress(current); | 1754 object = HeapObject::FromAddress(current); |
| 1716 if (object->IsMarked()) { | 1755 if (Marking::IsMarked(object)) { |
| 1717 object->ClearMark(); | 1756 Marking::ClearMark(object); |
| 1718 MarkCompactCollector::tracer()->decrement_marked_count(); | 1757 MarkCompactCollector::tracer()->decrement_marked_count(); |
| 1719 | 1758 |
| 1720 if (!is_previous_alive) { // Transition from free to live. | 1759 if (!is_previous_alive) { // Transition from free to live. |
| 1721 space->DeallocateBlock(free_start, | 1760 space->DeallocateBlock(free_start, |
| 1722 static_cast<int>(current - free_start), | 1761 static_cast<int>(current - free_start), |
| 1723 true); | 1762 true); |
| 1724 is_previous_alive = true; | 1763 is_previous_alive = true; |
| 1725 } | 1764 } |
| 1726 } else { | 1765 } else { |
| 1727 MarkCompactCollector::ReportDeleteIfNeeded(object); | 1766 MarkCompactCollector::ReportDeleteIfNeeded(object); |
| (...skipping 171 matching lines...) | |
| 1899 #ifdef ENABLE_LOGGING_AND_PROFILING | 1938 #ifdef ENABLE_LOGGING_AND_PROFILING |
| 1900 if (obj->IsCode()) { | 1939 if (obj->IsCode()) { |
| 1901 PROFILE(CodeDeleteEvent(obj->address())); | 1940 PROFILE(CodeDeleteEvent(obj->address())); |
| 1902 } else if (obj->IsJSFunction()) { | 1941 } else if (obj->IsJSFunction()) { |
| 1903 PROFILE(FunctionDeleteEvent(obj->address())); | 1942 PROFILE(FunctionDeleteEvent(obj->address())); |
| 1904 } | 1943 } |
| 1905 #endif | 1944 #endif |
| 1906 } | 1945 } |
| 1907 | 1946 |
| 1908 | 1947 |
| 1909 int MarkCompactCollector::SizeOfMarkedObject(HeapObject* obj) { | 1948 int MarkCompactCollector::SizeOfMarkedObject(HeapObject* obj) { |
Erik Corry (2011/01/07 12:13:21):
This function is not needed
| 1910 MapWord map_word = obj->map_word(); | 1949 return obj->Size(); |
| 1911 map_word.ClearMark(); | |
| 1912 return obj->SizeFromMap(map_word.ToMap()); | |
| 1913 } | 1950 } |
| 1914 | 1951 |
| 1915 | 1952 |
| 1916 void MarkCompactCollector::Initialize() { | 1953 void MarkCompactCollector::Initialize() { |
| 1917 StaticPointersToNewGenUpdatingVisitor::Initialize(); | 1954 StaticPointersToNewGenUpdatingVisitor::Initialize(); |
| 1918 StaticMarkingVisitor::Initialize(); | 1955 StaticMarkingVisitor::Initialize(); |
| 1919 } | 1956 } |
| 1920 | 1957 |
| 1921 | 1958 |
| 1922 } } // namespace v8::internal | 1959 } } // namespace v8::internal |