| OLD | NEW |
| 1 // Copyright 2006-2008 the V8 project authors. All rights reserved. | 1 // Copyright 2006-2008 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 134 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 145 // starting now. | 145 // starting now. |
| 146 Page::set_rset_state(Page::NOT_IN_USE); | 146 Page::set_rset_state(Page::NOT_IN_USE); |
| 147 } | 147 } |
| 148 #endif | 148 #endif |
| 149 | 149 |
| 150 PagedSpaces spaces; | 150 PagedSpaces spaces; |
| 151 while (PagedSpace* space = spaces.next()) { | 151 while (PagedSpace* space = spaces.next()) { |
| 152 space->PrepareForMarkCompact(compacting_collection_); | 152 space->PrepareForMarkCompact(compacting_collection_); |
| 153 } | 153 } |
| 154 | 154 |
| 155 Counters::global_objects.Set(0); | |
| 156 | |
| 157 #ifdef DEBUG | 155 #ifdef DEBUG |
| 158 live_bytes_ = 0; | 156 live_bytes_ = 0; |
| 159 live_young_objects_ = 0; | 157 live_young_objects_ = 0; |
| 160 live_old_pointer_objects_ = 0; | 158 live_old_pointer_objects_ = 0; |
| 161 live_old_data_objects_ = 0; | 159 live_old_data_objects_ = 0; |
| 162 live_code_objects_ = 0; | 160 live_code_objects_ = 0; |
| 163 live_map_objects_ = 0; | 161 live_map_objects_ = 0; |
| 164 live_lo_objects_ = 0; | 162 live_lo_objects_ = 0; |
| 165 #endif | 163 #endif |
| 166 } | 164 } |
| (...skipping 153 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 320 Code* CodeFromDerivedPointer(Address addr) { | 318 Code* CodeFromDerivedPointer(Address addr) { |
| 321 ASSERT(addr != NULL); | 319 ASSERT(addr != NULL); |
| 322 return reinterpret_cast<Code*>( | 320 return reinterpret_cast<Code*>( |
| 323 HeapObject::FromAddress(addr - Code::kHeaderSize)); | 321 HeapObject::FromAddress(addr - Code::kHeaderSize)); |
| 324 } | 322 } |
| 325 | 323 |
| 326 // Visit an unmarked object. | 324 // Visit an unmarked object. |
| 327 void VisitUnmarkedObject(HeapObject* obj) { | 325 void VisitUnmarkedObject(HeapObject* obj) { |
| 328 #ifdef DEBUG | 326 #ifdef DEBUG |
| 329 ASSERT(Heap::Contains(obj)); | 327 ASSERT(Heap::Contains(obj)); |
| 330 MarkCompactCollector::UpdateLiveObjectCount(obj); | |
| 331 ASSERT(!obj->IsMarked()); | 328 ASSERT(!obj->IsMarked()); |
| 332 #endif | 329 #endif |
| 333 Map* map = obj->map(); | 330 Map* map = obj->map(); |
| 334 obj->SetMark(); | 331 MarkCompactCollector::SetMark(obj); |
| 335 MarkCompactCollector::tracer()->increment_marked_count(); | |
| 336 // Mark the map pointer and the body. | 332 // Mark the map pointer and the body. |
| 337 MarkCompactCollector::MarkObject(map); | 333 MarkCompactCollector::MarkObject(map); |
| 338 obj->IterateBody(map->instance_type(), obj->SizeFromMap(map), this); | 334 obj->IterateBody(map->instance_type(), obj->SizeFromMap(map), this); |
| 339 } | 335 } |
| 340 | 336 |
| 341 // Visit all unmarked objects pointed to by [start, end). | 337 // Visit all unmarked objects pointed to by [start, end). |
| 342 // Returns false if the operation fails (lack of stack space). | 338 // Returns false if the operation fails (lack of stack space). |
| 343 inline bool VisitUnmarkedObjects(Object** start, Object** end) { | 339 inline bool VisitUnmarkedObjects(Object** start, Object** end) { |
| 344 // Return false if we are close to the stack limit. | 340 // Return false if we are close to the stack limit. |
| 345 StackLimitCheck check; | 341 StackLimitCheck check; |
| (...skipping 27 matching lines...) Expand all Loading... |
| 373 private: | 369 private: |
| 374 MarkingVisitor stack_visitor_; | 370 MarkingVisitor stack_visitor_; |
| 375 | 371 |
| 376 void MarkObjectByPointer(Object** p) { | 372 void MarkObjectByPointer(Object** p) { |
| 377 if (!(*p)->IsHeapObject()) return; | 373 if (!(*p)->IsHeapObject()) return; |
| 378 | 374 |
| 379 // Replace flat cons strings in place. | 375 // Replace flat cons strings in place. |
| 380 HeapObject* object = ShortCircuitConsString(p); | 376 HeapObject* object = ShortCircuitConsString(p); |
| 381 if (object->IsMarked()) return; | 377 if (object->IsMarked()) return; |
| 382 | 378 |
| 383 #ifdef DEBUG | |
| 384 MarkCompactCollector::UpdateLiveObjectCount(object); | |
| 385 #endif | |
| 386 Map* map = object->map(); | 379 Map* map = object->map(); |
| 387 // Mark the object. | 380 // Mark the object. |
| 388 object->SetMark(); | 381 MarkCompactCollector::SetMark(object); |
| 389 MarkCompactCollector::tracer()->increment_marked_count(); | |
| 390 // Mark the map pointer and body, and push them on the marking stack. | 382 // Mark the map pointer and body, and push them on the marking stack. |
| 391 MarkCompactCollector::MarkObject(map); | 383 MarkCompactCollector::MarkObject(map); |
| 392 object->IterateBody(map->instance_type(), object->SizeFromMap(map), | 384 object->IterateBody(map->instance_type(), object->SizeFromMap(map), |
| 393 &stack_visitor_); | 385 &stack_visitor_); |
| 394 | 386 |
| 395 // Mark all the objects reachable from the map and body. May leave | 387 // Mark all the objects reachable from the map and body. May leave |
| 396 // overflowed objects in the heap. | 388 // overflowed objects in the heap. |
| 397 MarkCompactCollector::EmptyMarkingStack(&stack_visitor_); | 389 MarkCompactCollector::EmptyMarkingStack(&stack_visitor_); |
| 398 } | 390 } |
| 399 }; | 391 }; |
| (...skipping 16 matching lines...) Expand all Loading... |
| 416 | 408 |
| 417 int PointersRemoved() { | 409 int PointersRemoved() { |
| 418 return pointers_removed_; | 410 return pointers_removed_; |
| 419 } | 411 } |
| 420 private: | 412 private: |
| 421 int pointers_removed_; | 413 int pointers_removed_; |
| 422 }; | 414 }; |
| 423 | 415 |
| 424 | 416 |
| 425 void MarkCompactCollector::MarkUnmarkedObject(HeapObject* object) { | 417 void MarkCompactCollector::MarkUnmarkedObject(HeapObject* object) { |
| 426 #ifdef DEBUG | |
| 427 UpdateLiveObjectCount(object); | |
| 428 #endif | |
| 429 ASSERT(!object->IsMarked()); | 418 ASSERT(!object->IsMarked()); |
| 430 if (object->IsJSGlobalObject()) Counters::global_objects.Increment(); | |
| 431 | |
| 432 tracer_->increment_marked_count(); | |
| 433 ASSERT(Heap::Contains(object)); | 419 ASSERT(Heap::Contains(object)); |
| 434 if (object->IsMap()) { | 420 if (object->IsMap()) { |
| 435 Map* map = Map::cast(object); | 421 Map* map = Map::cast(object); |
| 436 if (FLAG_cleanup_caches_in_maps_at_gc) { | 422 if (FLAG_cleanup_caches_in_maps_at_gc) { |
| 437 map->ClearCodeCache(); | 423 map->ClearCodeCache(); |
| 438 } | 424 } |
| 439 map->SetMark(); | 425 SetMark(map); |
| 440 if (FLAG_collect_maps && | 426 if (FLAG_collect_maps && |
| 441 map->instance_type() >= FIRST_JS_OBJECT_TYPE && | 427 map->instance_type() >= FIRST_JS_OBJECT_TYPE && |
| 442 map->instance_type() <= JS_FUNCTION_TYPE) { | 428 map->instance_type() <= JS_FUNCTION_TYPE) { |
| 443 MarkMapContents(map); | 429 MarkMapContents(map); |
| 444 } else { | 430 } else { |
| 445 marking_stack.Push(map); | 431 marking_stack.Push(map); |
| 446 } | 432 } |
| 447 } else { | 433 } else { |
| 448 object->SetMark(); | 434 SetMark(object); |
| 449 marking_stack.Push(object); | 435 marking_stack.Push(object); |
| 450 } | 436 } |
| 451 } | 437 } |
| 452 | 438 |
| 453 | 439 |
| 454 void MarkCompactCollector::MarkMapContents(Map* map) { | 440 void MarkCompactCollector::MarkMapContents(Map* map) { |
| 455 MarkDescriptorArray(reinterpret_cast<DescriptorArray*>( | 441 MarkDescriptorArray(reinterpret_cast<DescriptorArray*>( |
| 456 *HeapObject::RawField(map, Map::kInstanceDescriptorsOffset))); | 442 *HeapObject::RawField(map, Map::kInstanceDescriptorsOffset))); |
| 457 | 443 |
| 458 // Mark the Object* fields of the Map. | 444 // Mark the Object* fields of the Map. |
| 459 // Since the descriptor array has been marked already, it is fine | 445 // Since the descriptor array has been marked already, it is fine |
| 460 // that one of these fields contains a pointer to it. | 446 // that one of these fields contains a pointer to it. |
| 461 MarkingVisitor visitor; // Has no state or contents. | 447 MarkingVisitor visitor; // Has no state or contents. |
| 462 visitor.VisitPointers(HeapObject::RawField(map, Map::kPrototypeOffset), | 448 visitor.VisitPointers(HeapObject::RawField(map, Map::kPrototypeOffset), |
| 463 HeapObject::RawField(map, Map::kSize)); | 449 HeapObject::RawField(map, Map::kSize)); |
| 464 } | 450 } |
| 465 | 451 |
| 466 | 452 |
| 467 void MarkCompactCollector::MarkDescriptorArray( | 453 void MarkCompactCollector::MarkDescriptorArray( |
| 468 DescriptorArray *descriptors) { | 454 DescriptorArray *descriptors) { |
| 469 if (descriptors->IsMarked()) return; | 455 if (descriptors->IsMarked()) return; |
| 470 // Empty descriptor array is marked as a root before any maps are marked. | 456 // Empty descriptor array is marked as a root before any maps are marked. |
| 471 ASSERT(descriptors != Heap::empty_descriptor_array()); | 457 ASSERT(descriptors != Heap::empty_descriptor_array()); |
| 472 | 458 SetMark(descriptors); |
| 473 tracer_->increment_marked_count(); | |
| 474 #ifdef DEBUG | |
| 475 UpdateLiveObjectCount(descriptors); | |
| 476 #endif | |
| 477 descriptors->SetMark(); | |
| 478 | 459 |
| 479 FixedArray* contents = reinterpret_cast<FixedArray*>( | 460 FixedArray* contents = reinterpret_cast<FixedArray*>( |
| 480 descriptors->get(DescriptorArray::kContentArrayIndex)); | 461 descriptors->get(DescriptorArray::kContentArrayIndex)); |
| 481 ASSERT(contents->IsHeapObject()); | 462 ASSERT(contents->IsHeapObject()); |
| 482 ASSERT(!contents->IsMarked()); | 463 ASSERT(!contents->IsMarked()); |
| 483 ASSERT(contents->IsFixedArray()); | 464 ASSERT(contents->IsFixedArray()); |
| 484 ASSERT(contents->length() >= 2); | 465 ASSERT(contents->length() >= 2); |
| 485 tracer_->increment_marked_count(); | 466 SetMark(contents); |
| 486 #ifdef DEBUG | |
| 487 UpdateLiveObjectCount(contents); | |
| 488 #endif | |
| 489 contents->SetMark(); | |
| 490 // Contents contains (value, details) pairs. If the details say | 467 // Contents contains (value, details) pairs. If the details say |
| 491 // that the type of descriptor is MAP_TRANSITION, CONSTANT_TRANSITION, | 468 // that the type of descriptor is MAP_TRANSITION, CONSTANT_TRANSITION, |
| 492 // or NULL_DESCRIPTOR, we don't mark the value as live. Only for | 469 // or NULL_DESCRIPTOR, we don't mark the value as live. Only for |
| 493 // type MAP_TRANSITION is the value an Object* (a Map*). | 470 // type MAP_TRANSITION is the value an Object* (a Map*). |
| 494 for (int i = 0; i < contents->length(); i += 2) { | 471 for (int i = 0; i < contents->length(); i += 2) { |
| 495 // If the pair (value, details) at index i, i+1 is not | 472 // If the pair (value, details) at index i, i+1 is not |
| 496 // a transition or null descriptor, mark the value. | 473 // a transition or null descriptor, mark the value. |
| 497 PropertyDetails details(Smi::cast(contents->get(i + 1))); | 474 PropertyDetails details(Smi::cast(contents->get(i + 1))); |
| 498 if (details.type() < FIRST_PHANTOM_PROPERTY_TYPE) { | 475 if (details.type() < FIRST_PHANTOM_PROPERTY_TYPE) { |
| 499 HeapObject* object = reinterpret_cast<HeapObject*>(contents->get(i)); | 476 HeapObject* object = reinterpret_cast<HeapObject*>(contents->get(i)); |
| 500 if (object->IsHeapObject() && !object->IsMarked()) { | 477 if (object->IsHeapObject() && !object->IsMarked()) { |
| 501 tracer_->increment_marked_count(); | 478 SetMark(object); |
| 502 #ifdef DEBUG | |
| 503 UpdateLiveObjectCount(object); | |
| 504 #endif | |
| 505 object->SetMark(); | |
| 506 marking_stack.Push(object); | 479 marking_stack.Push(object); |
| 507 } | 480 } |
| 508 } | 481 } |
| 509 } | 482 } |
| 510 // The DescriptorArray descriptors contains a pointer to its contents array, | 483 // The DescriptorArray descriptors contains a pointer to its contents array, |
| 511 // but the contents array is already marked. | 484 // but the contents array is already marked. |
| 512 marking_stack.Push(descriptors); | 485 marking_stack.Push(descriptors); |
| 513 } | 486 } |
| 514 | 487 |
| 515 | 488 |
| (...skipping 55 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 571 | 544 |
| 572 void MarkCompactCollector::ProcessRoots(RootMarkingVisitor* visitor) { | 545 void MarkCompactCollector::ProcessRoots(RootMarkingVisitor* visitor) { |
| 573 // Mark the heap roots gray, including global variables, stack variables, | 546 // Mark the heap roots gray, including global variables, stack variables, |
| 574 // etc. | 547 // etc. |
| 575 Heap::IterateStrongRoots(visitor); | 548 Heap::IterateStrongRoots(visitor); |
| 576 | 549 |
| 577 // Take care of the symbol table specially. | 550 // Take care of the symbol table specially. |
| 578 SymbolTable* symbol_table = SymbolTable::cast(Heap::symbol_table()); | 551 SymbolTable* symbol_table = SymbolTable::cast(Heap::symbol_table()); |
| 579 // 1. Mark the prefix of the symbol table gray. | 552 // 1. Mark the prefix of the symbol table gray. |
| 580 symbol_table->IteratePrefix(visitor); | 553 symbol_table->IteratePrefix(visitor); |
| 581 #ifdef DEBUG | |
| 582 UpdateLiveObjectCount(symbol_table); | |
| 583 #endif | |
| 584 // 2. Mark the symbol table black (ie, do not push it on the marking stack | 554 // 2. Mark the symbol table black (ie, do not push it on the marking stack |
| 585 // or mark it overflowed). | 555 // or mark it overflowed). |
| 586 symbol_table->SetMark(); | 556 SetMark(symbol_table); |
| 587 tracer_->increment_marked_count(); | |
| 588 | 557 |
| 589 // There may be overflowed objects in the heap. Visit them now. | 558 // There may be overflowed objects in the heap. Visit them now. |
| 590 while (marking_stack.overflowed()) { | 559 while (marking_stack.overflowed()) { |
| 591 RefillMarkingStack(); | 560 RefillMarkingStack(); |
| 592 EmptyMarkingStack(visitor->stack_visitor()); | 561 EmptyMarkingStack(visitor->stack_visitor()); |
| 593 } | 562 } |
| 594 } | 563 } |
| 595 | 564 |
| 596 | 565 |
| 597 void MarkCompactCollector::MarkObjectGroups() { | 566 void MarkCompactCollector::MarkObjectGroups() { |
| (...skipping 1320 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1918 | 1887 |
| 1919 void MarkCompactCollector::RebuildRSets() { | 1888 void MarkCompactCollector::RebuildRSets() { |
| 1920 #ifdef DEBUG | 1889 #ifdef DEBUG |
| 1921 ASSERT(state_ == RELOCATE_OBJECTS); | 1890 ASSERT(state_ == RELOCATE_OBJECTS); |
| 1922 state_ = REBUILD_RSETS; | 1891 state_ = REBUILD_RSETS; |
| 1923 #endif | 1892 #endif |
| 1924 Heap::RebuildRSets(); | 1893 Heap::RebuildRSets(); |
| 1925 } | 1894 } |
| 1926 | 1895 |
| 1927 } } // namespace v8::internal | 1896 } } // namespace v8::internal |
| OLD | NEW |