OLD | NEW |
1 // Copyright 2009 the V8 project authors. All rights reserved. | 1 // Copyright 2009 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 2645 matching lines...) |
2656 ASSERT(HAS_HEAP_OBJECT_TAG(kFromSpaceZapValue)); | 2656 ASSERT(HAS_HEAP_OBJECT_TAG(kFromSpaceZapValue)); |
2657 for (Address a = new_space_.FromSpaceLow(); | 2657 for (Address a = new_space_.FromSpaceLow(); |
2658 a < new_space_.FromSpaceHigh(); | 2658 a < new_space_.FromSpaceHigh(); |
2659 a += kPointerSize) { | 2659 a += kPointerSize) { |
2660 Memory::Address_at(a) = kFromSpaceZapValue; | 2660 Memory::Address_at(a) = kFromSpaceZapValue; |
2661 } | 2661 } |
2662 } | 2662 } |
2663 #endif // DEBUG | 2663 #endif // DEBUG |
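The DEBUG-only block above stamps every pointer-sized slot of from-space, [FromSpaceLow(), FromSpaceHigh()), with kFromSpaceZapValue so that any stale pointer into from-space is immediately recognizable in a debugger. A minimal standalone sketch of the same zapping pattern; ZapRange and kZapValue are illustrative names and an assumed sentinel, not V8 APIs:

#include <cstdint>

// Sketch only: stamp a sentinel into every pointer-sized slot of [start, end)
// so that any later read of the range is recognizably garbage.
static const uintptr_t kZapValue = 0xdeadbeef;  // assumed value, not V8's

void ZapRange(uintptr_t start, uintptr_t end) {
  for (uintptr_t a = start; a < end; a += sizeof(uintptr_t)) {
    *reinterpret_cast<uintptr_t*>(a) = kZapValue;
  }
}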
2664 | 2664 |
2665 | 2665 |
2666 void Heap::IterateRSetRange(Address object_start, | 2666 int Heap::IterateRSetRange(Address object_start, |
2667 Address object_end, | 2667 Address object_end, |
2668 Address rset_start, | 2668 Address rset_start, |
2669 ObjectSlotCallback copy_object_func) { | 2669 ObjectSlotCallback copy_object_func) { |
2670 Address object_address = object_start; | 2670 Address object_address = object_start; |
2671 Address rset_address = rset_start; | 2671 Address rset_address = rset_start; |
| 2672 int set_bits_count = 0; |
2672 | 2673 |
2673 // Loop over all the pointers in [object_start, object_end). | 2674 // Loop over all the pointers in [object_start, object_end). |
2674 while (object_address < object_end) { | 2675 while (object_address < object_end) { |
2675 uint32_t rset_word = Memory::uint32_at(rset_address); | 2676 uint32_t rset_word = Memory::uint32_at(rset_address); |
2676 if (rset_word != 0) { | 2677 if (rset_word != 0) { |
2677 uint32_t result_rset = rset_word; | 2678 uint32_t result_rset = rset_word; |
2678 for (uint32_t bitmask = 1; bitmask != 0; bitmask = bitmask << 1) { | 2679 for (uint32_t bitmask = 1; bitmask != 0; bitmask = bitmask << 1) { |
2679 // Do not dereference pointers at or past object_end. | 2680 // Do not dereference pointers at or past object_end. |
2680 if ((rset_word & bitmask) != 0 && object_address < object_end) { | 2681 if ((rset_word & bitmask) != 0 && object_address < object_end) { |
2681 Object** object_p = reinterpret_cast<Object**>(object_address); | 2682 Object** object_p = reinterpret_cast<Object**>(object_address); |
2682 if (Heap::InNewSpace(*object_p)) { | 2683 if (Heap::InNewSpace(*object_p)) { |
2683 copy_object_func(reinterpret_cast<HeapObject**>(object_p)); | 2684 copy_object_func(reinterpret_cast<HeapObject**>(object_p)); |
2684 } | 2685 } |
2685 // If this pointer does not need to be remembered anymore, clear | 2686 // If this pointer does not need to be remembered anymore, clear |
2686 // the remembered set bit. | 2687 // the remembered set bit. |
2687 if (!Heap::InNewSpace(*object_p)) result_rset &= ~bitmask; | 2688 if (!Heap::InNewSpace(*object_p)) result_rset &= ~bitmask; |
| 2689 set_bits_count++; |
2688 } | 2690 } |
2689 object_address += kPointerSize; | 2691 object_address += kPointerSize; |
2690 } | 2692 } |
2691 // Update the remembered set if it has changed. | 2693 // Update the remembered set if it has changed. |
2692 if (result_rset != rset_word) { | 2694 if (result_rset != rset_word) { |
2693 Memory::uint32_at(rset_address) = result_rset; | 2695 Memory::uint32_at(rset_address) = result_rset; |
2694 } | 2696 } |
2695 } else { | 2697 } else { |
2696 // No bits in the word were set. This is the common case. | 2698 // No bits in the word were set. This is the common case. |
2697 object_address += kPointerSize * kBitsPerInt; | 2699 object_address += kPointerSize * kBitsPerInt; |
2698 } | 2700 } |
2699 rset_address += kIntSize; | 2701 rset_address += kIntSize; |
2700 } | 2702 } |
| 2703 return set_bits_count; |
2701 } | 2704 } |
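The change to IterateRSetRange makes it report how many remembered-set bits it processed: each uint32 rset word covers 32 consecutive pointer slots, one bit per slot, and set_bits_count is incremented for every set bit whose slot lies inside [object_start, object_end). A self-contained sketch of that bit-to-slot walk and the counting, using plain memory instead of V8's Memory/Heap helpers; ScanRSetWord and the visit callback are made-up names:

#include <cstdint>
#include <cstddef>

// Sketch: walk one 32-bit remembered-set word in which bit i covers slots[i],
// call the visitor on each recorded slot, clear bits whose slot no longer
// needs remembering, and return how many set bits were seen.
int ScanRSetWord(void** slots, size_t slot_count, uint32_t* rset_word,
                 bool (*visit)(void** slot)) {  // returns "still remember?"
  uint32_t word = *rset_word;
  uint32_t result = word;
  int set_bits = 0;
  size_t index = 0;
  for (uint32_t bitmask = 1; bitmask != 0; bitmask <<= 1, ++index) {
    // Skip bits whose slot lies at or past the end of the covered range,
    // mirroring the object_end check above.
    if ((word & bitmask) != 0 && index < slot_count) {
      if (!visit(&slots[index])) result &= ~bitmask;
      ++set_bits;
    }
  }
  if (result != word) *rset_word = result;  // write back only when changed
  return set_bits;
}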
2702 | 2705 |
2703 | 2706 |
2704 void Heap::IterateRSet(PagedSpace* space, ObjectSlotCallback copy_object_func) { | 2707 void Heap::IterateRSet(PagedSpace* space, ObjectSlotCallback copy_object_func) { |
2705 ASSERT(Page::is_rset_in_use()); | 2708 ASSERT(Page::is_rset_in_use()); |
2706 ASSERT(space == old_pointer_space_ || space == map_space_); | 2709 ASSERT(space == old_pointer_space_ || space == map_space_); |
| 2710 |
| 2711 static void* paged_rset_histogram = StatsTable::CreateHistogram( |
| 2712 "V8.RSet_Paged", |
| 2713 0, |
| 2714 Page::kObjectAreaSize / kPointerSize, |
| 2715 30); |
2707 | 2716 |
2708 PageIterator it(space, PageIterator::PAGES_IN_USE); | 2717 PageIterator it(space, PageIterator::PAGES_IN_USE); |
2709 while (it.has_next()) { | 2718 while (it.has_next()) { |
2710 Page* page = it.next(); | 2719 Page* page = it.next(); |
2711 IterateRSetRange(page->ObjectAreaStart(), page->AllocationTop(), | 2720 int count = IterateRSetRange(page->ObjectAreaStart(), page->AllocationTop(), |
2712 page->RSetStart(), copy_object_func); | 2721 page->RSetStart(), copy_object_func); |
| 2722 if (paged_rset_histogram != NULL) { |
| 2723 StatsTable::AddHistogramSample(paged_rset_histogram, count); |
| 2724 } |
2713 } | 2725 } |
2714 } | 2726 } |
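The histogram wiring in IterateRSet follows a common pattern: create the counter exactly once through a function-local static, then record one sample per unit of work (here, the set-bit count per page), skipping the recording when the stats table is disabled and the handle is NULL. A hedged sketch of that pattern with an invented stand-in recorder; only the StatsTable::CreateHistogram and StatsTable::AddHistogramSample calls shown in the diff are real V8 APIs, everything below is illustrative:

#include <cstdio>

// Invented stand-in for the real stats table, for illustration only.
struct FakeStats {
  static void* CreateHistogram(const char* name, int min, int max, int buckets) {
    std::printf("histogram %s range [%d, %d], %d buckets\n", name, min, max, buckets);
    return reinterpret_cast<void*>(1);  // non-null handle means "enabled"
  }
  static void AddHistogramSample(void* histogram, int sample) {
    std::printf("sample %d\n", sample);
  }
};

int ProcessPage(int page_index) { return page_index * 3; }  // dummy work

void IterateAllPages(int page_count) {
  // Created on first call only, mirroring the static local in the diff.
  static void* histogram =
      FakeStats::CreateHistogram("V8.RSet_Paged_Sketch", 0, 1024, 30);
  for (int i = 0; i < page_count; ++i) {
    int count = ProcessPage(i);
    if (histogram != NULL) FakeStats::AddHistogramSample(histogram, count);
  }
}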
2715 | 2727 |
2716 | 2728 |
2717 #ifdef DEBUG | 2729 #ifdef DEBUG |
2718 #define SYNCHRONIZE_TAG(tag) v->Synchronize(tag) | 2730 #define SYNCHRONIZE_TAG(tag) v->Synchronize(tag) |
2719 #else | 2731 #else |
2720 #define SYNCHRONIZE_TAG(tag) | 2732 #define SYNCHRONIZE_TAG(tag) |
2721 #endif | 2733 #endif |
2722 | 2734 |
(...skipping 736 matching lines...) |
3459 #ifdef DEBUG | 3471 #ifdef DEBUG |
3460 bool Heap::GarbageCollectionGreedyCheck() { | 3472 bool Heap::GarbageCollectionGreedyCheck() { |
3461 ASSERT(FLAG_gc_greedy); | 3473 ASSERT(FLAG_gc_greedy); |
3462 if (Bootstrapper::IsActive()) return true; | 3474 if (Bootstrapper::IsActive()) return true; |
3463 if (disallow_allocation_failure()) return true; | 3475 if (disallow_allocation_failure()) return true; |
3464 return CollectGarbage(0, NEW_SPACE); | 3476 return CollectGarbage(0, NEW_SPACE); |
3465 } | 3477 } |
3466 #endif | 3478 #endif |
3467 | 3479 |
3468 } } // namespace v8::internal | 3480 } } // namespace v8::internal |