Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/heap/spaces.h" | 5 #include "src/heap/spaces.h" |
| 6 | 6 |
| 7 #include "src/base/bits.h" | 7 #include "src/base/bits.h" |
| 8 #include "src/base/platform/platform.h" | 8 #include "src/base/platform/platform.h" |
| 9 #include "src/full-codegen/full-codegen.h" | 9 #include "src/full-codegen/full-codegen.h" |
| 10 #include "src/heap/slots-buffer.h" | 10 #include "src/heap/slots-buffer.h" |
| (...skipping 52 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 63 cur_page = cur_page->next_page(); | 63 cur_page = cur_page->next_page(); |
| 64 if (cur_page == space_->anchor()) return false; | 64 if (cur_page == space_->anchor()) return false; |
| 65 cur_page->heap()->mark_compact_collector()->SweepOrWaitUntilSweepingCompleted( | 65 cur_page->heap()->mark_compact_collector()->SweepOrWaitUntilSweepingCompleted( |
| 66 cur_page); | 66 cur_page); |
| 67 cur_addr_ = cur_page->area_start(); | 67 cur_addr_ = cur_page->area_start(); |
| 68 cur_end_ = cur_page->area_end(); | 68 cur_end_ = cur_page->area_end(); |
| 69 DCHECK(cur_page->SweepingDone()); | 69 DCHECK(cur_page->SweepingDone()); |
| 70 return true; | 70 return true; |
| 71 } | 71 } |
| 72 | 72 |
| 73 PauseAllocationObserversScope::PauseAllocationObserversScope(Heap* heap) | |
| 74 : heap_(heap) { | |
| 75 heap_->new_space()->PauseAllocationObservers(); | |
|
Hannes Payer (out of office)
2016/02/08 10:13:48
Use the AllSpaces iterator.
mattloring
2016/02/09 20:20:04
Done.
| |
| 76 heap_->old_space()->PauseAllocationObservers(); | |
| 77 heap_->code_space()->PauseAllocationObservers(); | |
| 78 heap_->map_space()->PauseAllocationObservers(); | |
| 79 heap_->lo_space()->PauseAllocationObservers(); | |
| 80 } | |
| 81 | |
| 82 PauseAllocationObserversScope::~PauseAllocationObserversScope() { | |
|
Hannes Payer (out of office)
2016/02/08 10:13:48
Use the AllSpaces iterator.
mattloring
2016/02/09 20:20:04
Done.
| |
| 83 heap_->new_space()->ResumeAllocationObservers(); | |
| 84 heap_->old_space()->ResumeAllocationObservers(); | |
| 85 heap_->code_space()->ResumeAllocationObservers(); | |
| 86 heap_->map_space()->ResumeAllocationObservers(); | |
| 87 heap_->lo_space()->ResumeAllocationObservers(); | |
| 88 } | |
| 73 | 89 |
| 74 // ----------------------------------------------------------------------------- | 90 // ----------------------------------------------------------------------------- |
| 75 // CodeRange | 91 // CodeRange |
| 76 | 92 |
| 77 | 93 |
| 78 CodeRange::CodeRange(Isolate* isolate) | 94 CodeRange::CodeRange(Isolate* isolate) |
| 79 : isolate_(isolate), | 95 : isolate_(isolate), |
| 80 code_range_(NULL), | 96 code_range_(NULL), |
| 81 free_list_(0), | 97 free_list_(0), |
| 82 allocation_list_(0), | 98 allocation_list_(0), |
| (...skipping 858 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 941 | 957 |
| 942 STATIC_ASSERT(static_cast<ObjectSpace>(1 << AllocationSpace::NEW_SPACE) == | 958 STATIC_ASSERT(static_cast<ObjectSpace>(1 << AllocationSpace::NEW_SPACE) == |
| 943 ObjectSpace::kObjectSpaceNewSpace); | 959 ObjectSpace::kObjectSpaceNewSpace); |
| 944 STATIC_ASSERT(static_cast<ObjectSpace>(1 << AllocationSpace::OLD_SPACE) == | 960 STATIC_ASSERT(static_cast<ObjectSpace>(1 << AllocationSpace::OLD_SPACE) == |
| 945 ObjectSpace::kObjectSpaceOldSpace); | 961 ObjectSpace::kObjectSpaceOldSpace); |
| 946 STATIC_ASSERT(static_cast<ObjectSpace>(1 << AllocationSpace::CODE_SPACE) == | 962 STATIC_ASSERT(static_cast<ObjectSpace>(1 << AllocationSpace::CODE_SPACE) == |
| 947 ObjectSpace::kObjectSpaceCodeSpace); | 963 ObjectSpace::kObjectSpaceCodeSpace); |
| 948 STATIC_ASSERT(static_cast<ObjectSpace>(1 << AllocationSpace::MAP_SPACE) == | 964 STATIC_ASSERT(static_cast<ObjectSpace>(1 << AllocationSpace::MAP_SPACE) == |
| 949 ObjectSpace::kObjectSpaceMapSpace); | 965 ObjectSpace::kObjectSpaceMapSpace); |
| 950 | 966 |
| 967 void Space::AllocationStep(Address soon_object, int size) { | |
| 968 if (!allocation_observers_paused_) { | |
| 969 for (int i = 0; i < allocation_observers_->length(); ++i) { | |
| 970 AllocationObserver* o = (*allocation_observers_)[i]; | |
| 971 o->AllocationStep(size, soon_object, size); | |
| 972 } | |
| 973 } | |
| 974 } | |
| 951 | 975 |
| 952 PagedSpace::PagedSpace(Heap* heap, AllocationSpace space, | 976 PagedSpace::PagedSpace(Heap* heap, AllocationSpace space, |
| 953 Executability executable) | 977 Executability executable) |
| 954 : Space(heap, space, executable), free_list_(this) { | 978 : Space(heap, space, executable), free_list_(this) { |
| 955 area_size_ = MemoryAllocator::PageAreaSize(space); | 979 area_size_ = MemoryAllocator::PageAreaSize(space); |
| 956 accounting_stats_.Clear(); | 980 accounting_stats_.Clear(); |
| 957 | 981 |
| 958 allocation_info_.Reset(nullptr, nullptr); | 982 allocation_info_.Reset(nullptr, nullptr); |
| 959 | 983 |
| 960 anchor_.InitializeAsAnchor(this); | 984 anchor_.InitializeAsAnchor(this); |
| (...skipping 541 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 1502 InlineAllocationStep(old_top, allocation_info_.top(), nullptr, 0); | 1526 InlineAllocationStep(old_top, allocation_info_.top(), nullptr, 0); |
| 1503 } | 1527 } |
| 1504 | 1528 |
| 1505 | 1529 |
| 1506 void NewSpace::UpdateInlineAllocationLimit(int size_in_bytes) { | 1530 void NewSpace::UpdateInlineAllocationLimit(int size_in_bytes) { |
| 1507 if (heap()->inline_allocation_disabled()) { | 1531 if (heap()->inline_allocation_disabled()) { |
| 1508 // Lowest limit when linear allocation was disabled. | 1532 // Lowest limit when linear allocation was disabled. |
| 1509 Address high = to_space_.page_high(); | 1533 Address high = to_space_.page_high(); |
| 1510 Address new_top = allocation_info_.top() + size_in_bytes; | 1534 Address new_top = allocation_info_.top() + size_in_bytes; |
| 1511 allocation_info_.set_limit(Min(new_top, high)); | 1535 allocation_info_.set_limit(Min(new_top, high)); |
| 1512 } else if (inline_allocation_observers_paused_ || | 1536 } else if (allocation_observers_paused_ || top_on_previous_step_ == 0) { |
| 1513 top_on_previous_step_ == 0) { | |
| 1514 // Normal limit is the end of the current page. | 1537 // Normal limit is the end of the current page. |
| 1515 allocation_info_.set_limit(to_space_.page_high()); | 1538 allocation_info_.set_limit(to_space_.page_high()); |
| 1516 } else { | 1539 } else { |
| 1517 // Lower limit during incremental marking. | 1540 // Lower limit during incremental marking. |
| 1518 Address high = to_space_.page_high(); | 1541 Address high = to_space_.page_high(); |
| 1519 Address new_top = allocation_info_.top() + size_in_bytes; | 1542 Address new_top = allocation_info_.top() + size_in_bytes; |
| 1520 Address new_limit = new_top + GetNextInlineAllocationStepSize() - 1; | 1543 Address new_limit = new_top + GetNextInlineAllocationStepSize() - 1; |
| 1521 allocation_info_.set_limit(Min(new_limit, high)); | 1544 allocation_info_.set_limit(Min(new_limit, high)); |
| 1522 } | 1545 } |
| 1523 DCHECK_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_); | 1546 DCHECK_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_); |
| (...skipping 60 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 1584 Address new_top = old_top + aligned_size_in_bytes; | 1607 Address new_top = old_top + aligned_size_in_bytes; |
| 1585 Address soon_object = old_top + filler_size; | 1608 Address soon_object = old_top + filler_size; |
| 1586 InlineAllocationStep(new_top, new_top, soon_object, size_in_bytes); | 1609 InlineAllocationStep(new_top, new_top, soon_object, size_in_bytes); |
| 1587 UpdateInlineAllocationLimit(aligned_size_in_bytes); | 1610 UpdateInlineAllocationLimit(aligned_size_in_bytes); |
| 1588 } | 1611 } |
| 1589 return true; | 1612 return true; |
| 1590 } | 1613 } |
| 1591 | 1614 |
| 1592 | 1615 |
| 1593 void NewSpace::StartNextInlineAllocationStep() { | 1616 void NewSpace::StartNextInlineAllocationStep() { |
| 1594 if (!inline_allocation_observers_paused_) { | 1617 if (!allocation_observers_paused_) { |
| 1595 top_on_previous_step_ = | 1618 top_on_previous_step_ = |
| 1596 inline_allocation_observers_.length() ? allocation_info_.top() : 0; | 1619 allocation_observers_->length() ? allocation_info_.top() : 0; |
| 1597 UpdateInlineAllocationLimit(0); | 1620 UpdateInlineAllocationLimit(0); |
| 1598 } | 1621 } |
| 1599 } | 1622 } |
| 1600 | 1623 |
| 1601 | 1624 |
| 1602 intptr_t NewSpace::GetNextInlineAllocationStepSize() { | 1625 intptr_t NewSpace::GetNextInlineAllocationStepSize() { |
| 1603 intptr_t next_step = 0; | 1626 intptr_t next_step = 0; |
| 1604 for (int i = 0; i < inline_allocation_observers_.length(); ++i) { | 1627 for (int i = 0; i < allocation_observers_->length(); ++i) { |
| 1605 InlineAllocationObserver* o = inline_allocation_observers_[i]; | 1628 AllocationObserver* o = (*allocation_observers_)[i]; |
| 1606 next_step = next_step ? Min(next_step, o->bytes_to_next_step()) | 1629 next_step = next_step ? Min(next_step, o->bytes_to_next_step()) |
| 1607 : o->bytes_to_next_step(); | 1630 : o->bytes_to_next_step(); |
| 1608 } | 1631 } |
| 1609 DCHECK(inline_allocation_observers_.length() == 0 || next_step != 0); | 1632 DCHECK(allocation_observers_->length() == 0 || next_step != 0); |
| 1610 return next_step; | 1633 return next_step; |
| 1611 } | 1634 } |
| 1612 | 1635 |
| 1613 | 1636 void NewSpace::AddAllocationObserver(AllocationObserver* observer) { |
| 1614 void NewSpace::AddInlineAllocationObserver(InlineAllocationObserver* observer) { | 1637 Space::AddAllocationObserver(observer); |
| 1615 inline_allocation_observers_.Add(observer); | |
| 1616 StartNextInlineAllocationStep(); | 1638 StartNextInlineAllocationStep(); |
| 1617 } | 1639 } |
| 1618 | 1640 |
| 1619 | 1641 void NewSpace::RemoveAllocationObserver(AllocationObserver* observer) { |
| 1620 void NewSpace::RemoveInlineAllocationObserver( | 1642 Space::RemoveAllocationObserver(observer); |
| 1621 InlineAllocationObserver* observer) { | |
| 1622 bool removed = inline_allocation_observers_.RemoveElement(observer); | |
| 1623 // Only used in assertion. Suppress unused variable warning. | |
| 1624 static_cast<void>(removed); | |
| 1625 DCHECK(removed); | |
| 1626 StartNextInlineAllocationStep(); | 1643 StartNextInlineAllocationStep(); |
| 1627 } | 1644 } |
| 1628 | 1645 |
| 1629 | 1646 void NewSpace::PauseAllocationObservers() { |
| 1630 void NewSpace::PauseInlineAllocationObservers() { | |
| 1631 // Do a step to account for memory allocated so far. | 1647 // Do a step to account for memory allocated so far. |
| 1632 InlineAllocationStep(top(), top(), nullptr, 0); | 1648 InlineAllocationStep(top(), top(), nullptr, 0); |
| 1633 inline_allocation_observers_paused_ = true; | 1649 Space::PauseAllocationObservers(); |
| 1634 top_on_previous_step_ = 0; | 1650 top_on_previous_step_ = 0; |
| 1635 UpdateInlineAllocationLimit(0); | 1651 UpdateInlineAllocationLimit(0); |
| 1636 } | 1652 } |
| 1637 | 1653 |
| 1638 | 1654 void NewSpace::ResumeAllocationObservers() { |
| 1639 void NewSpace::ResumeInlineAllocationObservers() { | |
| 1640 DCHECK(top_on_previous_step_ == 0); | 1655 DCHECK(top_on_previous_step_ == 0); |
| 1641 inline_allocation_observers_paused_ = false; | 1656 Space::ResumeAllocationObservers(); |
| 1642 StartNextInlineAllocationStep(); | 1657 StartNextInlineAllocationStep(); |
| 1643 } | 1658 } |
| 1644 | 1659 |
| 1645 | 1660 |
| 1646 void NewSpace::InlineAllocationStep(Address top, Address new_top, | 1661 void NewSpace::InlineAllocationStep(Address top, Address new_top, |
| 1647 Address soon_object, size_t size) { | 1662 Address soon_object, size_t size) { |
| 1648 if (top_on_previous_step_) { | 1663 if (top_on_previous_step_) { |
| 1649 int bytes_allocated = static_cast<int>(top - top_on_previous_step_); | 1664 int bytes_allocated = static_cast<int>(top - top_on_previous_step_); |
| 1650 for (int i = 0; i < inline_allocation_observers_.length(); ++i) { | 1665 for (int i = 0; i < allocation_observers_->length(); ++i) { |
| 1651 inline_allocation_observers_[i]->InlineAllocationStep(bytes_allocated, | 1666 (*allocation_observers_)[i]->AllocationStep(bytes_allocated, soon_object, |
| 1652 soon_object, size); | 1667 size); |
| 1653 } | 1668 } |
| 1654 top_on_previous_step_ = new_top; | 1669 top_on_previous_step_ = new_top; |
| 1655 } | 1670 } |
| 1656 } | 1671 } |
| 1657 | 1672 |
| 1658 #ifdef VERIFY_HEAP | 1673 #ifdef VERIFY_HEAP |
| 1659 // We do not use the SemiSpaceIterator because verification doesn't assume | 1674 // We do not use the SemiSpaceIterator because verification doesn't assume |
| 1660 // that it works (it depends on the invariants we are checking). | 1675 // that it works (it depends on the invariants we are checking). |
| 1661 void NewSpace::Verify() { | 1676 void NewSpace::Verify() { |
| 1662 // The allocation pointer should be in the space or at the very end. | 1677 // The allocation pointer should be in the space or at the very end. |
| (...skipping 880 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 2543 // if it is big enough. | 2558 // if it is big enough. |
| 2544 owner_->Free(owner_->top(), old_linear_size); | 2559 owner_->Free(owner_->top(), old_linear_size); |
| 2545 owner_->SetTopAndLimit(nullptr, nullptr); | 2560 owner_->SetTopAndLimit(nullptr, nullptr); |
| 2546 | 2561 |
| 2547 owner_->heap()->incremental_marking()->OldSpaceStep(size_in_bytes - | 2562 owner_->heap()->incremental_marking()->OldSpaceStep(size_in_bytes - |
| 2548 old_linear_size); | 2563 old_linear_size); |
| 2549 | 2564 |
| 2550 int new_node_size = 0; | 2565 int new_node_size = 0; |
| 2551 FreeSpace* new_node = FindNodeFor(size_in_bytes, &new_node_size); | 2566 FreeSpace* new_node = FindNodeFor(size_in_bytes, &new_node_size); |
| 2552 if (new_node == nullptr) return nullptr; | 2567 if (new_node == nullptr) return nullptr; |
| 2568 owner_->AllocationStep(new_node->address(), size_in_bytes); | |
| 2553 | 2569 |
| 2554 int bytes_left = new_node_size - size_in_bytes; | 2570 int bytes_left = new_node_size - size_in_bytes; |
| 2555 DCHECK(bytes_left >= 0); | 2571 DCHECK(bytes_left >= 0); |
| 2556 | 2572 |
| 2557 #ifdef DEBUG | 2573 #ifdef DEBUG |
| 2558 for (int i = 0; i < size_in_bytes / kPointerSize; i++) { | 2574 for (int i = 0; i < size_in_bytes / kPointerSize; i++) { |
| 2559 reinterpret_cast<Object**>(new_node->address())[i] = | 2575 reinterpret_cast<Object**>(new_node->address())[i] = |
| 2560 Smi::FromInt(kCodeZapValue); | 2576 Smi::FromInt(kCodeZapValue); |
| 2561 } | 2577 } |
| 2562 #endif | 2578 #endif |
| (...skipping 491 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 3054 | 3070 |
| 3055 if (Heap::ShouldZapGarbage()) { | 3071 if (Heap::ShouldZapGarbage()) { |
| 3056 // Make the object consistent so the heap can be verified in OldSpaceStep. | 3072 // Make the object consistent so the heap can be verified in OldSpaceStep. |
| 3057 // We only need to do this in debug builds or if verify_heap is on. | 3073 // We only need to do this in debug builds or if verify_heap is on. |
| 3058 reinterpret_cast<Object**>(object->address())[0] = | 3074 reinterpret_cast<Object**>(object->address())[0] = |
| 3059 heap()->fixed_array_map(); | 3075 heap()->fixed_array_map(); |
| 3060 reinterpret_cast<Object**>(object->address())[1] = Smi::FromInt(0); | 3076 reinterpret_cast<Object**>(object->address())[1] = Smi::FromInt(0); |
| 3061 } | 3077 } |
| 3062 | 3078 |
| 3063 heap()->incremental_marking()->OldSpaceStep(object_size); | 3079 heap()->incremental_marking()->OldSpaceStep(object_size); |
| 3080 AllocationStep(object->address(), object_size); | |
| 3064 return object; | 3081 return object; |
| 3065 } | 3082 } |
| 3066 | 3083 |
| 3067 | 3084 |
| 3068 size_t LargeObjectSpace::CommittedPhysicalMemory() { | 3085 size_t LargeObjectSpace::CommittedPhysicalMemory() { |
| 3069 if (!base::VirtualMemory::HasLazyCommits()) return CommittedMemory(); | 3086 if (!base::VirtualMemory::HasLazyCommits()) return CommittedMemory(); |
| 3070 size_t size = 0; | 3087 size_t size = 0; |
| 3071 LargePage* current = first_page_; | 3088 LargePage* current = first_page_; |
| 3072 while (current != NULL) { | 3089 while (current != NULL) { |
| 3073 size += current->CommittedPhysicalMemory(); | 3090 size += current->CommittedPhysicalMemory(); |
| (...skipping 211 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 3285 object->ShortPrint(); | 3302 object->ShortPrint(); |
| 3286 PrintF("\n"); | 3303 PrintF("\n"); |
| 3287 } | 3304 } |
| 3288 printf(" --------------------------------------\n"); | 3305 printf(" --------------------------------------\n"); |
| 3289 printf(" Marked: %x, LiveCount: %x\n", mark_size, LiveBytes()); | 3306 printf(" Marked: %x, LiveCount: %x\n", mark_size, LiveBytes()); |
| 3290 } | 3307 } |
| 3291 | 3308 |
| 3292 #endif // DEBUG | 3309 #endif // DEBUG |
| 3293 } // namespace internal | 3310 } // namespace internal |
| 3294 } // namespace v8 | 3311 } // namespace v8 |
| OLD | NEW |