| OLD | NEW |
| 1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file |
| 2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
| 3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
| 4 | 4 |
| 5 #include "vm/heap.h" | 5 #include "vm/heap.h" |
| 6 | 6 |
| 7 #include "platform/assert.h" | 7 #include "platform/assert.h" |
| 8 #include "platform/utils.h" | 8 #include "platform/utils.h" |
| 9 #include "vm/flags.h" | 9 #include "vm/flags.h" |
| 10 #include "vm/isolate.h" | 10 #include "vm/isolate.h" |
| (...skipping 40 matching lines...) | |
| 51 Heap::~Heap() { | 51 Heap::~Heap() { |
| 52 delete barrier_; | 52 delete barrier_; |
| 53 delete barrier_done_; | 53 delete barrier_done_; |
| 54 | 54 |
| 55 for (int sel = 0; sel < kNumWeakSelectors; sel++) { | 55 for (int sel = 0; sel < kNumWeakSelectors; sel++) { |
| 56 delete new_weak_tables_[sel]; | 56 delete new_weak_tables_[sel]; |
| 57 delete old_weak_tables_[sel]; | 57 delete old_weak_tables_[sel]; |
| 58 } | 58 } |
| 59 } | 59 } |
| 60 | 60 |
| 61 void Heap::FillRemainingTLAB(Thread* thread) { | |
| 62 uword start = thread->top(); | |
| 63 uword end = thread->end(); | |
| 64 ASSERT(end >= start); | |
| 65 intptr_t size = end - start; | |
| 66 ASSERT(Utils::IsAligned(size, kObjectAlignment)); | |
| 67 if (size >= kObjectAlignment) { | |
| 68 FreeListElement::AsElement(start, size); | |
| 69 ASSERT(RawObject::FromAddr(start)->Size() == size); | |
| 70 ASSERT((start + size) == new_space_.top()); | |
| 71 } | |
| 72 } | |
| 73 | |
| 74 void Heap::AbandonRemainingTLAB(Thread* thread) { | |
| 75 FillRemainingTLAB(thread); | |
| 76 thread->set_top(0); | |
| 77 thread->set_end(0); | |
| 78 } | |
| 79 | |
| 80 intptr_t Heap::CalculateTLABSize() { | |
| 81 intptr_t size = new_space_.end() - new_space_.top(); | |
| 82 return Utils::RoundDown(size, kObjectAlignment); | |
| 83 } | |
| 84 | |
| 85 uword Heap::AllocateNew(intptr_t size) { | 61 uword Heap::AllocateNew(intptr_t size) { |
| 86 ASSERT(Thread::Current()->no_safepoint_scope_depth() == 0); | 62 ASSERT(Thread::Current()->no_safepoint_scope_depth() == 0); |
| 87 // Currently, only the Dart thread may allocate in new space. | 63 // Currently, only the Dart thread may allocate in new space. |
| 88 isolate()->AssertCurrentThreadIsMutator(); | 64 isolate()->AssertCurrentThreadIsMutator(); |
| 89 Thread* thread = Thread::Current(); | 65 Thread* thread = Thread::Current(); |
| 90 uword addr = new_space_.TryAllocateInTLAB(thread, size); | 66 uword addr = new_space_.TryAllocateInTLAB(thread, size); |
| 91 if (addr != 0) { | 67 if (addr == 0) { |
| 92 return addr; | 68 // This call to CollectGarbage might end up "reusing" a collection spawned |
| 93 } | 69 // from a different thread and will be racing to allocate the requested |
| 94 | 70 // memory with other threads being released after the collection. |
| 95 intptr_t tlab_size = CalculateTLABSize(); | 71 CollectGarbage(kNew); |
| 96 if ((tlab_size > 0) && (size > tlab_size)) { | 72 addr = new_space_.TryAllocateInTLAB(thread, size); |
| 97 return AllocateOld(size, HeapPage::kData); | 73 if (addr == 0) { |
| 98 } | 74 return AllocateOld(size, HeapPage::kData); |
| 99 | |
| 100 AbandonRemainingTLAB(thread); | |
| 101 if (tlab_size > 0) { | |
| 102 uword tlab_top = new_space_.TryAllocateNewTLAB(thread, tlab_size); | |
| 103 if (tlab_top != 0) { | |
| 104 addr = new_space_.TryAllocateInTLAB(thread, size); | |
| 105 ASSERT(addr != 0); | |
| 106 return addr; | |
| 107 } | 75 } |
| 108 } | 76 } |
| 109 | 77 return addr; |
| 110 ASSERT(!thread->HasActiveTLAB()); | |
| 111 | |
| 112 // This call to CollectGarbage might end up "reusing" a collection spawned | |
| 113 // from a different thread and will be racing to allocate the requested | |
| 114 // memory with other threads being released after the collection. | |
| 115 CollectGarbage(kNew); | |
| 116 tlab_size = CalculateTLABSize(); | |
| 117 uword tlab_top = new_space_.TryAllocateNewTLAB(thread, tlab_size); | |
| 118 if (tlab_top != 0) { | |
| 119 addr = new_space_.TryAllocateInTLAB(thread, size); | |
| 120 // It is possible a GC doesn't clear enough space. | |
| 121 // In that case, we must fall through and allocate into old space. | |
| 122 if (addr != 0) { | |
| 123 return addr; | |
| 124 } | |
| 125 } | |
| 126 return AllocateOld(size, HeapPage::kData); | |
| 127 } | 78 } |
| 128 | 79 |
| 129 uword Heap::AllocateOld(intptr_t size, HeapPage::PageType type) { | 80 uword Heap::AllocateOld(intptr_t size, HeapPage::PageType type) { |
| 130 ASSERT(Thread::Current()->no_safepoint_scope_depth() == 0); | 81 ASSERT(Thread::Current()->no_safepoint_scope_depth() == 0); |
| 131 uword addr = old_space_.TryAllocate(size, type); | 82 uword addr = old_space_.TryAllocate(size, type); |
| 132 if (addr != 0) { | 83 if (addr != 0) { |
| 133 return addr; | 84 return addr; |
| 134 } | 85 } |
| 135 // If we are in the process of running a sweep, wait for the sweeper to free | 86 // If we are in the process of running a sweep, wait for the sweeper to free |
| 136 // memory. | 87 // memory. |
| (...skipping 481 matching lines...) | |
| 618 | 569 |
| 619 bool Heap::Verify(MarkExpectation mark_expectation) const { | 570 bool Heap::Verify(MarkExpectation mark_expectation) const { |
| 620 HeapIterationScope heap_iteration_scope(Thread::Current()); | 571 HeapIterationScope heap_iteration_scope(Thread::Current()); |
| 621 return VerifyGC(mark_expectation); | 572 return VerifyGC(mark_expectation); |
| 622 } | 573 } |
| 623 | 574 |
| 624 bool Heap::VerifyGC(MarkExpectation mark_expectation) const { | 575 bool Heap::VerifyGC(MarkExpectation mark_expectation) const { |
| 625 StackZone stack_zone(Thread::Current()); | 576 StackZone stack_zone(Thread::Current()); |
| 626 | 577 |
| 627 // Change the new space's top_ with the more up-to-date thread's view of top_ | 578 // Change the new space's top_ with the more up-to-date thread's view of top_ |
| 628 new_space_.MakeNewSpaceIterable(); | 579 new_space_.FlushTLS(); |
| 629 | 580 |
| 630 ObjectSet* allocated_set = | 581 ObjectSet* allocated_set = |
| 631 CreateAllocatedObjectSet(stack_zone.GetZone(), mark_expectation); | 582 CreateAllocatedObjectSet(stack_zone.GetZone(), mark_expectation); |
| 632 VerifyPointersVisitor visitor(isolate(), allocated_set); | 583 VerifyPointersVisitor visitor(isolate(), allocated_set); |
| 633 VisitObjectPointers(&visitor); | 584 VisitObjectPointers(&visitor); |
| 634 | 585 |
| 635 // Only returning a value so that Heap::Validate can be called from an ASSERT. | 586 // Only returning a value so that Heap::Validate can be called from an ASSERT. |
| 636 return true; | 587 return true; |
| 637 } | 588 } |
| 638 | 589 |
| (...skipping 260 matching lines...) | |
| 899 : StackResource(thread) { | 850 : StackResource(thread) { |
| 900 Dart::vm_isolate()->heap()->WriteProtect(false); | 851 Dart::vm_isolate()->heap()->WriteProtect(false); |
| 901 } | 852 } |
| 902 | 853 |
| 903 WritableVMIsolateScope::~WritableVMIsolateScope() { | 854 WritableVMIsolateScope::~WritableVMIsolateScope() { |
| 904 ASSERT(Dart::vm_isolate()->heap()->UsedInWords(Heap::kNew) == 0); | 855 ASSERT(Dart::vm_isolate()->heap()->UsedInWords(Heap::kNew) == 0); |
| 905 Dart::vm_isolate()->heap()->WriteProtect(true); | 856 Dart::vm_isolate()->heap()->WriteProtect(true); |
| 906 } | 857 } |
| 907 | 858 |
| 908 } // namespace dart | 859 } // namespace dart |