OLD | NEW |
1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #include "vm/heap.h" | 5 #include "vm/heap.h" |
6 | 6 |
7 #include "platform/assert.h" | 7 #include "platform/assert.h" |
8 #include "platform/utils.h" | 8 #include "platform/utils.h" |
9 #include "vm/flags.h" | 9 #include "vm/flags.h" |
10 #include "vm/isolate.h" | 10 #include "vm/isolate.h" |
(...skipping 39 matching lines...)
50 Heap::~Heap() { | 50 Heap::~Heap() { |
51 delete barrier_; | 51 delete barrier_; |
52 delete barrier_done_; | 52 delete barrier_done_; |
53 | 53 |
54 for (int sel = 0; sel < kNumWeakSelectors; sel++) { | 54 for (int sel = 0; sel < kNumWeakSelectors; sel++) { |
55 delete new_weak_tables_[sel]; | 55 delete new_weak_tables_[sel]; |
56 delete old_weak_tables_[sel]; | 56 delete old_weak_tables_[sel]; |
57 } | 57 } |
58 } | 58 } |
59 | 59 |
60 void Heap::FillRemainingTLAB(Thread* thread) { | |
61 uword start = thread->top(); | |
62 uword end = thread->end(); | |
63 ASSERT(end >= start); | |
64 intptr_t size = end - start; | |
65 ASSERT(Utils::IsAligned(size, kObjectAlignment)); | |
66 if (size >= kObjectAlignment) { | |
67 FreeListElement::AsElement(start, size); | |
68 ASSERT(RawObject::FromAddr(start)->Size() == size); | |
69 ASSERT((start + size) == new_space_.top()); | |
70 } | |
71 } | |
72 | |
73 void Heap::AbandonRemainingTLAB(Thread* thread) { | |
74 FillRemainingTLAB(thread); | |
75 thread->set_top(0); | |
76 thread->set_end(0); | |
77 } | |
78 | |
79 intptr_t Heap::CalculateTLABSize() { | |
80 intptr_t size = new_space_.end() - new_space_.top(); | |
81 return Utils::RoundDown(size, kObjectAlignment); | |
82 } | |
83 | |
84 uword Heap::AllocateNew(intptr_t size) { | 60 uword Heap::AllocateNew(intptr_t size) { |
85 ASSERT(Thread::Current()->no_safepoint_scope_depth() == 0); | 61 ASSERT(Thread::Current()->no_safepoint_scope_depth() == 0); |
86 // Currently, only the Dart thread may allocate in new space. | 62 // Currently, only the Dart thread may allocate in new space. |
87 isolate()->AssertCurrentThreadIsMutator(); | 63 isolate()->AssertCurrentThreadIsMutator(); |
88 Thread* thread = Thread::Current(); | 64 Thread* thread = Thread::Current(); |
89 uword addr = new_space_.TryAllocateInTLAB(thread, size); | 65 uword addr = new_space_.TryAllocateInTLAB(thread, size); |
90 if (addr != 0) { | 66 if (addr == 0) { |
91 return addr; | 67 // This call to CollectGarbage might end up "reusing" a collection spawned |
92 } | 68 // from a different thread and will be racing to allocate the requested |
93 | 69 // memory with other threads being released after the collection. |
94 intptr_t tlab_size = CalculateTLABSize(); | 70 CollectGarbage(kNew); |
95 if ((tlab_size > 0) && (size > tlab_size)) { | 71 addr = new_space_.TryAllocateInTLAB(thread, size); |
96 return AllocateOld(size, HeapPage::kData); | 72 if (addr == 0) { |
97 } | 73 return AllocateOld(size, HeapPage::kData); |
98 | |
99 AbandonRemainingTLAB(thread); | |
100 if (tlab_size > 0) { | |
101 uword tlab_top = new_space_.TryAllocateNewTLAB(thread, tlab_size); | |
102 if (tlab_top != 0) { | |
103 addr = new_space_.TryAllocateInTLAB(thread, size); | |
104 ASSERT(addr != 0); | |
105 return addr; | |
106 } | 74 } |
107 } | 75 } |
108 | 76 return addr; |
109 ASSERT(!thread->HasActiveTLAB()); | |
110 | |
111 // This call to CollectGarbage might end up "reusing" a collection spawned | |
112 // from a different thread and will be racing to allocate the requested | |
113 // memory with other threads being released after the collection. | |
114 CollectGarbage(kNew); | |
115 tlab_size = CalculateTLABSize(); | |
116 uword tlab_top = new_space_.TryAllocateNewTLAB(thread, tlab_size); | |
117 if (tlab_top != 0) { | |
118 addr = new_space_.TryAllocateInTLAB(thread, size); | |
119 // It is possible a GC doesn't clear enough space. | |
120 // In that case, we must fall through and allocate into old space. | |
121 if (addr != 0) { | |
122 return addr; | |
123 } | |
124 } | |
125 return AllocateOld(size, HeapPage::kData); | |
126 } | 77 } |
127 | 78 |
128 uword Heap::AllocateOld(intptr_t size, HeapPage::PageType type) { | 79 uword Heap::AllocateOld(intptr_t size, HeapPage::PageType type) { |
129 ASSERT(Thread::Current()->no_safepoint_scope_depth() == 0); | 80 ASSERT(Thread::Current()->no_safepoint_scope_depth() == 0); |
130 uword addr = old_space_.TryAllocate(size, type); | 81 uword addr = old_space_.TryAllocate(size, type); |
131 if (addr != 0) { | 82 if (addr != 0) { |
132 return addr; | 83 return addr; |
133 } | 84 } |
134 // If we are in the process of running a sweep, wait for the sweeper to free | 85 // If we are in the process of running a sweep, wait for the sweeper to free |
135 // memory. | 86 // memory. |
(...skipping 461 matching lines...)
597 | 548 |
598 bool Heap::Verify(MarkExpectation mark_expectation) const { | 549 bool Heap::Verify(MarkExpectation mark_expectation) const { |
599 HeapIterationScope heap_iteration_scope; | 550 HeapIterationScope heap_iteration_scope; |
600 return VerifyGC(mark_expectation); | 551 return VerifyGC(mark_expectation); |
601 } | 552 } |
602 | 553 |
603 bool Heap::VerifyGC(MarkExpectation mark_expectation) const { | 554 bool Heap::VerifyGC(MarkExpectation mark_expectation) const { |
604 StackZone stack_zone(Thread::Current()); | 555 StackZone stack_zone(Thread::Current()); |
605 | 556 |
606 // Update the new space's top_ with the thread's more up-to-date view of top_. | 557 // Update the new space's top_ with the thread's more up-to-date view of top_. |
607 uword saved_top = new_space_.FlushTLS(); | 558 new_space_.FlushTLS(); |
608 | 559 |
609 ObjectSet* allocated_set = | 560 ObjectSet* allocated_set = |
610 CreateAllocatedObjectSet(stack_zone.GetZone(), mark_expectation); | 561 CreateAllocatedObjectSet(stack_zone.GetZone(), mark_expectation); |
611 VerifyPointersVisitor visitor(isolate(), allocated_set); | 562 VerifyPointersVisitor visitor(isolate(), allocated_set); |
612 VisitObjectPointers(&visitor); | 563 VisitObjectPointers(&visitor); |
613 | 564 |
614 new_space_.UnflushTLS(saved_top); | |
615 // Only returning a value so that Heap::Verify can be called from an ASSERT. | 565 // Only returning a value so that Heap::Verify can be called from an ASSERT. |
616 return true; | 566 return true; |
617 } | 567 } |
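Side note on the FlushTLS call above: the mutator thread keeps its own cached copy of the new space's bump pointer, so the space's top_ can be stale until the thread's value is copied back before iterating over the heap. A minimal sketch of that idea, using hypothetical ThreadView/SpaceView types rather than the VM's real Thread/Scavenger classes:

    #include <cstdint>

    // Hypothetical stand-ins: the thread owns the up-to-date bump pointer,
    // the space only sees it after an explicit flush.
    struct ThreadView {
      uintptr_t top = 0;   // thread-local allocation pointer (authoritative)
      uintptr_t end = 0;
    };

    struct SpaceView {
      uintptr_t top_ = 0;  // space's cached view; may lag behind the thread
      void FlushTLS(const ThreadView& thread) {
        // Adopt the thread's more up-to-date top before walking the objects.
        top_ = thread.top;
      }
    };

In the sketch the flushed value is simply kept, which matches the new version dropping both the saved_top return value and the UnflushTLS call.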
618 | 568 |
619 void Heap::PrintSizes() const { | 569 void Heap::PrintSizes() const { |
620 OS::PrintErr( | 570 OS::PrintErr( |
621 "New space (%" Pd64 "k of %" Pd64 | 571 "New space (%" Pd64 "k of %" Pd64 |
622 "k) " | 572 "k) " |
623 "Old space (%" Pd64 "k of %" Pd64 "k)\n", | 573 "Old space (%" Pd64 "k of %" Pd64 "k)\n", |
624 (UsedInWords(kNew) / KBInWords), (CapacityInWords(kNew) / KBInWords), | 574 (UsedInWords(kNew) / KBInWords), (CapacityInWords(kNew) / KBInWords), |
(...skipping 254 matching lines...)
879 : StackResource(thread) { | 829 : StackResource(thread) { |
880 Dart::vm_isolate()->heap()->WriteProtect(false); | 830 Dart::vm_isolate()->heap()->WriteProtect(false); |
881 } | 831 } |
882 | 832 |
883 WritableVMIsolateScope::~WritableVMIsolateScope() { | 833 WritableVMIsolateScope::~WritableVMIsolateScope() { |
884 ASSERT(Dart::vm_isolate()->heap()->UsedInWords(Heap::kNew) == 0); | 834 ASSERT(Dart::vm_isolate()->heap()->UsedInWords(Heap::kNew) == 0); |
885 Dart::vm_isolate()->heap()->WriteProtect(true); | 835 Dart::vm_isolate()->heap()->WriteProtect(true); |
886 } | 836 } |
887 | 837 |
888 } // namespace dart | 838 } // namespace dart |