OLD | NEW |
1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #include "vm/heap.h" | 5 #include "vm/heap.h" |
6 | 6 |
7 #include "platform/assert.h" | 7 #include "platform/assert.h" |
8 #include "platform/utils.h" | 8 #include "platform/utils.h" |
9 #include "vm/flags.h" | 9 #include "vm/flags.h" |
10 #include "vm/isolate.h" | 10 #include "vm/isolate.h" |
(...skipping 43 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
54 for (int sel = 0; sel < kNumWeakSelectors; sel++) { | 54 for (int sel = 0; sel < kNumWeakSelectors; sel++) { |
55 delete new_weak_tables_[sel]; | 55 delete new_weak_tables_[sel]; |
56 delete old_weak_tables_[sel]; | 56 delete old_weak_tables_[sel]; |
57 } | 57 } |
58 } | 58 } |
59 | 59 |
60 uword Heap::AllocateNew(intptr_t size) { | 60 uword Heap::AllocateNew(intptr_t size) { |
61 ASSERT(Thread::Current()->no_safepoint_scope_depth() == 0); | 61 ASSERT(Thread::Current()->no_safepoint_scope_depth() == 0); |
62 // Currently, only the Dart thread may allocate in new space. | 62 // Currently, only the Dart thread may allocate in new space. |
63 isolate()->AssertCurrentThreadIsMutator(); | 63 isolate()->AssertCurrentThreadIsMutator(); |
64 uword addr = new_space_.TryAllocate(size); | 64 Thread* thread = Thread::Current(); |
| 65 uword addr = new_space_.TryAllocateInTLAB(thread, size); |
65 if (addr == 0) { | 66 if (addr == 0) { |
66 // This call to CollectGarbage might end up "reusing" a collection spawned | 67 // This call to CollectGarbage might end up "reusing" a collection spawned |
67 // from a different thread, and will then race to allocate the requested | 68 // from a different thread, and will then race to allocate the requested |
68 // memory against other threads released after the collection. | 69 // memory against other threads released after the collection. |
69 CollectGarbage(kNew); | 70 CollectGarbage(kNew); |
70 addr = new_space_.TryAllocate(size); | 71 addr = new_space_.TryAllocateInTLAB(thread, size); |
71 if (addr == 0) { | 72 if (addr == 0) { |
72 return AllocateOld(size, HeapPage::kData); | 73 return AllocateOld(size, HeapPage::kData); |
73 } | 74 } |
74 } | 75 } |
75 return addr; | 76 return addr; |
76 } | 77 } |
77 | 78 |
78 uword Heap::AllocateOld(intptr_t size, HeapPage::PageType type) { | 79 uword Heap::AllocateOld(intptr_t size, HeapPage::PageType type) { |
79 ASSERT(Thread::Current()->no_safepoint_scope_depth() == 0); | 80 ASSERT(Thread::Current()->no_safepoint_scope_depth() == 0); |
80 uword addr = old_space_.TryAllocate(size, type); | 81 uword addr = old_space_.TryAllocate(size, type); |
(...skipping 382 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
463 bool Heap::GrowthControlState() { | 464 bool Heap::GrowthControlState() { |
464 return old_space_.GrowthControlState(); | 465 return old_space_.GrowthControlState(); |
465 } | 466 } |
466 | 467 |
467 void Heap::WriteProtect(bool read_only) { | 468 void Heap::WriteProtect(bool read_only) { |
468 read_only_ = read_only; | 469 read_only_ = read_only; |
469 new_space_.WriteProtect(read_only); | 470 new_space_.WriteProtect(read_only); |
470 old_space_.WriteProtect(read_only); | 471 old_space_.WriteProtect(read_only); |
471 } | 472 } |
472 | 473 |
473 intptr_t Heap::TopOffset(Heap::Space space) { | |
474 if (space == kNew) { | |
475 return OFFSET_OF(Heap, new_space_) + Scavenger::top_offset(); | |
476 } else { | |
477 ASSERT(space == kOld); | |
478 return OFFSET_OF(Heap, old_space_) + PageSpace::top_offset(); | |
479 } | |
480 } | |
481 | |
482 intptr_t Heap::EndOffset(Heap::Space space) { | |
483 if (space == kNew) { | |
484 return OFFSET_OF(Heap, new_space_) + Scavenger::end_offset(); | |
485 } else { | |
486 ASSERT(space == kOld); | |
487 return OFFSET_OF(Heap, old_space_) + PageSpace::end_offset(); | |
488 } | |
489 } | |
490 | |
491 void Heap::Init(Isolate* isolate, | 474 void Heap::Init(Isolate* isolate, |
492 intptr_t max_new_gen_words, | 475 intptr_t max_new_gen_words, |
493 intptr_t max_old_gen_words, | 476 intptr_t max_old_gen_words, |
494 intptr_t max_external_words) { | 477 intptr_t max_external_words) { |
495 ASSERT(isolate->heap() == NULL); | 478 ASSERT(isolate->heap() == NULL); |
496 Heap* heap = new Heap(isolate, max_new_gen_words, max_old_gen_words, | 479 Heap* heap = new Heap(isolate, max_new_gen_words, max_old_gen_words, |
497 max_external_words); | 480 max_external_words); |
498 isolate->set_heap(heap); | 481 isolate->set_heap(heap); |
499 } | 482 } |
500 | 483 |
(...skipping 53 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
554 return allocated_set; | 537 return allocated_set; |
555 } | 538 } |
556 | 539 |
557 bool Heap::Verify(MarkExpectation mark_expectation) const { | 540 bool Heap::Verify(MarkExpectation mark_expectation) const { |
558 HeapIterationScope heap_iteration_scope; | 541 HeapIterationScope heap_iteration_scope; |
559 return VerifyGC(mark_expectation); | 542 return VerifyGC(mark_expectation); |
560 } | 543 } |
561 | 544 |
562 bool Heap::VerifyGC(MarkExpectation mark_expectation) const { | 545 bool Heap::VerifyGC(MarkExpectation mark_expectation) const { |
563 StackZone stack_zone(Thread::Current()); | 546 StackZone stack_zone(Thread::Current()); |
| 547 |
| 548 // Update the new space's top_ with the thread's more up-to-date view of top_. |
| 549 new_space_.FlushTLS(); |
| 550 |
564 ObjectSet* allocated_set = | 551 ObjectSet* allocated_set = |
565 CreateAllocatedObjectSet(stack_zone.GetZone(), mark_expectation); | 552 CreateAllocatedObjectSet(stack_zone.GetZone(), mark_expectation); |
566 VerifyPointersVisitor visitor(isolate(), allocated_set); | 553 VerifyPointersVisitor visitor(isolate(), allocated_set); |
567 VisitObjectPointers(&visitor); | 554 VisitObjectPointers(&visitor); |
568 | 555 |
569 // Only returning a value so that Heap::Validate can be called from an ASSERT. | 556 // Only returning a value so that Heap::Validate can be called from an ASSERT. |
570 return true; | 557 return true; |
571 } | 558 } |
572 | 559 |
573 void Heap::PrintSizes() const { | 560 void Heap::PrintSizes() const { |
(...skipping 257 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
831 : StackResource(thread) { | 818 : StackResource(thread) { |
832 Dart::vm_isolate()->heap()->WriteProtect(false); | 819 Dart::vm_isolate()->heap()->WriteProtect(false); |
833 } | 820 } |
834 | 821 |
835 WritableVMIsolateScope::~WritableVMIsolateScope() { | 822 WritableVMIsolateScope::~WritableVMIsolateScope() { |
836 ASSERT(Dart::vm_isolate()->heap()->UsedInWords(Heap::kNew) == 0); | 823 ASSERT(Dart::vm_isolate()->heap()->UsedInWords(Heap::kNew) == 0); |
837 Dart::vm_isolate()->heap()->WriteProtect(true); | 824 Dart::vm_isolate()->heap()->WriteProtect(true); |
838 } | 825 } |
839 | 826 |
840 } // namespace dart | 827 } // namespace dart |
OLD | NEW |