| OLD | NEW |
| 1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file |
| 2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
| 3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
| 4 | 4 |
| 5 #include "vm/heap.h" | 5 #include "vm/heap.h" |
| 6 | 6 |
| 7 #include "platform/assert.h" | 7 #include "platform/assert.h" |
| 8 #include "platform/utils.h" | 8 #include "platform/utils.h" |
| 9 #include "vm/flags.h" | 9 #include "vm/flags.h" |
| 10 #include "vm/isolate.h" | 10 #include "vm/isolate.h" |
| 11 #include "vm/lockers.h" | 11 #include "vm/lockers.h" |
| 12 #include "vm/object.h" | 12 #include "vm/object.h" |
| 13 #include "vm/object_set.h" | 13 #include "vm/object_set.h" |
| 14 #include "vm/os.h" | 14 #include "vm/os.h" |
| 15 #include "vm/pages.h" | 15 #include "vm/pages.h" |
| 16 #include "vm/raw_object.h" | 16 #include "vm/raw_object.h" |
| 17 #include "vm/safepoint.h" |
| 17 #include "vm/scavenger.h" | 18 #include "vm/scavenger.h" |
| 18 #include "vm/service.h" | 19 #include "vm/service.h" |
| 19 #include "vm/service_event.h" | 20 #include "vm/service_event.h" |
| 20 #include "vm/service_isolate.h" | 21 #include "vm/service_isolate.h" |
| 21 #include "vm/stack_frame.h" | 22 #include "vm/stack_frame.h" |
| 22 #include "vm/tags.h" | 23 #include "vm/tags.h" |
| 23 #include "vm/timeline.h" | 24 #include "vm/timeline.h" |
| 24 #include "vm/verifier.h" | 25 #include "vm/verifier.h" |
| 25 #include "vm/virtual_memory.h" | 26 #include "vm/virtual_memory.h" |
| 26 #include "vm/weak_table.h" | 27 #include "vm/weak_table.h" |
| (...skipping 156 matching lines...) |
| 183 | 184 |
| 184 void Heap::VisitObjectsNoImagePages(ObjectVisitor* visitor) const { | 185 void Heap::VisitObjectsNoImagePages(ObjectVisitor* visitor) const { |
| 185 new_space_.VisitObjects(visitor); | 186 new_space_.VisitObjects(visitor); |
| 186 old_space_.VisitObjectsNoImagePages(visitor); | 187 old_space_.VisitObjectsNoImagePages(visitor); |
| 187 } | 188 } |
| 188 | 189 |
| 189 void Heap::VisitObjectsImagePages(ObjectVisitor* visitor) const { | 190 void Heap::VisitObjectsImagePages(ObjectVisitor* visitor) const { |
| 190 old_space_.VisitObjectsImagePages(visitor); | 191 old_space_.VisitObjectsImagePages(visitor); |
| 191 } | 192 } |
| 192 | 193 |
| 193 HeapIterationScope::HeapIterationScope(bool writable) | 194 HeapIterationScope::HeapIterationScope(Thread* thread, bool writable) |
| 194 : StackResource(Thread::Current()), | 195 : StackResource(thread), |
| 195 old_space_(isolate()->heap()->old_space()), | 196 heap_(isolate()->heap()), |
| | 197 old_space_(heap_->old_space()), |
| 196 writable_(writable) { | 198 writable_(writable) { |
| 197 { | 199 { |
| 198 // It's not yet safe to iterate over a paged space while it's concurrently | 200 // It's not yet safe to iterate over a paged space while it's concurrently |
| 199 // sweeping, so wait for any such task to complete first. | 201 // sweeping, so wait for any such task to complete first. |
| 200 MonitorLocker ml(old_space_->tasks_lock()); | 202 MonitorLocker ml(old_space_->tasks_lock()); |
| 201 #if defined(DEBUG) | 203 #if defined(DEBUG) |
| 202 // We currently don't support nesting of HeapIterationScopes. | 204 // We currently don't support nesting of HeapIterationScopes. |
| 203 ASSERT(old_space_->iterating_thread_ != thread()); | 205 ASSERT(old_space_->iterating_thread_ != thread); |
| 204 #endif | 206 #endif |
| 205 while (old_space_->tasks() > 0) { | 207 while (old_space_->tasks() > 0) { |
| 206 ml.WaitWithSafepointCheck(thread()); | 208 ml.WaitWithSafepointCheck(thread); |
| 207 } | 209 } |
| 208 #if defined(DEBUG) | 210 #if defined(DEBUG) |
| 209 ASSERT(old_space_->iterating_thread_ == NULL); | 211 ASSERT(old_space_->iterating_thread_ == NULL); |
| 210 old_space_->iterating_thread_ = thread(); | 212 old_space_->iterating_thread_ = thread; |
| 211 #endif | 213 #endif |
| 212 old_space_->set_tasks(1); | 214 old_space_->set_tasks(1); |
| 213 } | 215 } |
| 214 | 216 |
| | 217 isolate()->safepoint_handler()->SafepointThreads(thread); |
| | 218 |
| 215 if (writable_) { | 219 if (writable_) { |
| 216 thread()->heap()->WriteProtectCode(false); | 220 heap_->WriteProtectCode(false); |
| 217 } | 221 } |
| 218 } | 222 } |
| 219 | 223 |
| 220 HeapIterationScope::~HeapIterationScope() { | 224 HeapIterationScope::~HeapIterationScope() { |
| 221 if (writable_) { | 225 if (writable_) { |
| 222 thread()->heap()->WriteProtectCode(true); | 226 heap_->WriteProtectCode(true); |
| 223 } | 227 } |
| 224 | 228 |
| | 229 isolate()->safepoint_handler()->ResumeThreads(thread()); |
| | 230 |
| 225 MonitorLocker ml(old_space_->tasks_lock()); | 231 MonitorLocker ml(old_space_->tasks_lock()); |
| 226 #if defined(DEBUG) | 232 #if defined(DEBUG) |
| 227 ASSERT(old_space_->iterating_thread_ == thread()); | 233 ASSERT(old_space_->iterating_thread_ == thread()); |
| 228 old_space_->iterating_thread_ = NULL; | 234 old_space_->iterating_thread_ = NULL; |
| 229 #endif | 235 #endif |
| 230 ASSERT(old_space_->tasks() == 1); | 236 ASSERT(old_space_->tasks() == 1); |
| 231 old_space_->set_tasks(0); | 237 old_space_->set_tasks(0); |
| 232 ml.NotifyAll(); | 238 ml.NotifyAll(); |
| 233 } | 239 } |
| 234 | 240 |
| 235 void Heap::IterateObjects(ObjectVisitor* visitor) const { | 241 void HeapIterationScope::IterateObjects(ObjectVisitor* visitor) const { |
| 236 // The visitor must not allocate from the heap. | 242 heap_->VisitObjects(visitor); |
| 237 NoSafepointScope no_safepoint_scope_; | |
| 238 new_space_.VisitObjects(visitor); | |
| 239 IterateOldObjects(visitor); | |
| 240 } | 243 } |
| 241 | 244 |
| 242 void Heap::IterateOldObjects(ObjectVisitor* visitor) const { | 245 void HeapIterationScope::IterateObjectsNoImagePages( |
| 243 HeapIterationScope heap_iteration_scope; | 246 ObjectVisitor* visitor) const { |
| 244 old_space_.VisitObjects(visitor); | 247 heap_->new_space()->VisitObjects(visitor); |
| | 248 heap_->old_space()->VisitObjectsNoImagePages(visitor); |
| 245 } | 249 } |
| 246 | 250 |
| 247 void Heap::IterateOldObjectsNoImagePages(ObjectVisitor* visitor) const { | 251 void HeapIterationScope::IterateOldObjects(ObjectVisitor* visitor) const { |
| 248 HeapIterationScope heap_iteration_scope; | 252 old_space_->VisitObjects(visitor); |
| 249 old_space_.VisitObjectsNoImagePages(visitor); | 253 } |
| | 254 |
| | 255 void HeapIterationScope::IterateOldObjectsNoImagePages( |
| | 256 ObjectVisitor* visitor) const { |
| | 257 old_space_->VisitObjectsNoImagePages(visitor); |
| | 258 } |
| | 259 |
| | 260 void HeapIterationScope::IterateVMIsolateObjects(ObjectVisitor* visitor) const { |
| | 261 Dart::vm_isolate()->heap()->VisitObjects(visitor); |
| | 262 } |
| | 263 |
| | 264 void HeapIterationScope::IterateObjectPointers(ObjectPointerVisitor* visitor, |
| | 265 bool validate_frames) { |
| | 266 isolate()->VisitObjectPointers(visitor, validate_frames); |
| | 267 } |
| | 268 |
| | 269 void HeapIterationScope::IterateStackPointers(ObjectPointerVisitor* visitor, |
| | 270 bool validate_frames) { |
| | 271 isolate()->VisitStackPointers(visitor, validate_frames); |
| 250 } | 272 } |
| 251 | 273 |
| 252 void Heap::VisitObjectPointers(ObjectPointerVisitor* visitor) const { | 274 void Heap::VisitObjectPointers(ObjectPointerVisitor* visitor) const { |
| 253 new_space_.VisitObjectPointers(visitor); | 275 new_space_.VisitObjectPointers(visitor); |
| 254 old_space_.VisitObjectPointers(visitor); | 276 old_space_.VisitObjectPointers(visitor); |
| 255 } | 277 } |
| 256 | 278 |
| 257 RawInstructions* Heap::FindObjectInCodeSpace(FindObjectVisitor* visitor) const { | 279 RawInstructions* Heap::FindObjectInCodeSpace(FindObjectVisitor* visitor) const { |
| 258 // Only executable pages can have RawInstructions objects. | 280 // Only executable pages can have RawInstructions objects. |
| 259 RawObject* raw_obj = old_space_.FindObject(visitor, HeapPage::kExecutable); | 281 RawObject* raw_obj = old_space_.FindObject(visitor, HeapPage::kExecutable); |
| 260 ASSERT((raw_obj == Object::null()) || | 282 ASSERT((raw_obj == Object::null()) || |
| 261 (raw_obj->GetClassId() == kInstructionsCid)); | 283 (raw_obj->GetClassId() == kInstructionsCid)); |
| 262 return reinterpret_cast<RawInstructions*>(raw_obj); | 284 return reinterpret_cast<RawInstructions*>(raw_obj); |
| 263 } | 285 } |
| 264 | 286 |
| 265 RawObject* Heap::FindOldObject(FindObjectVisitor* visitor) const { | 287 RawObject* Heap::FindOldObject(FindObjectVisitor* visitor) const { |
| 266 HeapIterationScope heap_iteration_scope; | |
| 267 return old_space_.FindObject(visitor, HeapPage::kData); | 288 return old_space_.FindObject(visitor, HeapPage::kData); |
| 268 } | 289 } |
| 269 | 290 |
| 270 RawObject* Heap::FindNewObject(FindObjectVisitor* visitor) const { | 291 RawObject* Heap::FindNewObject(FindObjectVisitor* visitor) const { |
| 271 return new_space_.FindObject(visitor); | 292 return new_space_.FindObject(visitor); |
| 272 } | 293 } |
| 273 | 294 |
| 274 RawObject* Heap::FindObject(FindObjectVisitor* visitor) const { | 295 RawObject* Heap::FindObject(FindObjectVisitor* visitor) const { |
| 275 // The visitor must not allocate from the heap. | 296 // The visitor must not allocate from the heap. |
| 276 NoSafepointScope no_safepoint_scope; | 297 NoSafepointScope no_safepoint_scope; |
| (...skipping 263 matching lines...) |
| 540 // VM isolate heap is premarked. | 561 // VM isolate heap is premarked. |
| 541 VerifyObjectVisitor vm_object_visitor(isolate(), allocated_set, | 562 VerifyObjectVisitor vm_object_visitor(isolate(), allocated_set, |
| 542 kRequireMarked); | 563 kRequireMarked); |
| 543 vm_isolate->heap()->VisitObjects(&vm_object_visitor); | 564 vm_isolate->heap()->VisitObjects(&vm_object_visitor); |
| 544 } | 565 } |
| 545 | 566 |
| 546 return allocated_set; | 567 return allocated_set; |
| 547 } | 568 } |
| 548 | 569 |
| 549 bool Heap::Verify(MarkExpectation mark_expectation) const { | 570 bool Heap::Verify(MarkExpectation mark_expectation) const { |
| 550 HeapIterationScope heap_iteration_scope; | 571 HeapIterationScope heap_iteration_scope(Thread::Current()); |
| 551 return VerifyGC(mark_expectation); | 572 return VerifyGC(mark_expectation); |
| 552 } | 573 } |
| 553 | 574 |
| 554 bool Heap::VerifyGC(MarkExpectation mark_expectation) const { | 575 bool Heap::VerifyGC(MarkExpectation mark_expectation) const { |
| 555 StackZone stack_zone(Thread::Current()); | 576 StackZone stack_zone(Thread::Current()); |
| 556 | 577 |
| 557 // Change the new space's top_ with the more up-to-date thread's view of top_ | 578 // Change the new space's top_ with the more up-to-date thread's view of top_ |
| 558 new_space_.FlushTLS(); | 579 new_space_.FlushTLS(); |
| 559 | 580 |
| 560 ObjectSet* allocated_set = | 581 ObjectSet* allocated_set = |
| (...skipping 268 matching lines...) |
| 829 : StackResource(thread) { | 850 : StackResource(thread) { |
| 830 Dart::vm_isolate()->heap()->WriteProtect(false); | 851 Dart::vm_isolate()->heap()->WriteProtect(false); |
| 831 } | 852 } |
| 832 | 853 |
| 833 WritableVMIsolateScope::~WritableVMIsolateScope() { | 854 WritableVMIsolateScope::~WritableVMIsolateScope() { |
| 834 ASSERT(Dart::vm_isolate()->heap()->UsedInWords(Heap::kNew) == 0); | 855 ASSERT(Dart::vm_isolate()->heap()->UsedInWords(Heap::kNew) == 0); |
| 835 Dart::vm_isolate()->heap()->WriteProtect(true); | 856 Dart::vm_isolate()->heap()->WriteProtect(true); |
| 836 } | 857 } |
| 837 | 858 |
| 838 } // namespace dart | 859 } // namespace dart |
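
Caller-side sketch (illustrative only, not part of this CL): with the change above, heap iteration goes through a HeapIterationScope constructed with an explicit Thread*. The constructor waits for any concurrent sweeping task and safepoints the other threads; the destructor resumes them. The sketch assumes writable defaults to false in the header, as implied by the single-argument construction in Heap::Verify, and the caller function name is invented.

void VisitOldSpaceObjects(Thread* thread, ObjectVisitor* visitor) {
  // Waits for any concurrent sweep to finish and brings the other
  // threads to a safepoint for the duration of the scope.
  HeapIterationScope iteration(thread);
  // Visit every object in old space while the scope is active.
  iteration.IterateOldObjects(visitor);
}  // Destructor resumes the threads and releases the iteration task.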