| OLD | NEW |
| 1 // Copyright (c) 2011, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2011, the Dart project authors. Please see the AUTHORS file |
| 2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
| 3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
| 4 | 4 |
| 5 #include "vm/scavenger.h" | 5 #include "vm/scavenger.h" |
| 6 | 6 |
| 7 #include "vm/dart.h" | 7 #include "vm/dart.h" |
| 8 #include "vm/dart_api_state.h" | 8 #include "vm/dart_api_state.h" |
| 9 #include "vm/isolate.h" | 9 #include "vm/isolate.h" |
| 10 #include "vm/lockers.h" | 10 #include "vm/lockers.h" |
| 11 #include "vm/object.h" | 11 #include "vm/object.h" |
| 12 #include "vm/object_set.h" | 12 #include "vm/object_set.h" |
| 13 #include "vm/object_id_ring.h" | 13 #include "vm/object_id_ring.h" |
| 14 #include "vm/safepoint.h" | 14 #include "vm/safepoint.h" |
| 15 #include "vm/stack_frame.h" | 15 #include "vm/stack_frame.h" |
| 16 #include "vm/store_buffer.h" | 16 #include "vm/store_buffer.h" |
| 17 #include "vm/thread_registry.h" | 17 #include "vm/thread_registry.h" |
| 18 #include "vm/timeline.h" | 18 #include "vm/timeline.h" |
| 19 #include "vm/verifier.h" | 19 #include "vm/verifier.h" |
| 20 #include "vm/visitor.h" | 20 #include "vm/visitor.h" |
| 21 #include "vm/weak_table.h" | 21 #include "vm/weak_table.h" |
| 22 | 22 |
| 23 namespace dart { | 23 namespace dart { |
| 24 | 24 |
| 25 DEFINE_FLAG(int, early_tenuring_threshold, 66, | 25 DEFINE_FLAG(int, |
| 26 early_tenuring_threshold, |
| 27 66, |
| 26 "When more than this percentage of promotion candidates survive, " | 28 "When more than this percentage of promotion candidates survive, " |
| 27 "promote all survivors of next scavenge."); | 29 "promote all survivors of next scavenge."); |
| 28 DEFINE_FLAG(int, new_gen_garbage_threshold, 90, | 30 DEFINE_FLAG(int, |
| 31 new_gen_garbage_threshold, |
| 32 90, |
| 29 "Grow new gen when less than this percentage is garbage."); | 33 "Grow new gen when less than this percentage is garbage."); |
| 30 DEFINE_FLAG(int, new_gen_growth_factor, 4, "Grow new gen by this factor."); | 34 DEFINE_FLAG(int, new_gen_growth_factor, 4, "Grow new gen by this factor."); |
| 31 | 35 |
| 32 // Scavenger uses RawObject::kMarkBit to distinguish forwarded and non-forwarded | 36 // Scavenger uses RawObject::kMarkBit to distinguish forwarded and non-forwarded |
| 33 // objects. The kMarkBit does not intersect with the target address because of | 37 // objects. The kMarkBit does not intersect with the target address because of |
| 34 // object alignment. | 38 // object alignment. |
| 35 enum { | 39 enum { |
| 36 kForwardingMask = 1 << RawObject::kMarkBit, | 40 kForwardingMask = 1 << RawObject::kMarkBit, |
| 37 kNotForwarded = 0, | 41 kNotForwarded = 0, |
| 38 kForwarded = kForwardingMask, | 42 kForwarded = kForwardingMask, |
| (...skipping 26 matching lines...) |
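The comment and enum above describe how the scavenger reuses the mark bit as a forwarding tag: because object alignment keeps that bit clear in any real target address, a header word can either be an ordinary tag word or a tagged forwarding pointer. A minimal standalone sketch of that encoding (illustrative constant and helper names, assuming bit 0 for the mark bit; the VM's actual helpers live in the lines skipped here):

```cpp
#include <cassert>
#include <cstdint>

// Illustration only: assume the mark bit is bit 0 and that object alignment
// guarantees it is never set in a real object address.
constexpr std::uintptr_t kSketchForwardingMask = std::uintptr_t{1} << 0;

inline std::uintptr_t ForwardingHeader(std::uintptr_t target_addr) {
  assert((target_addr & kSketchForwardingMask) == 0);  // alignment keeps bit clear
  return target_addr | kSketchForwardingMask;
}

inline bool IsForwarding(std::uintptr_t header) {
  return (header & kSketchForwardingMask) != 0;
}

inline std::uintptr_t ForwardedAddr(std::uintptr_t header) {
  assert(IsForwarding(header));
  return header & ~kSketchForwardingMask;
}
```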
| 65 Scavenger* scavenger, | 69 Scavenger* scavenger, |
| 66 SemiSpace* from) | 70 SemiSpace* from) |
| 67 : ObjectPointerVisitor(isolate), | 71 : ObjectPointerVisitor(isolate), |
| 68 thread_(Thread::Current()), | 72 thread_(Thread::Current()), |
| 69 scavenger_(scavenger), | 73 scavenger_(scavenger), |
| 70 from_(from), | 74 from_(from), |
| 71 heap_(scavenger->heap_), | 75 heap_(scavenger->heap_), |
| 72 vm_heap_(Dart::vm_isolate()->heap()), | 76 vm_heap_(Dart::vm_isolate()->heap()), |
| 73 page_space_(scavenger->heap_->old_space()), | 77 page_space_(scavenger->heap_->old_space()), |
| 74 bytes_promoted_(0), | 78 bytes_promoted_(0), |
| 75 visiting_old_object_(NULL) { } | 79 visiting_old_object_(NULL) {} |
| 76 | 80 |
| 77 void VisitPointers(RawObject** first, RawObject** last) { | 81 void VisitPointers(RawObject** first, RawObject** last) { |
| 78 ASSERT((visiting_old_object_ != NULL) || | 82 ASSERT((visiting_old_object_ != NULL) || |
| 79 scavenger_->Contains(reinterpret_cast<uword>(first)) || | 83 scavenger_->Contains(reinterpret_cast<uword>(first)) || |
| 80 !heap_->Contains(reinterpret_cast<uword>(first))); | 84 !heap_->Contains(reinterpret_cast<uword>(first))); |
| 81 for (RawObject** current = first; current <= last; current++) { | 85 for (RawObject** current = first; current <= last; current++) { |
| 82 ScavengePointer(current); | 86 ScavengePointer(current); |
| 83 } | 87 } |
| 84 } | 88 } |
| 85 | 89 |
| (...skipping 66 matching lines...) |
| 152 // Promotion did not succeed. Copy into the to space instead. | 156 // Promotion did not succeed. Copy into the to space instead. |
| 153 new_addr = scavenger_->TryAllocate(size); | 157 new_addr = scavenger_->TryAllocate(size); |
| 154 NOT_IN_PRODUCT(class_table->UpdateLiveNew(cid, size)); | 158 NOT_IN_PRODUCT(class_table->UpdateLiveNew(cid, size)); |
| 155 } | 159 } |
| 156 } | 160 } |
| 157 // During a scavenge we always succeed in at least copying all of the | 161 // During a scavenge we always succeed in at least copying all of the |
| 158 // current objects to the to space. | 162 // current objects to the to space. |
| 159 ASSERT(new_addr != 0); | 163 ASSERT(new_addr != 0); |
| 160 // Copy the object to the new location. | 164 // Copy the object to the new location. |
| 161 memmove(reinterpret_cast<void*>(new_addr), | 165 memmove(reinterpret_cast<void*>(new_addr), |
| 162 reinterpret_cast<void*>(raw_addr), | 166 reinterpret_cast<void*>(raw_addr), size); |
| 163 size); | |
| 164 // Remember forwarding address. | 167 // Remember forwarding address. |
| 165 ForwardTo(raw_addr, new_addr); | 168 ForwardTo(raw_addr, new_addr); |
| 166 } | 169 } |
| 167 // Update the reference. | 170 // Update the reference. |
| 168 RawObject* new_obj = RawObject::FromAddr(new_addr); | 171 RawObject* new_obj = RawObject::FromAddr(new_addr); |
| 169 *p = new_obj; | 172 *p = new_obj; |
| 170 // Update the store buffer as needed. | 173 // Update the store buffer as needed. |
| 171 if (visiting_old_object_ != NULL) { | 174 if (visiting_old_object_ != NULL) { |
| 172 UpdateStoreBuffer(p, new_obj); | 175 UpdateStoreBuffer(p, new_obj); |
| 173 } | 176 } |
| (...skipping 12 matching lines...) |
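The copy path above does three things: move the survivor's bytes into the destination space, leave a forwarding header behind in the old copy, and update the visited slot (plus the store buffer when the slot lives in an old object). A simplified, self-contained sketch of just the copy-and-forward step, with a hypothetical signature and omitting promotion and class-table accounting:

```cpp
#include <cstddef>
#include <cstdint>
#include <cstring>

// Simplified copy step: move the object's bytes to new_addr and tag the old
// header word as a forwarding pointer, so later visits of the same object
// reuse the new location instead of copying it again.
inline std::uintptr_t CopyAndForward(std::uintptr_t raw_addr,
                                     std::size_t size,
                                     std::uintptr_t new_addr) {
  std::memmove(reinterpret_cast<void*>(new_addr),
               reinterpret_cast<void*>(raw_addr), size);
  // Assumes bit 0 is the mark/forwarding bit, as in the earlier sketch.
  *reinterpret_cast<std::uintptr_t*>(raw_addr) = new_addr | std::uintptr_t{1};
  return new_addr;
}
```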
| 186 friend class Scavenger; | 189 friend class Scavenger; |
| 187 | 190 |
| 188 DISALLOW_COPY_AND_ASSIGN(ScavengerVisitor); | 191 DISALLOW_COPY_AND_ASSIGN(ScavengerVisitor); |
| 189 }; | 192 }; |
| 190 | 193 |
| 191 | 194 |
| 192 class ScavengerWeakVisitor : public HandleVisitor { | 195 class ScavengerWeakVisitor : public HandleVisitor { |
| 193 public: | 196 public: |
| 194 ScavengerWeakVisitor(Thread* thread, | 197 ScavengerWeakVisitor(Thread* thread, |
| 195 Scavenger* scavenger, | 198 Scavenger* scavenger, |
| 196 FinalizationQueue* finalization_queue) : | 199 FinalizationQueue* finalization_queue) |
| 197 HandleVisitor(thread), | 200 : HandleVisitor(thread), |
| 198 scavenger_(scavenger), | 201 scavenger_(scavenger), |
| 199 queue_(finalization_queue) { | 202 queue_(finalization_queue) { |
| 200 ASSERT(scavenger->heap_->isolate() == thread->isolate()); | 203 ASSERT(scavenger->heap_->isolate() == thread->isolate()); |
| 201 } | 204 } |
| 202 | 205 |
| 203 void VisitHandle(uword addr) { | 206 void VisitHandle(uword addr) { |
| 204 FinalizablePersistentHandle* handle = | 207 FinalizablePersistentHandle* handle = |
| 205 reinterpret_cast<FinalizablePersistentHandle*>(addr); | 208 reinterpret_cast<FinalizablePersistentHandle*>(addr); |
| 206 RawObject** p = handle->raw_addr(); | 209 RawObject** p = handle->raw_addr(); |
| 207 if (scavenger_->IsUnreachable(p)) { | 210 if (scavenger_->IsUnreachable(p)) { |
| 208 handle->UpdateUnreachable(thread()->isolate(), queue_); | 211 handle->UpdateUnreachable(thread()->isolate(), queue_); |
| 209 } else { | 212 } else { |
| 210 handle->UpdateRelocated(thread()->isolate()); | 213 handle->UpdateRelocated(thread()->isolate()); |
| 211 } | 214 } |
| 212 } | 215 } |
| 213 | 216 |
| 214 private: | 217 private: |
| 215 Scavenger* scavenger_; | 218 Scavenger* scavenger_; |
| 216 FinalizationQueue* queue_; | 219 FinalizationQueue* queue_; |
| 217 | 220 |
| 218 DISALLOW_COPY_AND_ASSIGN(ScavengerWeakVisitor); | 221 DISALLOW_COPY_AND_ASSIGN(ScavengerWeakVisitor); |
| 219 }; | 222 }; |
| 220 | 223 |
| 221 | 224 |
| 222 // Visitor used to verify that all old->new references have been added to the | 225 // Visitor used to verify that all old->new references have been added to the |
| 223 // StoreBuffers. | 226 // StoreBuffers. |
| 224 class VerifyStoreBufferPointerVisitor : public ObjectPointerVisitor { | 227 class VerifyStoreBufferPointerVisitor : public ObjectPointerVisitor { |
| 225 public: | 228 public: |
| 226 VerifyStoreBufferPointerVisitor(Isolate* isolate, | 229 VerifyStoreBufferPointerVisitor(Isolate* isolate, const SemiSpace* to) |
| 227 const SemiSpace* to) | |
| 228 : ObjectPointerVisitor(isolate), to_(to) {} | 230 : ObjectPointerVisitor(isolate), to_(to) {} |
| 229 | 231 |
| 230 void VisitPointers(RawObject** first, RawObject** last) { | 232 void VisitPointers(RawObject** first, RawObject** last) { |
| 231 for (RawObject** current = first; current <= last; current++) { | 233 for (RawObject** current = first; current <= last; current++) { |
| 232 RawObject* obj = *current; | 234 RawObject* obj = *current; |
| 233 if (obj->IsHeapObject() && obj->IsNewObject()) { | 235 if (obj->IsHeapObject() && obj->IsNewObject()) { |
| 234 ASSERT(to_->Contains(RawObject::ToAddr(obj))); | 236 ASSERT(to_->Contains(RawObject::ToAddr(obj))); |
| 235 } | 237 } |
| 236 } | 238 } |
| 237 } | 239 } |
| 238 | 240 |
| 239 private: | 241 private: |
| 240 const SemiSpace* to_; | 242 const SemiSpace* to_; |
| 241 | 243 |
| 242 DISALLOW_COPY_AND_ASSIGN(VerifyStoreBufferPointerVisitor); | 244 DISALLOW_COPY_AND_ASSIGN(VerifyStoreBufferPointerVisitor); |
| 243 }; | 245 }; |
| 244 | 246 |
| 245 | 247 |
| 246 SemiSpace::SemiSpace(VirtualMemory* reserved) | 248 SemiSpace::SemiSpace(VirtualMemory* reserved) |
| 247 : reserved_(reserved), region_(NULL, 0) { | 249 : reserved_(reserved), region_(NULL, 0) { |
| 248 if (reserved != NULL) { | 250 if (reserved != NULL) { |
| 249 region_ = MemoryRegion(reserved_->address(), reserved_->size()); | 251 region_ = MemoryRegion(reserved_->address(), reserved_->size()); |
| 250 } | 252 } |
| 251 } | 253 } |
| 252 | 254 |
| 253 | 255 |
| 254 SemiSpace::~SemiSpace() { | 256 SemiSpace::~SemiSpace() { |
| 255 if (reserved_ != NULL) { | 257 if (reserved_ != NULL) { |
| 256 #if defined(DEBUG) | 258 #if defined(DEBUG) |
| 257 memset(reserved_->address(), Heap::kZapByte, | 259 memset(reserved_->address(), Heap::kZapByte, size_in_words() |
| 258 size_in_words() << kWordSizeLog2); | 260 << kWordSizeLog2); |
| 259 #endif // defined(DEBUG) | 261 #endif // defined(DEBUG) |
| 260 delete reserved_; | 262 delete reserved_; |
| 261 } | 263 } |
| 262 } | 264 } |
| 263 | 265 |
| 264 | 266 |
| 265 Mutex* SemiSpace::mutex_ = NULL; | 267 Mutex* SemiSpace::mutex_ = NULL; |
| 266 SemiSpace* SemiSpace::cache_ = NULL; | 268 SemiSpace* SemiSpace::cache_ = NULL; |
| 267 | 269 |
| 268 | 270 |
| (...skipping 44 matching lines...) |
| 313 MutexLocker locker(mutex_); | 315 MutexLocker locker(mutex_); |
| 314 old_cache = cache_; | 316 old_cache = cache_; |
| 315 cache_ = this; | 317 cache_ = this; |
| 316 } | 318 } |
| 317 delete old_cache; | 319 delete old_cache; |
| 318 } | 320 } |
| 319 | 321 |
| 320 | 322 |
| 321 void SemiSpace::WriteProtect(bool read_only) { | 323 void SemiSpace::WriteProtect(bool read_only) { |
| 322 if (reserved_ != NULL) { | 324 if (reserved_ != NULL) { |
| 323 bool success = reserved_->Protect( | 325 bool success = reserved_->Protect(read_only ? VirtualMemory::kReadOnly |
| 324 read_only ? VirtualMemory::kReadOnly : VirtualMemory::kReadWrite); | 326 : VirtualMemory::kReadWrite); |
| 325 ASSERT(success); | 327 ASSERT(success); |
| 326 } | 328 } |
| 327 } | 329 } |
| 328 | 330 |
| 329 | 331 |
| 330 Scavenger::Scavenger(Heap* heap, | 332 Scavenger::Scavenger(Heap* heap, |
| 331 intptr_t max_semi_capacity_in_words, | 333 intptr_t max_semi_capacity_in_words, |
| 332 uword object_alignment) | 334 uword object_alignment) |
| 333 : heap_(heap), | 335 : heap_(heap), |
| 334 max_semi_capacity_in_words_(max_semi_capacity_in_words), | 336 max_semi_capacity_in_words_(max_semi_capacity_in_words), |
| 335 object_alignment_(object_alignment), | 337 object_alignment_(object_alignment), |
| 336 scavenging_(false), | 338 scavenging_(false), |
| 337 delayed_weak_properties_(NULL), | 339 delayed_weak_properties_(NULL), |
| 338 gc_time_micros_(0), | 340 gc_time_micros_(0), |
| 339 collections_(0), | 341 collections_(0), |
| 340 external_size_(0) { | 342 external_size_(0) { |
| 341 // Verify assumptions about the first word in objects which the scavenger is | 343 // Verify assumptions about the first word in objects which the scavenger is |
| 342 // going to use for forwarding pointers. | 344 // going to use for forwarding pointers. |
| 343 ASSERT(Object::tags_offset() == 0); | 345 ASSERT(Object::tags_offset() == 0); |
| 344 | 346 |
| 345 // Set initial size resulting in a total of three different levels. | 347 // Set initial size resulting in a total of three different levels. |
| 346 const intptr_t initial_semi_capacity_in_words = max_semi_capacity_in_words / | 348 const intptr_t initial_semi_capacity_in_words = |
| 349 max_semi_capacity_in_words / |
| 347 (FLAG_new_gen_growth_factor * FLAG_new_gen_growth_factor); | 350 (FLAG_new_gen_growth_factor * FLAG_new_gen_growth_factor); |
| 348 to_ = SemiSpace::New(initial_semi_capacity_in_words); | 351 to_ = SemiSpace::New(initial_semi_capacity_in_words); |
| 349 if (to_ == NULL) { | 352 if (to_ == NULL) { |
| 350 OUT_OF_MEMORY(); | 353 OUT_OF_MEMORY(); |
| 351 } | 354 } |
| 352 // Set up local fields. | 355 // Set up local fields. |
| 353 top_ = FirstObjectStart(); | 356 top_ = FirstObjectStart(); |
| 354 resolved_top_ = top_; | 357 resolved_top_ = top_; |
| 355 end_ = to_->end(); | 358 end_ = to_->end(); |
| 356 | 359 |
| (...skipping 179 matching lines...) |
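The initial-capacity computation in the constructor divides the maximum semi-space size by the growth factor squared, which is what produces the "three different levels" the comment mentions: the new generation can grow by the factor twice before hitting its cap. A worked example with the default factor of 4 and an assumed 2 MB maximum (illustrative numbers only; the real values come from VM flags and are expressed in words):

```cpp
#include <cstdio>

int main() {
  const long max_semi_capacity = 2 * 1024 * 1024;  // assumed maximum, bytes
  const int growth_factor = 4;                     // FLAG_new_gen_growth_factor default
  const long initial = max_semi_capacity / (growth_factor * growth_factor);
  // Three levels: 128 KB -> 512 KB -> 2 MB.
  std::printf("%ld -> %ld -> %ld bytes\n", initial, initial * growth_factor,
              initial * growth_factor * growth_factor);
  return 0;
}
```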
| 536 } | 539 } |
| 537 | 540 |
| 538 | 541 |
| 539 void Scavenger::IterateWeakRoots(Isolate* isolate, HandleVisitor* visitor) { | 542 void Scavenger::IterateWeakRoots(Isolate* isolate, HandleVisitor* visitor) { |
| 540 isolate->VisitWeakPersistentHandles(visitor); | 543 isolate->VisitWeakPersistentHandles(visitor); |
| 541 } | 544 } |
| 542 | 545 |
| 543 | 546 |
| 544 void Scavenger::ProcessToSpace(ScavengerVisitor* visitor) { | 547 void Scavenger::ProcessToSpace(ScavengerVisitor* visitor) { |
| 545 // Iterate until all work has been drained. | 548 // Iterate until all work has been drained. |
| 546 while ((resolved_top_ < top_) || | 549 while ((resolved_top_ < top_) || PromotedStackHasMore()) { |
| 547 PromotedStackHasMore()) { | |
| 548 while (resolved_top_ < top_) { | 550 while (resolved_top_ < top_) { |
| 549 RawObject* raw_obj = RawObject::FromAddr(resolved_top_); | 551 RawObject* raw_obj = RawObject::FromAddr(resolved_top_); |
| 550 intptr_t class_id = raw_obj->GetClassId(); | 552 intptr_t class_id = raw_obj->GetClassId(); |
| 551 if (class_id != kWeakPropertyCid) { | 553 if (class_id != kWeakPropertyCid) { |
| 552 resolved_top_ += raw_obj->VisitPointers(visitor); | 554 resolved_top_ += raw_obj->VisitPointers(visitor); |
| 553 } else { | 555 } else { |
| 554 RawWeakProperty* raw_weak = reinterpret_cast<RawWeakProperty*>(raw_obj); | 556 RawWeakProperty* raw_weak = reinterpret_cast<RawWeakProperty*>(raw_obj); |
| 555 resolved_top_ += ProcessWeakProperty(raw_weak, visitor); | 557 resolved_top_ += ProcessWeakProperty(raw_weak, visitor); |
| 556 } | 558 } |
| 557 } | 559 } |
| (...skipping 48 matching lines...) |
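The drain loop in ProcessToSpace is a Cheney-style scan: the region between resolved_top_ and top_ acts as an implicit work queue, because copying a survivor bumps top_ while scanning an object bumps resolved_top_. A schematic of that loop, leaving out the weak-property and promoted-stack cases handled above (names are stand-ins, not the VM's API):

```cpp
#include <cstdint>

using uword = std::uintptr_t;

// Schematic only: ScanObject stands in for RawObject::VisitPointers and
// returns the scanned object's size; copying survivors elsewhere bumps *top.
uword ScanObject(uword obj_addr, uword* top);

void DrainToSpace(uword* resolved_top, uword* top) {
  // The region [*resolved_top, *top) is the implicit work queue.
  while (*resolved_top < *top) {
    *resolved_top += ScanObject(*resolved_top, top);
  }
}
```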
| 606 | 608 |
| 607 void Scavenger::UpdateMaxHeapCapacity() { | 609 void Scavenger::UpdateMaxHeapCapacity() { |
| 608 if (heap_ == NULL) { | 610 if (heap_ == NULL) { |
| 609 // Some unit tests. | 611 // Some unit tests. |
| 610 return; | 612 return; |
| 611 } | 613 } |
| 612 ASSERT(to_ != NULL); | 614 ASSERT(to_ != NULL); |
| 613 ASSERT(heap_ != NULL); | 615 ASSERT(heap_ != NULL); |
| 614 Isolate* isolate = heap_->isolate(); | 616 Isolate* isolate = heap_->isolate(); |
| 615 ASSERT(isolate != NULL); | 617 ASSERT(isolate != NULL); |
| 616 isolate->GetHeapNewCapacityMaxMetric()->SetValue( | 618 isolate->GetHeapNewCapacityMaxMetric()->SetValue(to_->size_in_words() * |
| 617 to_->size_in_words() * kWordSize); | 619 kWordSize); |
| 618 } | 620 } |
| 619 | 621 |
| 620 | 622 |
| 621 void Scavenger::UpdateMaxHeapUsage() { | 623 void Scavenger::UpdateMaxHeapUsage() { |
| 622 if (heap_ == NULL) { | 624 if (heap_ == NULL) { |
| 623 // Some unit tests. | 625 // Some unit tests. |
| 624 return; | 626 return; |
| 625 } | 627 } |
| 626 ASSERT(to_ != NULL); | 628 ASSERT(to_ != NULL); |
| 627 ASSERT(heap_ != NULL); | 629 ASSERT(heap_ != NULL); |
| 628 Isolate* isolate = heap_->isolate(); | 630 Isolate* isolate = heap_->isolate(); |
| 629 ASSERT(isolate != NULL); | 631 ASSERT(isolate != NULL); |
| 630 isolate->GetHeapNewUsedMaxMetric()->SetValue(UsedInWords() * kWordSize); | 632 isolate->GetHeapNewUsedMaxMetric()->SetValue(UsedInWords() * kWordSize); |
| 631 } | 633 } |
| 632 | 634 |
| 633 | 635 |
| 634 void Scavenger::EnqueueWeakProperty(RawWeakProperty* raw_weak) { | 636 void Scavenger::EnqueueWeakProperty(RawWeakProperty* raw_weak) { |
| 635 ASSERT(raw_weak->IsHeapObject()); | 637 ASSERT(raw_weak->IsHeapObject()); |
| 636 ASSERT(raw_weak->IsNewObject()); | 638 ASSERT(raw_weak->IsNewObject()); |
| 637 ASSERT(raw_weak->IsWeakProperty()); | 639 ASSERT(raw_weak->IsWeakProperty()); |
| 638 DEBUG_ONLY( | 640 #if defined(DEBUG) |
| 639 uword raw_addr = RawObject::ToAddr(raw_weak); | 641 uword raw_addr = RawObject::ToAddr(raw_weak); |
| 640 uword header = *reinterpret_cast<uword*>(raw_addr); | 642 uword header = *reinterpret_cast<uword*>(raw_addr); |
| 641 ASSERT(!IsForwarding(header)); | 643 ASSERT(!IsForwarding(header)); |
| 642 ) | 644 #endif // defined(DEBUG) |
| 643 ASSERT(raw_weak->ptr()->next_ == 0); | 645 ASSERT(raw_weak->ptr()->next_ == 0); |
| 644 raw_weak->ptr()->next_ = reinterpret_cast<uword>(delayed_weak_properties_); | 646 raw_weak->ptr()->next_ = reinterpret_cast<uword>(delayed_weak_properties_); |
| 645 delayed_weak_properties_ = raw_weak; | 647 delayed_weak_properties_ = raw_weak; |
| 646 } | 648 } |
| 647 | 649 |
| 648 | 650 |
| 649 uword Scavenger::ProcessWeakProperty(RawWeakProperty* raw_weak, | 651 uword Scavenger::ProcessWeakProperty(RawWeakProperty* raw_weak, |
| 650 ScavengerVisitor* visitor) { | 652 ScavengerVisitor* visitor) { |
| 651 // The fate of the weak property is determined by its key. | 653 // The fate of the weak property is determined by its key. |
| 652 RawObject* raw_key = raw_weak->ptr()->key_; | 654 RawObject* raw_key = raw_weak->ptr()->key_; |
| 653 if (raw_key->IsHeapObject() && raw_key->IsNewObject()) { | 655 if (raw_key->IsHeapObject() && raw_key->IsNewObject()) { |
| 654 uword raw_addr = RawObject::ToAddr(raw_key); | 656 uword raw_addr = RawObject::ToAddr(raw_key); |
| 655 uword header = *reinterpret_cast<uword*>(raw_addr); | 657 uword header = *reinterpret_cast<uword*>(raw_addr); |
| 656 if (!IsForwarding(header)) { | 658 if (!IsForwarding(header)) { |
| 657 // Key is white. Enqueue the weak property. | 659 // Key is white. Enqueue the weak property. |
| 658 EnqueueWeakProperty(raw_weak); | 660 EnqueueWeakProperty(raw_weak); |
| 659 return raw_weak->Size(); | 661 return raw_weak->Size(); |
| 660 } | 662 } |
| 661 } | 663 } |
| 662 // Key is gray or black. Make the weak property black. | 664 // Key is gray or black. Make the weak property black. |
| 663 return raw_weak->VisitPointers(visitor); | 665 return raw_weak->VisitPointers(visitor); |
| 664 } | 666 } |
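ProcessWeakProperty above applies the usual ephemeron rule: a weak property only keeps its value alive once its key is known to be reachable. A condensed restatement of that per-object decision, with illustrative names standing in for the header check and the visitor:

```cpp
// Pseudo-code of the decision made per weak property during the scan.
// "key_forwarded" corresponds to "the key's header is a forwarding pointer",
// i.e. the key is gray or black; names are illustrative, not the VM's API.
enum class WeakFate { kDeferred, kStronglyVisited };

WeakFate ClassifyWeakProperty(bool key_in_new_space, bool key_forwarded) {
  if (key_in_new_space && !key_forwarded) {
    // Key is still white: defer the property; it is re-examined if the key
    // later becomes reachable, and cleared at the end of the cycle otherwise.
    return WeakFate::kDeferred;
  }
  // Key is gray or black: treat key and value as strong references now.
  return WeakFate::kStronglyVisited;
}
```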
| 665 | 667 |
| 666 | 668 |
| 667 void Scavenger::ProcessWeakReferences() { | 669 void Scavenger::ProcessWeakReferences() { |
| 668 // Rehash the weak tables now that we know which objects survive this cycle. | 670 // Rehash the weak tables now that we know which objects survive this cycle. |
| 669 for (int sel = 0; | 671 for (int sel = 0; sel < Heap::kNumWeakSelectors; sel++) { |
| 670 sel < Heap::kNumWeakSelectors; | 672 WeakTable* table = |
| 671 sel++) { | 673 heap_->GetWeakTable(Heap::kNew, static_cast<Heap::WeakSelector>(sel)); |
| 672 WeakTable* table = heap_->GetWeakTable( | 674 heap_->SetWeakTable(Heap::kNew, static_cast<Heap::WeakSelector>(sel), |
| 673 Heap::kNew, static_cast<Heap::WeakSelector>(sel)); | |
| 674 heap_->SetWeakTable(Heap::kNew, | |
| 675 static_cast<Heap::WeakSelector>(sel), | |
| 676 WeakTable::NewFrom(table)); | 675 WeakTable::NewFrom(table)); |
| 677 intptr_t size = table->size(); | 676 intptr_t size = table->size(); |
| 678 for (intptr_t i = 0; i < size; i++) { | 677 for (intptr_t i = 0; i < size; i++) { |
| 679 if (table->IsValidEntryAt(i)) { | 678 if (table->IsValidEntryAt(i)) { |
| 680 RawObject* raw_obj = table->ObjectAt(i); | 679 RawObject* raw_obj = table->ObjectAt(i); |
| 681 ASSERT(raw_obj->IsHeapObject()); | 680 ASSERT(raw_obj->IsHeapObject()); |
| 682 uword raw_addr = RawObject::ToAddr(raw_obj); | 681 uword raw_addr = RawObject::ToAddr(raw_obj); |
| 683 uword header = *reinterpret_cast<uword*>(raw_addr); | 682 uword header = *reinterpret_cast<uword*>(raw_addr); |
| 684 if (IsForwarding(header)) { | 683 if (IsForwarding(header)) { |
| 685 // The object has survived. Preserve its record. | 684 // The object has survived. Preserve its record. |
| 686 uword new_addr = ForwardedAddr(header); | 685 uword new_addr = ForwardedAddr(header); |
| 687 raw_obj = RawObject::FromAddr(new_addr); | 686 raw_obj = RawObject::FromAddr(new_addr); |
| 688 heap_->SetWeakEntry(raw_obj, | 687 heap_->SetWeakEntry(raw_obj, static_cast<Heap::WeakSelector>(sel), |
| 689 static_cast<Heap::WeakSelector>(sel), | |
| 690 table->ValueAt(i)); | 688 table->ValueAt(i)); |
| 691 } | 689 } |
| 692 } | 690 } |
| 693 } | 691 } |
| 694 // Remove the old table as it has been replaced with the newly allocated | 692 // Remove the old table as it has been replaced with the newly allocated |
| 695 // table above. | 693 // table above. |
| 696 delete table; | 694 delete table; |
| 697 } | 695 } |
| 698 | 696 |
| 699 // The queued weak properties at this point do not refer to reachable keys, | 697 // The queued weak properties at this point do not refer to reachable keys, |
| 700 // so we clear their key and value fields. | 698 // so we clear their key and value fields. |
| 701 { | 699 { |
| 702 RawWeakProperty* cur_weak = delayed_weak_properties_; | 700 RawWeakProperty* cur_weak = delayed_weak_properties_; |
| 703 delayed_weak_properties_ = NULL; | 701 delayed_weak_properties_ = NULL; |
| 704 while (cur_weak != NULL) { | 702 while (cur_weak != NULL) { |
| 705 uword next_weak = cur_weak->ptr()->next_; | 703 uword next_weak = cur_weak->ptr()->next_; |
| 706 // Reset the next pointer in the weak property. | 704 // Reset the next pointer in the weak property. |
| 707 cur_weak->ptr()->next_ = 0; | 705 cur_weak->ptr()->next_ = 0; |
| 708 | 706 |
| 709 DEBUG_ONLY( | 707 #if defined(DEBUG) |
| 710 RawObject* raw_key = cur_weak->ptr()->key_; | 708 RawObject* raw_key = cur_weak->ptr()->key_; |
| 711 uword raw_addr = RawObject::ToAddr(raw_key); | 709 uword raw_addr = RawObject::ToAddr(raw_key); |
| 712 uword header = *reinterpret_cast<uword*>(raw_addr); | 710 uword header = *reinterpret_cast<uword*>(raw_addr); |
| 713 ASSERT(!IsForwarding(header)); | 711 ASSERT(!IsForwarding(header)); |
| 714 ASSERT(raw_key->IsHeapObject()); | 712 ASSERT(raw_key->IsHeapObject()); |
| 715 ASSERT(raw_key->IsNewObject()); // Key still points into from space. | 713 ASSERT(raw_key->IsNewObject()); // Key still points into from space. |
| 716 ) | 714 #endif // defined(DEBUG) |
| 717 | 715 |
| 718 WeakProperty::Clear(cur_weak); | 716 WeakProperty::Clear(cur_weak); |
| 719 | 717 |
| 720 // Advance to next weak property in the queue. | 718 // Advance to next weak property in the queue. |
| 721 cur_weak = reinterpret_cast<RawWeakProperty*>(next_weak); | 719 cur_weak = reinterpret_cast<RawWeakProperty*>(next_weak); |
| 722 } | 720 } |
| 723 } | 721 } |
| 724 } | 722 } |
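The rehash loop above rebuilds each new-space weak table because surviving objects have moved: an entry is re-inserted under the object's forwarded address, while entries for dead objects are dropped along with the old table. A compact sketch of that rebuild, assuming a map-like table keyed by object address (illustrative types and helper, not the VM's WeakTable API):

```cpp
#include <cstdint>
#include <unordered_map>

using Addr = std::uintptr_t;
using WeakTableSketch = std::unordered_map<Addr, std::intptr_t>;

// Rebuild a weak table after a scavenge: keep only entries whose key object
// was forwarded, re-keyed by its new address. forwarded_to() is a
// hypothetical helper returning 0 when the object did not survive.
WeakTableSketch RebuildAfterScavenge(const WeakTableSketch& old_table,
                                     Addr (*forwarded_to)(Addr)) {
  WeakTableSketch new_table;
  for (const auto& entry : old_table) {
    if (Addr new_addr = forwarded_to(entry.first)) {
      new_table[new_addr] = entry.second;
    }
  }
  return new_table;
}
```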
| 725 | 723 |
| 726 | 724 |
| (...skipping 102 matching lines...) |
| 829 IterateWeakRoots(isolate, &weak_visitor); | 827 IterateWeakRoots(isolate, &weak_visitor); |
| 830 } | 828 } |
| 831 } | 829 } |
| 832 ProcessWeakReferences(); | 830 ProcessWeakReferences(); |
| 833 page_space->ReleaseDataLock(); | 831 page_space->ReleaseDataLock(); |
| 834 | 832 |
| 835 // Scavenge finished. Run accounting. | 833 // Scavenge finished. Run accounting. |
| 836 int64_t end = OS::GetCurrentTimeMicros(); | 834 int64_t end = OS::GetCurrentTimeMicros(); |
| 837 heap_->RecordTime(kProcessToSpace, middle - start); | 835 heap_->RecordTime(kProcessToSpace, middle - start); |
| 838 heap_->RecordTime(kIterateWeaks, end - middle); | 836 heap_->RecordTime(kIterateWeaks, end - middle); |
| 839 stats_history_.Add( | 837 stats_history_.Add(ScavengeStats( |
| 840 ScavengeStats(start, end, | 838 start, end, usage_before, GetCurrentUsage(), promo_candidate_words, |
| 841 usage_before, GetCurrentUsage(), | 839 visitor.bytes_promoted() >> kWordSizeLog2)); |
| 842 promo_candidate_words, | |
| 843 visitor.bytes_promoted() >> kWordSizeLog2)); | |
| 844 } | 840 } |
| 845 Epilogue(isolate, from, invoke_api_callbacks); | 841 Epilogue(isolate, from, invoke_api_callbacks); |
| 846 | 842 |
| 847 // TODO(koda): Make verification more compatible with concurrent sweep. | 843 // TODO(koda): Make verification more compatible with concurrent sweep. |
| 848 if (FLAG_verify_after_gc && !FLAG_concurrent_sweep) { | 844 if (FLAG_verify_after_gc && !FLAG_concurrent_sweep) { |
| 849 OS::PrintErr("Verifying after Scavenge..."); | 845 OS::PrintErr("Verifying after Scavenge..."); |
| 850 heap_->Verify(kForbidMarked); | 846 heap_->Verify(kForbidMarked); |
| 851 OS::PrintErr(" done.\n"); | 847 OS::PrintErr(" done.\n"); |
| 852 } | 848 } |
| 853 | 849 |
| (...skipping 46 matching lines...) |
| 900 } | 896 } |
| 901 | 897 |
| 902 | 898 |
| 903 void Scavenger::FreeExternal(intptr_t size) { | 899 void Scavenger::FreeExternal(intptr_t size) { |
| 904 ASSERT(size >= 0); | 900 ASSERT(size >= 0); |
| 905 external_size_ -= size; | 901 external_size_ -= size; |
| 906 ASSERT(external_size_ >= 0); | 902 ASSERT(external_size_ >= 0); |
| 907 } | 903 } |
| 908 | 904 |
| 909 } // namespace dart | 905 } // namespace dart |