Chromium Code Reviews

Index: runtime/vm/scavenger.cc
diff --git a/runtime/vm/scavenger.cc b/runtime/vm/scavenger.cc
index a13bcce75a27f8ceee815933cd333ac69f8ebe6e..1e319bf66c416d367bfde9d26547664acb0f4404 100644
--- a/runtime/vm/scavenger.cc
+++ b/runtime/vm/scavenger.cc
@@ -338,7 +338,8 @@ Scavenger::Scavenger(Heap* heap,
       delayed_weak_properties_(NULL),
       gc_time_micros_(0),
       collections_(0),
-      external_size_(0) {
+      external_size_(0),
+      space_lock_(new Mutex()) {
   // Verify assumptions about the first word in objects which the scavenger is
   // going to use for forwarding pointers.
   ASSERT(Object::tags_offset() == 0);
@@ -370,6 +371,7 @@ Scavenger::Scavenger(Heap* heap,
 Scavenger::~Scavenger() {
   ASSERT(!scavenging_);
   to_->Delete();
+  delete space_lock_;
 }
@@ -392,6 +394,12 @@ SemiSpace* Scavenger::Prologue(Isolate* isolate, bool invoke_api_callbacks) {
     (isolate->gc_prologue_callback())();
   }
   isolate->PrepareForGC();
+
+  Thread* thread = Thread::Current();

rmacnak, 2017/07/05 17:39:53:
How about Thread* mutator = isolate->mutator_thread()?
danunez, 2017/07/05 18:12:55:
Done.

+  if (!thread->IsMutatorThread()) {
+    thread = isolate->mutator_thread();
+  }
+
   // Flip the two semi-spaces so that to_ is always the space for allocating
   // objects.
   SemiSpace* from = to_;
@@ -409,6 +417,12 @@ SemiSpace* Scavenger::Prologue(Isolate* isolate, bool invoke_api_callbacks) {
   top_ = FirstObjectStart();
   resolved_top_ = top_;
   end_ = to_->end();
+
+  if (thread->heap() != NULL) {
+    thread->set_top_offset(top_);
+    thread->set_end_offset(end_);
+  }
+
   return from;
 }
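
For reference, the exchange above resolves to caching the mutator thread
directly instead of testing Thread::Current(). A minimal sketch of the
resulting Prologue sequence, using only accessors that already appear in this
patch; the NULL check on the mutator thread is an added assumption, not
something the patch shows:

  // Sketch: after the semi-space flip, repoint the mutator's TLAB bounds at
  // the fresh to-space so its bump-pointer allocation stays in sync with
  // top_ and end_.
  Thread* mutator = isolate->mutator_thread();
  if ((mutator != NULL) && (mutator->heap() != NULL)) {
    mutator->set_top_offset(top_);
    mutator->set_end_offset(end_);
  }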
@@ -418,6 +432,25 @@ void Scavenger::Epilogue(Isolate* isolate,
                          bool invoke_api_callbacks) {
   // All objects in the to space have been copied from the from space at this
   // moment.
+
+  Thread* thread = Thread::Current();
+
+  if (!thread->IsMutatorThread()) {
+    thread = isolate->mutator_thread();
+  }
+  uword top = 0;
+  uword end = 0;
+
+
+  if (thread->heap() != NULL) {
+    top = thread->top();
+    ASSERT(thread->end() == end_);
+    end = thread->end();
+  } else {
+    top = top_;
+    end = end_;
+  }
+
   double avg_frac = stats_history_.Get(0).PromoCandidatesSuccessFraction();
   if (stats_history_.Size() >= 2) {
     // Previous scavenge is only given half as much weight.
@@ -426,11 +459,11 @@ void Scavenger::Epilogue(Isolate* isolate,
   }
   if (avg_frac < (FLAG_early_tenuring_threshold / 100.0)) {
     // Remember the limit to which objects have been copied.
-    survivor_end_ = top_;
+    survivor_end_ = top;
   } else {
     // Move survivor end to the end of the to_ space, making all surviving
     // objects candidates for promotion next time.
-    survivor_end_ = end_;
+    survivor_end_ = end;
   }
 #if defined(DEBUG)
   // We can only safely verify the store buffers from old space if there is no
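
The Epilogue and the hunks below all repeat the same lookup: read the top (and
end) from the mutator thread's TLAB when that thread has a heap attached,
otherwise fall back to the scavenger's own top_/end_. A hypothetical helper
that captures the pattern; the name VisitTop and the NULL guard are
illustrative only and do not appear in the patch:

  // Sketch: effective allocation top for iteration and statistics. Falls
  // back to the scavenger's cached top_ when there is no mutator TLAB to
  // consult.
  uword Scavenger::VisitTop(Thread* mutator) const {
    if ((mutator != NULL) && (mutator->heap() != NULL)) {
      return mutator->top();
    }
    return top_;
  }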
@@ -554,8 +587,22 @@ void Scavenger::IterateWeakRoots(Isolate* isolate, HandleVisitor* visitor) {
 void Scavenger::ProcessToSpace(ScavengerVisitor* visitor) {
   // Iterate until all work has been drained.
-  while ((resolved_top_ < top_) || PromotedStackHasMore()) {
-    while (resolved_top_ < top_) {
+
+  uword top = 0;
+  Thread* thread = Thread::Current();
+
+  if (!thread->IsMutatorThread()) {
+    thread = Isolate::Current()->mutator_thread();
+  }
+
+  if (thread->heap() != NULL) {
+    top = thread->top();
+  } else {
+    top = top_;
+  }
+
+  while ((resolved_top_ < top) || PromotedStackHasMore()) {
+    while (resolved_top_ < top) {
       RawObject* raw_obj = RawObject::FromAddr(resolved_top_);
       intptr_t class_id = raw_obj->GetClassId();
       if (class_id != kWeakPropertyCid) {
@@ -564,6 +611,12 @@ void Scavenger::ProcessToSpace(ScavengerVisitor* visitor) {
         RawWeakProperty* raw_weak = reinterpret_cast<RawWeakProperty*>(raw_obj);
         resolved_top_ += ProcessWeakProperty(raw_weak, visitor);
       }
+
+      if (thread->heap() != NULL) {
+        top = thread->top();
+      } else {
+        top = top_;
+      }
     }
     {
       // Visit all the promoted objects and update/scavenge their internal
@@ -576,6 +629,12 @@ void Scavenger::ProcessToSpace(ScavengerVisitor* visitor) {
         ASSERT(!raw_object->IsRemembered());
         visitor->VisitingOldObject(raw_object);
         raw_object->VisitPointersNonvirtual(visitor);
+
+        if (thread->heap() != NULL) {
+          top = thread->top();
+        } else {
+          top = top_;
+        }
       }
       visitor->VisitingOldObject(NULL);
     }
@@ -606,6 +665,13 @@ void Scavenger::ProcessToSpace(ScavengerVisitor* visitor) {
       } else {
        EnqueueWeakProperty(cur_weak);
       }
+
+      if (thread->heap() != NULL) {
+        top = thread->top();
+      } else {
+        top = top_;
+      }
+
       // Advance to next weak property in the queue.
       cur_weak = reinterpret_cast<RawWeakProperty*>(next_weak);
     }
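
ProcessToSpace re-reads the top after each unit of work because copying and
promoting objects during the scavenge advances the allocation pointer. With a
helper along the lines of the VisitTop sketch above (again hypothetical), the
repeated if/else re-reads would collapse into the loop conditions:

  // Sketch: drain until no unresolved to-space objects or promoted objects
  // remain; the top keeps moving as the loop itself allocates in to-space.
  Thread* mutator = Isolate::Current()->mutator_thread();
  while ((resolved_top_ < VisitTop(mutator)) || PromotedStackHasMore()) {
    // ... resolve to-space objects and drain the promoted stack ...
  }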
@@ -732,7 +798,19 @@ void Scavenger::ProcessWeakReferences() {
 void Scavenger::VisitObjectPointers(ObjectPointerVisitor* visitor) const {
   uword cur = FirstObjectStart();
-  while (cur < top_) {
+  Thread* thread = Thread::Current();
+
+  if (!thread->IsMutatorThread()) {

rmacnak, 2017/07/05 17:39:53:
Ditto
danunez, 2017/07/05 18:12:55:
Done.

+    thread = Isolate::Current()->mutator_thread();
+  }
+
+  uword top = CapacityInWords() == 0 ? 0 : thread->top();
+
+  if (thread->heap() == NULL) {
+    top = top_;
+  }
+
+  while (cur < top) {
     RawObject* raw_obj = RawObject::FromAddr(cur);
     cur += raw_obj->VisitPointers(visitor);
   }
@@ -741,7 +819,19 @@ void Scavenger::VisitObjectPointers(ObjectPointerVisitor* visitor) const {
 void Scavenger::VisitObjects(ObjectVisitor* visitor) const {
   uword cur = FirstObjectStart();
-  while (cur < top_) {
+  Thread* thread = Thread::Current();
+
+  if (!thread->IsMutatorThread()) {

rmacnak, 2017/07/05 17:39:53:
Ditto
danunez, 2017/07/05 18:12:55:
Done.

+    thread = Isolate::Current()->mutator_thread();
+  }
+
+  uword top = CapacityInWords() == 0 ? 0 : thread->top();
+
+  if (thread->heap() == NULL) {
+    top = top_;
+  }
+
+  while (cur < top) {
     RawObject* raw_obj = RawObject::FromAddr(cur);
     visitor->VisitObject(raw_obj);
     cur += raw_obj->Size();
@@ -756,9 +846,21 @@ void Scavenger::AddRegionsToObjectSet(ObjectSet* set) const {
 RawObject* Scavenger::FindObject(FindObjectVisitor* visitor) const {
   ASSERT(!scavenging_);
+  Thread* thread = Thread::Current();
+
+  if (!thread->IsMutatorThread()) {

rmacnak, 2017/07/05 17:39:52:
Ditto
danunez, 2017/07/05 18:12:55:
Done.

+    thread = Isolate::Current()->mutator_thread();
+  }
+
+  uword top = CapacityInWords() == 0 ? 0 : thread->top();
   uword cur = FirstObjectStart();
-  if (visitor->VisitRange(cur, top_)) {
-    while (cur < top_) {
+
+  if (thread->heap() == NULL) {
+    top = top_;
+  }
+
+  if (visitor->VisitRange(cur, top)) {
+    while (cur < top) {
       RawObject* raw_obj = RawObject::FromAddr(cur);
       uword next = cur + raw_obj->Size();
       if (visitor->VisitRange(cur, next) && raw_obj->FindObject(visitor)) {
@@ -766,7 +868,6 @@ RawObject* Scavenger::FindObject(FindObjectVisitor* visitor) const {
       }
       cur = next;
     }
-    ASSERT(cur == top_);
   }
   return Object::null();
 }