Index: runtime/vm/scavenger.cc
diff --git a/runtime/vm/scavenger.cc b/runtime/vm/scavenger.cc
index 81cc55d5c781eb729c29f6685289ab9de22a58e2..275817db8d0960f35aaac93290f8b50ac27e0582 100644
--- a/runtime/vm/scavenger.cc
+++ b/runtime/vm/scavenger.cc
@@ -9,8 +9,8 @@
 #include "vm/isolate.h"
 #include "vm/lockers.h"
 #include "vm/object.h"
-#include "vm/object_set.h"
 #include "vm/object_id_ring.h"
+#include "vm/object_set.h"
 #include "vm/safepoint.h"
 #include "vm/stack_frame.h"
 #include "vm/store_buffer.h"
@@ -42,27 +42,23 @@ enum {
   kForwarded = kForwardingMask,
 };
 
-
 static inline bool IsForwarding(uword header) {
   uword bits = header & kForwardingMask;
   ASSERT((bits == kNotForwarded) || (bits == kForwarded));
   return bits == kForwarded;
 }
 
-
 static inline uword ForwardedAddr(uword header) {
   ASSERT(IsForwarding(header));
   return header & ~kForwardingMask;
 }
 
-
 static inline void ForwardTo(uword original, uword target) {
   // Make sure forwarding can be encoded.
   ASSERT((target & kForwardingMask) == 0);
   *reinterpret_cast<uword*>(original) = target | kForwarded;
 }
 
-
 class ScavengerVisitor : public ObjectPointerVisitor {
  public:
   explicit ScavengerVisitor(Isolate* isolate,
@@ -193,7 +189,6 @@ class ScavengerVisitor : public ObjectPointerVisitor {
   DISALLOW_COPY_AND_ASSIGN(ScavengerVisitor);
 };
 
-
 class ScavengerWeakVisitor : public HandleVisitor {
  public:
   ScavengerWeakVisitor(Thread* thread, Scavenger* scavenger)
@@ -218,7 +213,6 @@ class ScavengerWeakVisitor : public HandleVisitor {
   DISALLOW_COPY_AND_ASSIGN(ScavengerWeakVisitor);
 };
 
-
 // Visitor used to verify that all old->new references have been added to the
 // StoreBuffers.
 class VerifyStoreBufferPointerVisitor : public ObjectPointerVisitor {
@@ -241,7 +235,6 @@ class VerifyStoreBufferPointerVisitor : public ObjectPointerVisitor {
   DISALLOW_COPY_AND_ASSIGN(VerifyStoreBufferPointerVisitor);
 };
 
-
 SemiSpace::SemiSpace(VirtualMemory* reserved)
     : reserved_(reserved), region_(NULL, 0) {
   if (reserved != NULL) {
@@ -249,29 +242,25 @@ SemiSpace::SemiSpace(VirtualMemory* reserved)
   }
 }
 
-
 SemiSpace::~SemiSpace() {
   if (reserved_ != NULL) {
 #if defined(DEBUG)
-    memset(reserved_->address(), Heap::kZapByte, size_in_words()
-           << kWordSizeLog2);
+    memset(reserved_->address(), Heap::kZapByte,
+           size_in_words() << kWordSizeLog2);
 #endif  // defined(DEBUG)
     delete reserved_;
   }
 }
 
-
 Mutex* SemiSpace::mutex_ = NULL;
 SemiSpace* SemiSpace::cache_ = NULL;
 
-
 void SemiSpace::InitOnce() {
   ASSERT(mutex_ == NULL);
   mutex_ = new Mutex();
   ASSERT(mutex_ != NULL);
 }
 
-
 SemiSpace* SemiSpace::New(intptr_t size_in_words, const char* name) {
   {
     MutexLocker locker(mutex_);
@@ -300,7 +289,6 @@ SemiSpace* SemiSpace::New(intptr_t size_in_words, const char* name) {
   }
 }
 
-
 void SemiSpace::Delete() {
 #ifdef DEBUG
   if (reserved_ != NULL) {
@@ -317,7 +305,6 @@ void SemiSpace::Delete() {
   delete old_cache;
 }
 
-
 void SemiSpace::WriteProtect(bool read_only) {
   if (reserved_ != NULL) {
     bool success = reserved_->Protect(read_only ? VirtualMemory::kReadOnly
@@ -326,7 +313,6 @@ void SemiSpace::WriteProtect(bool read_only) {
   }
 }
 
-
 Scavenger::Scavenger(Heap* heap,
                      intptr_t max_semi_capacity_in_words,
                      uword object_alignment)
@@ -366,13 +352,11 @@ Scavenger::Scavenger(Heap* heap,
   UpdateMaxHeapUsage();
 }
 
-
 Scavenger::~Scavenger() {
   ASSERT(!scavenging_);
   to_->Delete();
 }
 
-
 intptr_t Scavenger::NewSizeInWords(intptr_t old_size_in_words) const {
   if (stats_history_.Size() == 0) {
     return old_size_in_words;
@@ -386,7 +370,6 @@ intptr_t Scavenger::NewSizeInWords(intptr_t old_size_in_words) const {
   }
 }
 
-
 SemiSpace* Scavenger::Prologue(Isolate* isolate, bool invoke_api_callbacks) {
   if (invoke_api_callbacks && (isolate->gc_prologue_callback() != NULL)) {
     (isolate->gc_prologue_callback())();
@@ -412,7 +395,6 @@ SemiSpace* Scavenger::Prologue(Isolate* isolate, bool invoke_api_callbacks) {
   return from;
 }
 
-
 void Scavenger::Epilogue(Isolate* isolate,
                          SemiSpace* from,
                          bool invoke_api_callbacks) {
@@ -455,7 +437,6 @@ void Scavenger::Epilogue(Isolate* isolate,
   }
 }
 
-
 void Scavenger::IterateStoreBuffers(Isolate* isolate,
                                     ScavengerVisitor* visitor) {
   // Iterating through the store buffers.
@@ -492,7 +473,6 @@ void Scavenger::IterateStoreBuffers(Isolate* isolate,
   visitor->VisitingOldObject(NULL);
 }
 
-
 void Scavenger::IterateObjectIdTable(Isolate* isolate,
                                      ScavengerVisitor* visitor) {
 #ifndef PRODUCT
@@ -509,7 +489,6 @@ void Scavenger::IterateObjectIdTable(Isolate* isolate,
 #endif  // !PRODUCT
 }
 
-
 void Scavenger::IterateRoots(Isolate* isolate, ScavengerVisitor* visitor) {
   int64_t start = OS::GetCurrentMonotonicMicros();
   isolate->VisitObjectPointers(visitor,
@@ -524,7 +503,6 @@ void Scavenger::IterateRoots(Isolate* isolate, ScavengerVisitor* visitor) {
   heap_->RecordTime(kDummyScavengeTime, 0);
 }
 
-
 bool Scavenger::IsUnreachable(RawObject** p) {
   RawObject* raw_obj = *p;
   if (!raw_obj->IsHeapObject()) {
@@ -546,12 +524,10 @@ bool Scavenger::IsUnreachable(RawObject** p) {
   return true;
 }
 
-
 void Scavenger::IterateWeakRoots(Isolate* isolate, HandleVisitor* visitor) {
   isolate->VisitWeakPersistentHandles(visitor);
 }
 
-
 void Scavenger::ProcessToSpace(ScavengerVisitor* visitor) {
   // Iterate until all work has been drained.
   while ((resolved_top_ < top_) || PromotedStackHasMore()) {
@@ -613,7 +589,6 @@ void Scavenger::ProcessToSpace(ScavengerVisitor* visitor) {
   }
 }
 
-
 void Scavenger::UpdateMaxHeapCapacity() {
   if (heap_ == NULL) {
     // Some unit tests.
@@ -627,7 +602,6 @@ void Scavenger::UpdateMaxHeapCapacity() {
                                                    kWordSize);
 }
 
-
 void Scavenger::UpdateMaxHeapUsage() {
   if (heap_ == NULL) {
     // Some unit tests.
@@ -640,7 +614,6 @@ void Scavenger::UpdateMaxHeapUsage() {
   isolate->GetHeapNewUsedMaxMetric()->SetValue(UsedInWords() * kWordSize);
 }
 
-
 void Scavenger::EnqueueWeakProperty(RawWeakProperty* raw_weak) {
   ASSERT(raw_weak->IsHeapObject());
   ASSERT(raw_weak->IsNewObject());
@@ -655,7 +628,6 @@ void Scavenger::EnqueueWeakProperty(RawWeakProperty* raw_weak) {
   delayed_weak_properties_ = raw_weak;
 }
 
-
 uword Scavenger::ProcessWeakProperty(RawWeakProperty* raw_weak,
                                      ScavengerVisitor* visitor) {
   // The fate of the weak property is determined by its key.
@@ -673,7 +645,6 @@ uword Scavenger::ProcessWeakProperty(RawWeakProperty* raw_weak,
   return raw_weak->VisitPointersNonvirtual(visitor);
 }
 
-
 void Scavenger::ProcessWeakReferences() {
   // Rehash the weak tables now that we know which objects survive this cycle.
   for (int sel = 0; sel < Heap::kNumWeakSelectors; sel++) {
@@ -729,7 +700,6 @@ void Scavenger::ProcessWeakReferences() {
   }
 }
 
-
 void Scavenger::VisitObjectPointers(ObjectPointerVisitor* visitor) const {
   uword cur = FirstObjectStart();
   while (cur < top_) {
@@ -738,7 +708,6 @@ void Scavenger::VisitObjectPointers(ObjectPointerVisitor* visitor) const {
   }
 }
 
-
 void Scavenger::VisitObjects(ObjectVisitor* visitor) const {
   uword cur = FirstObjectStart();
   while (cur < top_) {
@@ -748,12 +717,10 @@ void Scavenger::VisitObjects(ObjectVisitor* visitor) const {
   }
 }
 
-
 void Scavenger::AddRegionsToObjectSet(ObjectSet* set) const {
   set->AddRegion(to_->start(), to_->end());
 }
 
-
 RawObject* Scavenger::FindObject(FindObjectVisitor* visitor) const {
   ASSERT(!scavenging_);
   uword cur = FirstObjectStart();
@@ -771,14 +738,12 @@ RawObject* Scavenger::FindObject(FindObjectVisitor* visitor) const {
   return Object::null();
 }
 
-
 void Scavenger::Scavenge() {
   // TODO(cshapiro): Add a decision procedure for determining when the
   // the API callbacks should be invoked.
   Scavenge(false);
 }
 
-
 void Scavenger::Scavenge(bool invoke_api_callbacks) {
   Isolate* isolate = heap_->isolate();
   // Ensure that all threads for this isolate are at a safepoint (either stopped
@@ -857,13 +822,11 @@ void Scavenger::Scavenge(bool invoke_api_callbacks) {
   scavenging_ = false;
 }
 
-
 void Scavenger::WriteProtect(bool read_only) {
   ASSERT(!scavenging_);
   to_->WriteProtect(read_only);
 }
 
-
 #ifndef PRODUCT
 void Scavenger::PrintToJSONObject(JSONObject* object) const {
   if (!FLAG_support_service) {
@@ -894,20 +857,17 @@ void Scavenger::PrintToJSONObject(JSONObject* object) const {
 }
 #endif  // !PRODUCT
 
-
 void Scavenger::AllocateExternal(intptr_t size) {
   ASSERT(size >= 0);
   external_size_ += size;
 }
 
-
 void Scavenger::FreeExternal(intptr_t size) {
   ASSERT(size >= 0);
   external_size_ -= size;
   ASSERT(external_size_ >= 0);
 }
 
-
 void Scavenger::Evacuate() {
   // We need a safepoint here to prevent allocation right before or right after
   // the scavenge.
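
Reviewer note (not part of the patch): the IsForwarding / ForwardedAddr / ForwardTo helpers in the second hunk implement a classic Cheney-style forwarding pointer. Because new-space objects are at least word-aligned, the low bits of a to-space address are always zero, so the scavenger can overwrite a from-space object's header word with the tagged address of its copy, and later visits recover that address by masking the tag back off. Below is a minimal standalone sketch of that encoding; the tag constants and the main() driver are illustrative assumptions, not the VM's actual values (those are defined in the enum near the top of scavenger.cc).

#include <cassert>
#include <cstdint>

typedef uintptr_t uword;

// Illustrative tag values (assumed, not the VM's): word alignment leaves the
// low two bits of a real object address free to carry the forwarding tag.
enum : uword {
  kForwardingMask = 3,
  kForwarded = kForwardingMask,
};

static inline bool IsForwarding(uword header) {
  return (header & kForwardingMask) == kForwarded;
}

static inline uword ForwardedAddr(uword header) {
  assert(IsForwarding(header));
  return header & ~kForwardingMask;  // strip the tag to recover the address
}

static inline void ForwardTo(uword original, uword target) {
  assert((target & kForwardingMask) == 0);  // target must be aligned
  // Clobber the from-space header with the tagged to-space address.
  *reinterpret_cast<uword*>(original) = target | kForwarded;
}

int main() {
  // Two word-aligned dummy "objects"; slot 0 stands in for the header word.
  alignas(8) static uword from_obj[2] = {0, 0};
  alignas(8) static uword to_obj[2] = {0, 0};

  assert(!IsForwarding(from_obj[0]));
  ForwardTo(reinterpret_cast<uword>(from_obj), reinterpret_cast<uword>(to_obj));
  assert(IsForwarding(from_obj[0]));
  assert(ForwardedAddr(from_obj[0]) == reinterpret_cast<uword>(to_obj));
  return 0;
}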