Index: runtime/vm/isolate.h
diff --git a/runtime/vm/isolate.h b/runtime/vm/isolate.h
index c87b56d14dda8d4222db37798b383637e6461166..407a9f8d44256acee161098392c44cd8ccc320bd 100644
--- a/runtime/vm/isolate.h
+++ b/runtime/vm/isolate.h
@@ -26,6 +26,7 @@ class HandleScope;
 class HandleVisitor;
 class Heap;
 class ICData;
+class Instance;
 class LongJump;
 class MessageHandler;
 class Mutex;
@@ -49,30 +50,31 @@ class RawUint32x4;

 // Used by the deoptimization infrastructure to defer allocation of unboxed
 // objects until frame is fully rewritten and GC is safe.
-// See callers of Isolate::DeferObjectMaterialization.
-class DeferredObject {
+// Describes a stack slot that should be populated with a reference to the
+// materialized object.
+class DeferredSlot {
  public:
-  DeferredObject(RawInstance** slot, DeferredObject* next)
+  DeferredSlot(RawInstance** slot, DeferredSlot* next)
       : slot_(slot), next_(next) { }
-  virtual ~DeferredObject() { }
+  virtual ~DeferredSlot() { }

   RawInstance** slot() const { return slot_; }
-  DeferredObject* next() const { return next_; }
+  DeferredSlot* next() const { return next_; }

   virtual void Materialize() = 0;

  private:
   RawInstance** const slot_;
-  DeferredObject* const next_;
+  DeferredSlot* const next_;

-  DISALLOW_COPY_AND_ASSIGN(DeferredObject);
+  DISALLOW_COPY_AND_ASSIGN(DeferredSlot);
 };


-class DeferredDouble : public DeferredObject {
+class DeferredDouble : public DeferredSlot {
  public:
-  DeferredDouble(double value, RawInstance** slot, DeferredObject* next)
-      : DeferredObject(slot, next), value_(value) { }
+  DeferredDouble(double value, RawInstance** slot, DeferredSlot* next)
+      : DeferredSlot(slot, next), value_(value) { }

   virtual void Materialize();

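For illustration, a concrete subclass's Materialize() is expected to box the recorded raw value and store the resulting reference into the captured stack slot. A minimal sketch for DeferredDouble, assuming a value() accessor (elided by the next hunk) and the VM's Double::New factory; the actual definition lives in the corresponding .cc file and is not part of this header diff:

    void DeferredDouble::Materialize() {
      // Box the raw double and write the reference into the reserved slot.
      RawDouble** double_slot = reinterpret_cast<RawDouble**>(slot());
      *double_slot = Double::New(value());
    }
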
@@ -85,10 +87,10 @@ class DeferredDouble : public DeferredObject {
 };


-class DeferredMint : public DeferredObject {
+class DeferredMint : public DeferredSlot {
  public:
-  DeferredMint(int64_t value, RawInstance** slot, DeferredObject* next)
-      : DeferredObject(slot, next), value_(value) { }
+  DeferredMint(int64_t value, RawInstance** slot, DeferredSlot* next)
+      : DeferredSlot(slot, next), value_(value) { }

   virtual void Materialize();

@@ -101,11 +103,11 @@ class DeferredMint : public DeferredObject {
 };


-class DeferredFloat32x4 : public DeferredObject {
+class DeferredFloat32x4 : public DeferredSlot {
  public:
   DeferredFloat32x4(simd128_value_t value, RawInstance** slot,
-                    DeferredObject* next)
-      : DeferredObject(slot, next), value_(value) { }
+                    DeferredSlot* next)
+      : DeferredSlot(slot, next), value_(value) { }

   virtual void Materialize();

@@ -118,11 +120,11 @@ class DeferredFloat32x4 : public DeferredObject {
 };


-class DeferredUint32x4 : public DeferredObject {
+class DeferredUint32x4 : public DeferredSlot {
  public:
   DeferredUint32x4(simd128_value_t value, RawInstance** slot,
-                   DeferredObject* next)
-      : DeferredObject(slot, next), value_(value) { }
+                   DeferredSlot* next)
+      : DeferredSlot(slot, next), value_(value) { }

   virtual void Materialize();

@@ -135,6 +137,86 @@ class DeferredUint32x4 : public DeferredObject {
 };


+// Describes a slot that contains a reference to an object whose allocation
+// was removed by the AllocationSinking pass. The object itself is described
+// and materialized by DeferredObject.
+class DeferredObjectRef : public DeferredSlot {
+ public:
+  DeferredObjectRef(intptr_t index, RawInstance** slot, DeferredSlot* next)
+      : DeferredSlot(slot, next), index_(index) { }
+
+  virtual void Materialize();
+
+  intptr_t index() const { return index_; }
+
+ private:
+  const intptr_t index_;
+
+  DISALLOW_COPY_AND_ASSIGN(DeferredObjectRef);
+};
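A DeferredObjectRef carries no value of its own; it only remembers which DeferredObject (by index) should end up in the slot. A minimal sketch of its Materialize(), assuming the deferred object table that this change adds to Isolate below; the real definition is in the .cc file, not in this diff:

    void DeferredObjectRef::Materialize() {
      // Look up the materialized instance by index and store it into the slot.
      DeferredObject* obj = Isolate::Current()->GetDeferredObject(index());
      *slot() = obj->object();
    }
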
+
+
+// Describes an object whose allocation was removed by the AllocationSinking
+// pass. Arguments for materialization are stored as part of the expression
+// stack of the bottommost deoptimized frame so that the GC can discover them.
+// They are removed from the stack at the very end of deoptimization.
+class DeferredObject {
+ public:
+  DeferredObject(intptr_t field_count, intptr_t* args)
+      : field_count_(field_count),
+        args_(reinterpret_cast<RawObject**>(args)),
+        object_(NULL) { }
+
+  intptr_t ArgumentCount() const {
+    return kFieldsStartIndex + kFieldEntrySize * field_count_;
+  }
+
+  RawInstance* object();
+
+ private:
+  enum {
+    kClassIndex = 0,
+    kFieldsStartIndex = kClassIndex + 1
+  };
+
+  enum {
+    kFieldIndex = 0,
+    kValueIndex,
+    kFieldEntrySize,
+  };
+
+  // Materializes the object. The materialization arguments it consumes are
+  // removed from the expression stack at the very end of deoptimization
+  // (see ArgumentCount()).
+  void Materialize();
+
+  RawObject* GetClass() const {
+    return args_[kClassIndex];
+  }
+
+  RawObject* GetField(intptr_t index) const {
+    return args_[kFieldsStartIndex + kFieldEntrySize * index + kFieldIndex];
+  }
+
+  RawObject* GetValue(intptr_t index) const {
+    return args_[kFieldsStartIndex + kFieldEntrySize * index + kValueIndex];
+  }
+
+  // Number of fields that have to be initialized.
+  const intptr_t field_count_;
+
+  // Pointer to the first materialization argument on the stack. The first
+  // argument is the class of the instance to materialize, followed by
+  // (field, value) pairs.
+  RawObject** args_;
+
+  // Object materialized from this description.
+  const Instance* object_;
+
+  DISALLOW_COPY_AND_ASSIGN(DeferredObject);
+};
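The materialization arguments form a small table on the expression stack: args_[0] holds the class, followed by field_count_ (field, value) pairs. A hedged sketch of how object() and Materialize() might consume that layout, assuming the VM's handle machinery and the Instance::New and Instance::SetField APIs; the actual implementation is not part of this diff:

    RawInstance* DeferredObject::object() {
      if (object_ == NULL) {
        Materialize();  // Lazily build the instance on first use.
      }
      return object_->raw();
    }

    void DeferredObject::Materialize() {
      // Allocate an instance of the recorded class.
      Class& cls = Class::Handle();
      cls ^= GetClass();
      const Instance& obj = Instance::ZoneHandle(Instance::New(cls));

      // Copy each (field, value) pair from the stack into the new instance.
      Field& field = Field::Handle();
      Object& value = Object::Handle();
      for (intptr_t i = 0; i < field_count_; i++) {
        field ^= GetField(i);
        value = GetValue(i);
        obj.SetField(field, value);
      }
      object_ = &obj;
    }
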
+
+
 class Isolate : public BaseIsolate {
  public:
   ~Isolate();
@@ -424,40 +506,78 @@ class Isolate : public BaseIsolate {
   }
   intptr_t deopt_frame_copy_size() const { return deopt_frame_copy_size_; }

+  void PrepareForDeferredMaterialization(intptr_t count) {
+    if (count > 0) {
+      deferred_objects_ = new DeferredObject*[count];
+      deferred_objects_count_ = count;
+    }
+  }
+
+  void DeleteDeferredObjects() {
+    for (intptr_t i = 0; i < deferred_objects_count_; i++) {
+      delete deferred_objects_[i];
+    }
+    delete[] deferred_objects_;
+    deferred_objects_ = NULL;
+    deferred_objects_count_ = 0;
+  }
+
+  DeferredObject* GetDeferredObject(intptr_t idx) const {
+    return deferred_objects_[idx];
+  }
+
+  void SetDeferredObjectAt(intptr_t idx, DeferredObject* object) {
+    deferred_objects_[idx] = object;
+  }
+
+  intptr_t DeferredObjectsCount() const {
+    return deferred_objects_count_;
+  }
+
+  void DeferMaterializedObjectRef(intptr_t idx, intptr_t* slot) {
+    deferred_object_refs_ = new DeferredObjectRef(
+        idx,
+        reinterpret_cast<RawInstance**>(slot),
+        deferred_object_refs_);
+  }
+
   void DeferDoubleMaterialization(double value, RawDouble** slot) {
-    deferred_objects_ = new DeferredDouble(
+    deferred_boxes_ = new DeferredDouble(
         value,
         reinterpret_cast<RawInstance**>(slot),
-        deferred_objects_);
+        deferred_boxes_);
   }

   void DeferMintMaterialization(int64_t value, RawMint** slot) {
-    deferred_objects_ = new DeferredMint(value,
-                                         reinterpret_cast<RawInstance**>(slot),
-                                         deferred_objects_);
+    deferred_boxes_ = new DeferredMint(
+        value,
+        reinterpret_cast<RawInstance**>(slot),
+        deferred_boxes_);
   }

   void DeferFloat32x4Materialization(simd128_value_t value,
                                      RawFloat32x4** slot) {
-    deferred_objects_ = new DeferredFloat32x4(
+    deferred_boxes_ = new DeferredFloat32x4(
         value,
         reinterpret_cast<RawInstance**>(slot),
-        deferred_objects_);
+        deferred_boxes_);
   }

   void DeferUint32x4Materialization(simd128_value_t value,
                                     RawUint32x4** slot) {
-    deferred_objects_ = new DeferredUint32x4(
+    deferred_boxes_ = new DeferredUint32x4(
         value,
         reinterpret_cast<RawInstance**>(slot),
-        deferred_objects_);
+        deferred_boxes_);
   }

-  DeferredObject* DetachDeferredObjects() {
-    DeferredObject* list = deferred_objects_;
-    deferred_objects_ = NULL;
-    return list;
-  }
+  // Populate all deferred slots that contain boxes for double, mint and simd
+  // values.
+  void MaterializeDeferredBoxes();
+
+  // Populate all slots containing references to objects whose allocations
+  // were eliminated by the AllocationSinking pass.
+  void MaterializeDeferredObjects();
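Both methods presumably just walk the corresponding singly linked list of DeferredSlot entries and let each slot materialize itself; MaterializeDeferredObjects would do the same over deferred_object_refs_. A possible shape, shown only as a sketch since the definitions live in the .cc file:

    void Isolate::MaterializeDeferredBoxes() {
      DeferredSlot* slot = deferred_boxes_;
      while (slot != NULL) {
        DeferredSlot* current = slot;
        slot = slot->next();
        current->Materialize();  // Writes the boxed value into its stack slot.
        delete current;
      }
      deferred_boxes_ = NULL;
    }
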

   static char* GetStatus(const char* request);

@@ -511,7 +631,11 @@ class Isolate : public BaseIsolate {
   fpu_register_t* deopt_fpu_registers_copy_;
   intptr_t* deopt_frame_copy_;
   intptr_t deopt_frame_copy_size_;
-  DeferredObject* deferred_objects_;
+  DeferredSlot* deferred_boxes_;
+  DeferredSlot* deferred_object_refs_;
+
+  intptr_t deferred_objects_count_;
+  DeferredObject** deferred_objects_;

   // Status support.
   char* stacktrace_;
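
Taken together, the new Isolate interface gives the deoptimizer a two-phase protocol: register everything while the frame is being rewritten (when allocation is not safe), then materialize once the frame is complete and GC is safe again. A hypothetical call sequence; names such as materialization_count, field_count, args, ref_slot, unboxed_double and double_slot are placeholders for illustration and are not part of this change:

    Isolate* isolate = Isolate::Current();

    // Phase 1: while rewriting the frame (no allocation allowed).
    isolate->PrepareForDeferredMaterialization(materialization_count);
    isolate->SetDeferredObjectAt(i, new DeferredObject(field_count, args));
    isolate->DeferMaterializedObjectRef(i, ref_slot);
    isolate->DeferDoubleMaterialization(unboxed_double, double_slot);

    // Phase 2: frame fully rewritten, allocation is safe again.
    isolate->MaterializeDeferredBoxes();
    isolate->MaterializeDeferredObjects();
    isolate->DeleteDeferredObjects();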
|