OLD | NEW |
---|---|
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #ifndef VM_ISOLATE_H_ | 5 #ifndef VM_ISOLATE_H_ |
6 #define VM_ISOLATE_H_ | 6 #define VM_ISOLATE_H_ |
7 | 7 |
8 #include "include/dart_api.h" | 8 #include "include/dart_api.h" |
9 #include "platform/assert.h" | 9 #include "platform/assert.h" |
10 #include "platform/thread.h" | 10 #include "platform/thread.h" |
11 #include "vm/base_isolate.h" | 11 #include "vm/base_isolate.h" |
12 #include "vm/class_table.h" | 12 #include "vm/class_table.h" |
13 #include "vm/gc_callbacks.h" | 13 #include "vm/gc_callbacks.h" |
14 #include "vm/megamorphic_cache_table.h" | 14 #include "vm/megamorphic_cache_table.h" |
15 #include "vm/store_buffer.h" | 15 #include "vm/store_buffer.h" |
16 #include "vm/timer.h" | 16 #include "vm/timer.h" |
17 | 17 |
18 namespace dart { | 18 namespace dart { |
19 | 19 |
20 // Forward declarations. | 20 // Forward declarations. |
21 class ApiState; | 21 class ApiState; |
22 class CodeIndexTable; | 22 class CodeIndexTable; |
23 class Debugger; | 23 class Debugger; |
24 class Function; | 24 class Function; |
25 class HandleScope; | 25 class HandleScope; |
26 class HandleVisitor; | 26 class HandleVisitor; |
27 class Heap; | 27 class Heap; |
28 class ICData; | 28 class ICData; |
29 class Instance; | |
29 class LongJump; | 30 class LongJump; |
30 class MessageHandler; | 31 class MessageHandler; |
31 class Mutex; | 32 class Mutex; |
32 class ObjectPointerVisitor; | 33 class ObjectPointerVisitor; |
33 class ObjectStore; | 34 class ObjectStore; |
34 class RawInstance; | 35 class RawInstance; |
35 class RawArray; | 36 class RawArray; |
36 class RawContext; | 37 class RawContext; |
37 class RawDouble; | 38 class RawDouble; |
38 class RawMint; | 39 class RawMint; |
39 class RawObject; | 40 class RawObject; |
40 class RawInteger; | 41 class RawInteger; |
41 class RawError; | 42 class RawError; |
42 class Simulator; | 43 class Simulator; |
43 class StackResource; | 44 class StackResource; |
44 class StackZone; | 45 class StackZone; |
45 class StubCode; | 46 class StubCode; |
46 class RawFloat32x4; | 47 class RawFloat32x4; |
47 class RawUint32x4; | 48 class RawUint32x4; |
48 | 49 |
49 | 50 |
50 // Used by the deoptimization infrastructure to defer allocation of unboxed | 51 // Used by the deoptimization infrastructure to defer allocation of unboxed |
51 // objects until frame is fully rewritten and GC is safe. | 52 // objects until frame is fully rewritten and GC is safe. |
52 // See callers of Isolate::DeferObjectMaterialization. | 53 // Describes a stack slot that should be populated with a reference to the |
53 class DeferredObject { | 54 // materialized object. |
55 class DeferredSlot { | |
54 public: | 56 public: |
55 DeferredObject(RawInstance** slot, DeferredObject* next) | 57 DeferredSlot(RawInstance** slot, DeferredSlot* next) |
56 : slot_(slot), next_(next) { } | 58 : slot_(slot), next_(next) { } |
57 virtual ~DeferredObject() { } | 59 virtual ~DeferredSlot() { } |
58 | 60 |
59 RawInstance** slot() const { return slot_; } | 61 RawInstance** slot() const { return slot_; } |
60 DeferredObject* next() const { return next_; } | 62 DeferredSlot* next() const { return next_; } |
61 | 63 |
62 virtual void Materialize() = 0; | 64 virtual void Materialize() = 0; |
63 | 65 |
64 private: | 66 private: |
65 RawInstance** const slot_; | 67 RawInstance** const slot_; |
66 DeferredObject* const next_; | 68 DeferredSlot* const next_; |
67 | 69 |
68 DISALLOW_COPY_AND_ASSIGN(DeferredObject); | 70 DISALLOW_COPY_AND_ASSIGN(DeferredSlot); |
69 }; | 71 }; |
70 | 72 |
71 | 73 |
72 class DeferredDouble : public DeferredObject { | 74 class DeferredDouble : public DeferredSlot { |
73 public: | 75 public: |
74 DeferredDouble(double value, RawInstance** slot, DeferredObject* next) | 76 DeferredDouble(double value, RawInstance** slot, DeferredSlot* next) |
75 : DeferredObject(slot, next), value_(value) { } | 77 : DeferredSlot(slot, next), value_(value) { } |
76 | 78 |
77 virtual void Materialize(); | 79 virtual void Materialize(); |
78 | 80 |
79 double value() const { return value_; } | 81 double value() const { return value_; } |
80 | 82 |
81 private: | 83 private: |
82 const double value_; | 84 const double value_; |
83 | 85 |
84 DISALLOW_COPY_AND_ASSIGN(DeferredDouble); | 86 DISALLOW_COPY_AND_ASSIGN(DeferredDouble); |
85 }; | 87 }; |
86 | 88 |
87 | 89 |
88 class DeferredMint : public DeferredObject { | 90 class DeferredMint : public DeferredSlot { |
89 public: | 91 public: |
90 DeferredMint(int64_t value, RawInstance** slot, DeferredObject* next) | 92 DeferredMint(int64_t value, RawInstance** slot, DeferredSlot* next) |
91 : DeferredObject(slot, next), value_(value) { } | 93 : DeferredSlot(slot, next), value_(value) { } |
92 | 94 |
93 virtual void Materialize(); | 95 virtual void Materialize(); |
94 | 96 |
95 int64_t value() const { return value_; } | 97 int64_t value() const { return value_; } |
96 | 98 |
97 private: | 99 private: |
98 const int64_t value_; | 100 const int64_t value_; |
99 | 101 |
100 DISALLOW_COPY_AND_ASSIGN(DeferredMint); | 102 DISALLOW_COPY_AND_ASSIGN(DeferredMint); |
101 }; | 103 }; |
102 | 104 |
103 | 105 |
104 class DeferredFloat32x4 : public DeferredObject { | 106 class DeferredFloat32x4 : public DeferredSlot { |
105 public: | 107 public: |
106 DeferredFloat32x4(simd128_value_t value, RawInstance** slot, | 108 DeferredFloat32x4(simd128_value_t value, RawInstance** slot, |
107 DeferredObject* next) | 109 DeferredSlot* next) |
108 : DeferredObject(slot, next), value_(value) { } | 110 : DeferredSlot(slot, next), value_(value) { } |
109 | 111 |
110 virtual void Materialize(); | 112 virtual void Materialize(); |
111 | 113 |
112 simd128_value_t value() const { return value_; } | 114 simd128_value_t value() const { return value_; } |
113 | 115 |
114 private: | 116 private: |
115 const simd128_value_t value_; | 117 const simd128_value_t value_; |
116 | 118 |
117 DISALLOW_COPY_AND_ASSIGN(DeferredFloat32x4); | 119 DISALLOW_COPY_AND_ASSIGN(DeferredFloat32x4); |
118 }; | 120 }; |
119 | 121 |
120 | 122 |
121 class DeferredUint32x4 : public DeferredObject { | 123 class DeferredUint32x4 : public DeferredSlot { |
122 public: | 124 public: |
123 DeferredUint32x4(simd128_value_t value, RawInstance** slot, | 125 DeferredUint32x4(simd128_value_t value, RawInstance** slot, |
124 DeferredObject* next) | 126 DeferredSlot* next) |
125 : DeferredObject(slot, next), value_(value) { } | 127 : DeferredSlot(slot, next), value_(value) { } |
126 | 128 |
127 virtual void Materialize(); | 129 virtual void Materialize(); |
128 | 130 |
129 simd128_value_t value() const { return value_; } | 131 simd128_value_t value() const { return value_; } |
130 | 132 |
131 private: | 133 private: |
132 const simd128_value_t value_; | 134 const simd128_value_t value_; |
133 | 135 |
134 DISALLOW_COPY_AND_ASSIGN(DeferredUint32x4); | 136 DISALLOW_COPY_AND_ASSIGN(DeferredUint32x4); |
135 }; | 137 }; |
136 | 138 |
137 | 139 |
140 // Describes a slot that contains a reference to an object that had its | |
141 // allocation removed by AllocationSinking pass. | |
142 // Object itself is described and materialized by DeferredObject. | |
143 class DeferredObjectRef : public DeferredSlot { | |
144 public: | |
145 DeferredObjectRef(intptr_t index, RawInstance** slot, DeferredSlot* next) | |
146 : DeferredSlot(slot, next), index_(index) { } | |
147 | |
148 virtual void Materialize(); | |
149 | |
150 intptr_t index() const { return index_; } | |
151 | |
152 private: | |
153 const intptr_t index_; | |
srdjan
2013/05/07 23:11:45
Add DISALLOW_COPY_...
Vyacheslav Egorov (Google)
2013/05/07 23:28:21
Done.
| |
154 }; | |
155 | |
156 | |
157 // Describes an object whose allocation was removed by the AllocationSinking pass. |
158 // Arguments for materialization are stored as a part of expression stack | |
159 // for the bottommost deoptimized frame so that GC could discover them. | |
160 // They will be removed from the stack at the very end of deoptimization. | |
161 class DeferredObject { | |
162 public: | |
163 DeferredObject(intptr_t field_count, intptr_t* args) | |
164 : field_count_(field_count), | |
165 args_(reinterpret_cast<RawObject**>(args)), | |
166 object_(NULL) { } | |
167 | |
168 intptr_t ArgumentCount() const { | |
169 return kFieldsStartIndex + kFieldEntrySize * field_count_; | |
170 } | |
171 | |
172 RawInstance* object(); | |
173 | |
174 private: | |
175 enum { | |
176 kClassIndex = 0, | |
177 kFieldsStartIndex = kClassIndex + 1 | |
178 }; | |
179 | |
180 enum { | |
181 kFieldIndex = 0, | |
182 kValueIndex, | |
183 kFieldEntrySize, | |
184 }; | |
185 | |
186 // Materializes the object. The number of values consumed from the |
187 // expression stack (which should be removed from it at the very end of |
188 // deoptimization) is given by ArgumentCount(). |
189 void Materialize(); | |
190 | |
191 RawObject* GetClass() const { | |
192 return args_[kClassIndex]; | |
193 } | |
194 | |
195 RawObject* GetField(intptr_t index) const { | |
196 return args_[kFieldsStartIndex + kFieldEntrySize * index + kFieldIndex]; | |
197 } | |
198 | |
199 RawObject* GetValue(intptr_t index) const { | |
200 return args_[kFieldsStartIndex + kFieldEntrySize * index + kValueIndex]; | |
201 } | |
202 | |
203 // Number of fields that have to be initialized. |
204 const intptr_t field_count_; | |
205 | |
206 // Pointer to the first materialization argument on the stack. | |
207 // The first argument is Class of the instance to materialize followed by | |
208 // Field, value pairs. | |
209 RawObject** args_; | |
210 | |
211 const Instance* object_; | |
srdjan
2013/05/07 23:11:45
ditto
Vyacheslav Egorov (Google)
2013/05/07 23:28:21
Done.
| |
212 }; | |
213 | |
214 | |
138 class Isolate : public BaseIsolate { | 215 class Isolate : public BaseIsolate { |
139 public: | 216 public: |
140 ~Isolate(); | 217 ~Isolate(); |
141 | 218 |
142 static inline Isolate* Current() { | 219 static inline Isolate* Current() { |
143 return reinterpret_cast<Isolate*>(Thread::GetThreadLocal(isolate_key)); | 220 return reinterpret_cast<Isolate*>(Thread::GetThreadLocal(isolate_key)); |
144 } | 221 } |
145 | 222 |
146 static void SetCurrent(Isolate* isolate); | 223 static void SetCurrent(Isolate* isolate); |
147 | 224 |
(...skipping 269 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
417 } | 494 } |
418 intptr_t* deopt_frame_copy() const { return deopt_frame_copy_; } | 495 intptr_t* deopt_frame_copy() const { return deopt_frame_copy_; } |
419 void SetDeoptFrameCopy(intptr_t* value, intptr_t size) { | 496 void SetDeoptFrameCopy(intptr_t* value, intptr_t size) { |
420 ASSERT((value == NULL) || (size > 0)); | 497 ASSERT((value == NULL) || (size > 0)); |
421 ASSERT((value == NULL) || (deopt_frame_copy_ == NULL)); | 498 ASSERT((value == NULL) || (deopt_frame_copy_ == NULL)); |
422 deopt_frame_copy_ = value; | 499 deopt_frame_copy_ = value; |
423 deopt_frame_copy_size_ = size; | 500 deopt_frame_copy_size_ = size; |
424 } | 501 } |
425 intptr_t deopt_frame_copy_size() const { return deopt_frame_copy_size_; } | 502 intptr_t deopt_frame_copy_size() const { return deopt_frame_copy_size_; } |
426 | 503 |
504 void PrepareForDeferredMaterialization(intptr_t count) { | |
505 if (count > 0) { | |
506 deferred_objects_ = new DeferredObject*[count]; | |
507 deferred_objects_count_ = count; | |
508 } | |
509 } | |
510 | |
511 void DeleteDeferredObjects() { | |
512 for (intptr_t i = 0; i < deferred_objects_count_; i++) { | |
513 delete deferred_objects_[i]; | |
514 } | |
515 delete[] deferred_objects_; | |
516 deferred_objects_ = NULL; | |
517 deferred_objects_count_ = 0; | |
518 } | |
519 | |
520 DeferredObject* GetDeferredObject(intptr_t idx) const { | |
521 return deferred_objects_[idx]; | |
522 } | |
523 | |
524 void SetDeferredObjectAt(intptr_t idx, DeferredObject* object) { | |
525 deferred_objects_[idx] = object; | |
526 } | |
527 | |
528 intptr_t DeferredObjectsCount() const { | |
529 return deferred_objects_count_; | |
530 } | |
531 | |
532 void DeferMaterializedObjectRef(intptr_t idx, intptr_t* slot) { | |
533 deferred_object_refs_ = new DeferredObjectRef( | |
534 idx, | |
535 reinterpret_cast<RawInstance**>(slot), | |
536 deferred_object_refs_); | |
537 } | |
538 | |
427 void DeferDoubleMaterialization(double value, RawDouble** slot) { | 539 void DeferDoubleMaterialization(double value, RawDouble** slot) { |
428 deferred_objects_ = new DeferredDouble( | 540 deferred_boxes_ = new DeferredDouble( |
429 value, | 541 value, |
430 reinterpret_cast<RawInstance**>(slot), | 542 reinterpret_cast<RawInstance**>(slot), |
431 deferred_objects_); | 543 deferred_boxes_); |
432 } | 544 } |
433 | 545 |
434 void DeferMintMaterialization(int64_t value, RawMint** slot) { | 546 void DeferMintMaterialization(int64_t value, RawMint** slot) { |
435 deferred_objects_ = new DeferredMint(value, | 547 deferred_boxes_ = new DeferredMint( |
436 reinterpret_cast<RawInstance**>(slot), | 548 value, |
437 deferred_objects_); | 549 reinterpret_cast<RawInstance**>(slot), |
550 deferred_boxes_); | |
438 } | 551 } |
439 | 552 |
440 void DeferFloat32x4Materialization(simd128_value_t value, | 553 void DeferFloat32x4Materialization(simd128_value_t value, |
441 RawFloat32x4** slot) { | 554 RawFloat32x4** slot) { |
442 deferred_objects_ = new DeferredFloat32x4( | 555 deferred_boxes_ = new DeferredFloat32x4( |
443 value, | 556 value, |
444 reinterpret_cast<RawInstance**>(slot), | 557 reinterpret_cast<RawInstance**>(slot), |
445 deferred_objects_); | 558 deferred_boxes_); |
446 } | 559 } |
447 | 560 |
448 void DeferUint32x4Materialization(simd128_value_t value, | 561 void DeferUint32x4Materialization(simd128_value_t value, |
449 RawUint32x4** slot) { | 562 RawUint32x4** slot) { |
450 deferred_objects_ = new DeferredUint32x4( | 563 deferred_boxes_ = new DeferredUint32x4( |
451 value, | 564 value, |
452 reinterpret_cast<RawInstance**>(slot), | 565 reinterpret_cast<RawInstance**>(slot), |
453 deferred_objects_); | 566 deferred_boxes_); |
454 } | 567 } |
455 | 568 |
456 DeferredObject* DetachDeferredObjects() { | 569 // Populate all deferred slots that contain boxes for double, mint, simd |
457 DeferredObject* list = deferred_objects_; | 570 // values. |
458 deferred_objects_ = NULL; | 571 void MaterializeDeferredBoxes(); |
459 return list; | 572 |
460 } | 573 // Populate all slots containing references to objects which allocations |
574 // were eliminated by AllocationSinking pass. | |
575 void MaterializeDeferredObjects(); | |
461 | 576 |
462 static char* GetStatus(const char* request); | 577 static char* GetStatus(const char* request); |
463 | 578 |
464 private: | 579 private: |
465 Isolate(); | 580 Isolate(); |
466 | 581 |
467 void BuildName(const char* name_prefix); | 582 void BuildName(const char* name_prefix); |
468 void PrintInvokedFunctions(); | 583 void PrintInvokedFunctions(); |
469 | 584 |
470 static bool FetchStacktrace(); | 585 static bool FetchStacktrace(); |
(...skipping 33 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
504 bool is_runnable_; | 619 bool is_runnable_; |
505 IsolateRunState running_state_; | 620 IsolateRunState running_state_; |
506 GcPrologueCallbacks gc_prologue_callbacks_; | 621 GcPrologueCallbacks gc_prologue_callbacks_; |
507 GcEpilogueCallbacks gc_epilogue_callbacks_; | 622 GcEpilogueCallbacks gc_epilogue_callbacks_; |
508 | 623 |
509 // Deoptimization support. | 624 // Deoptimization support. |
510 intptr_t* deopt_cpu_registers_copy_; | 625 intptr_t* deopt_cpu_registers_copy_; |
511 fpu_register_t* deopt_fpu_registers_copy_; | 626 fpu_register_t* deopt_fpu_registers_copy_; |
512 intptr_t* deopt_frame_copy_; | 627 intptr_t* deopt_frame_copy_; |
513 intptr_t deopt_frame_copy_size_; | 628 intptr_t deopt_frame_copy_size_; |
514 DeferredObject* deferred_objects_; | 629 DeferredSlot* deferred_boxes_; |
630 DeferredSlot* deferred_object_refs_; | |
631 | |
632 intptr_t deferred_objects_count_; | |
633 DeferredObject** deferred_objects_; | |
515 | 634 |
516 // Status support. | 635 // Status support. |
517 char* stacktrace_; | 636 char* stacktrace_; |
518 intptr_t stack_frame_index_; | 637 intptr_t stack_frame_index_; |
519 | 638 |
520 static Dart_IsolateCreateCallback create_callback_; | 639 static Dart_IsolateCreateCallback create_callback_; |
521 static Dart_IsolateInterruptCallback interrupt_callback_; | 640 static Dart_IsolateInterruptCallback interrupt_callback_; |
522 static Dart_IsolateUnhandledExceptionCallback unhandled_exception_callback_; | 641 static Dart_IsolateUnhandledExceptionCallback unhandled_exception_callback_; |
523 static Dart_IsolateShutdownCallback shutdown_callback_; | 642 static Dart_IsolateShutdownCallback shutdown_callback_; |
524 static Dart_FileOpenCallback file_open_callback_; | 643 static Dart_FileOpenCallback file_open_callback_; |
(...skipping 113 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
638 | 757 |
639 private: | 758 private: |
640 Isolate::IsolateRunState saved_state_; | 759 Isolate::IsolateRunState saved_state_; |
641 | 760 |
642 DISALLOW_COPY_AND_ASSIGN(IsolateRunStateManager); | 761 DISALLOW_COPY_AND_ASSIGN(IsolateRunStateManager); |
643 }; | 762 }; |
644 | 763 |
645 } // namespace dart | 764 } // namespace dart |
646 | 765 |
647 #endif // VM_ISOLATE_H_ | 766 #endif // VM_ISOLATE_H_ |
OLD | NEW |