OLD | NEW |
1 // Copyright (c) 2011, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2011, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #ifndef VM_STUB_CODE_H_ | 5 #ifndef VM_STUB_CODE_H_ |
6 #define VM_STUB_CODE_H_ | 6 #define VM_STUB_CODE_H_ |
7 | 7 |
8 #include "vm/allocation.h" | 8 #include "vm/allocation.h" |
9 #include "vm/assembler.h" | 9 #include "vm/assembler.h" |
10 | 10 |
(...skipping 17 matching lines...) |
28 V(LazyCompile) \ | 28 V(LazyCompile) \ |
29 V(CallBootstrapCFunction) \ | 29 V(CallBootstrapCFunction) \ |
30 V(CallNativeCFunction) \ | 30 V(CallNativeCFunction) \ |
31 V(FixCallersTarget) \ | 31 V(FixCallersTarget) \ |
32 V(CallStaticFunction) \ | 32 V(CallStaticFunction) \ |
33 V(OptimizeFunction) \ | 33 V(OptimizeFunction) \ |
34 V(InvokeDartCode) \ | 34 V(InvokeDartCode) \ |
35 V(DebugStepCheck) \ | 35 V(DebugStepCheck) \ |
36 V(MegamorphicLookup) \ | 36 V(MegamorphicLookup) \ |
37 V(FixAllocationStubTarget) \ | 37 V(FixAllocationStubTarget) \ |
38 V(FixAllocateArrayStubTarget) \ | |
39 V(Deoptimize) \ | 38 V(Deoptimize) \ |
40 V(DeoptimizeLazy) \ | 39 V(DeoptimizeLazy) \ |
41 V(UnoptimizedIdenticalWithNumberCheck) \ | 40 V(UnoptimizedIdenticalWithNumberCheck) \ |
42 V(OptimizedIdenticalWithNumberCheck) \ | 41 V(OptimizedIdenticalWithNumberCheck) \ |
43 V(ICCallBreakpoint) \ | 42 V(ICCallBreakpoint) \ |
44 V(RuntimeCallBreakpoint) \ | 43 V(RuntimeCallBreakpoint) \ |
45 V(OneArgCheckInlineCache) \ | 44 V(OneArgCheckInlineCache) \ |
46 V(TwoArgsCheckInlineCache) \ | 45 V(TwoArgsCheckInlineCache) \ |
47 V(SmiAddInlineCache) \ | 46 V(SmiAddInlineCache) \ |
48 V(SmiSubInlineCache) \ | 47 V(SmiSubInlineCache) \ |
49 V(SmiEqualInlineCache) \ | 48 V(SmiEqualInlineCache) \ |
50 V(UnaryRangeCollectingInlineCache) \ | 49 V(UnaryRangeCollectingInlineCache) \ |
51 V(BinaryRangeCollectingInlineCache) \ | 50 V(BinaryRangeCollectingInlineCache) \ |
52 V(OneArgOptimizedCheckInlineCache) \ | 51 V(OneArgOptimizedCheckInlineCache) \ |
53 V(TwoArgsOptimizedCheckInlineCache) \ | 52 V(TwoArgsOptimizedCheckInlineCache) \ |
54 V(ZeroArgsUnoptimizedStaticCall) \ | 53 V(ZeroArgsUnoptimizedStaticCall) \ |
55 V(OneArgUnoptimizedStaticCall) \ | 54 V(OneArgUnoptimizedStaticCall) \ |
56 V(TwoArgsUnoptimizedStaticCall) \ | 55 V(TwoArgsUnoptimizedStaticCall) \ |
57 V(Subtype1TestCache) \ | 56 V(Subtype1TestCache) \ |
58 V(Subtype2TestCache) \ | 57 V(Subtype2TestCache) \ |
59 V(Subtype3TestCache) \ | 58 V(Subtype3TestCache) \ |
60 V(AllocateContext) \ | 59 V(AllocateContext) \ |
| 60 V(AllocateArray) \ |
| 61 V(CallClosureNoSuchMethod) |
61 | 62 |
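The list above is an X-macro: each consumer defines V and then invokes the list. As a reference for how it is consumed, the STUB_CODE_ENTRY definition later in this header expands it into one static entry pointer per stub:

    #define STUB_CODE_ENTRY(name) \
      static StubEntry* name##_entry_;
    VM_STUB_CODE_LIST(STUB_CODE_ENTRY);
    // -> static StubEntry* LazyCompile_entry_;
    //    static StubEntry* CallBootstrapCFunction_entry_;
    //    ... one pointer for every V(...) entry in the list.
    #undef STUB_CODE_ENTRY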
62 // It is permitted for the stubs above to refer to Object::null(), which is | 63 // It is permitted for the stubs above to refer to Object::null(), which is |
63 // allocated in the VM isolate and shared across all isolates. | 64 // allocated in the VM isolate and shared across all isolates. |
64 // However, in cases where a simple GC-safe placeholder is needed on the stack, | 65 // However, in cases where a simple GC-safe placeholder is needed on the stack, |
65 // using Smi 0 instead of Object::null() is slightly more efficient, since a Smi | 66 // using Smi 0 instead of Object::null() is slightly more efficient, since a Smi |
66 // does not require relocation. | 67 // does not require relocation. |
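A minimal sketch of why, assuming the VM's standard Smi tagging scheme (value shifted left one bit, low tag bit zero):

    intptr_t TagSmi(intptr_t value) { return value << 1; }  // tag bit is 0
    // TagSmi(0) == 0x0: a literal immediate word that is valid in every
    // isolate, so a stub can materialize it with no relocation entry.
    // Object::null(), by contrast, is a heap address in the VM isolate,
    // and embedding it forces the code to carry relocation information.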
67 | 68 |
68 // List of stubs created per isolate, these stubs could potentially contain | |
69 // embedded objects and hence cannot be shared across isolates. | |
70 #define STUB_CODE_LIST(V) \ | |
71 V(CallClosureNoSuchMethod) \ | |
72 | |
73 | |
74 // class StubEntry is used to describe stub methods generated in dart to | 69 // class StubEntry is used to describe stub methods generated in dart to |
75 // abstract out common code executed from generated dart code. | 70 // abstract out common code executed from generated dart code. |
76 class StubEntry { | 71 class StubEntry { |
77 public: | 72 public: |
78 explicit StubEntry(const Code& code); | 73 explicit StubEntry(const Code& code); |
79 ~StubEntry() {} | 74 ~StubEntry() {} |
80 | 75 |
81 const ExternalLabel& label() const { return label_; } | 76 const ExternalLabel& label() const { return label_; } |
82 uword EntryPoint() const { return entry_point_; } | 77 uword EntryPoint() const { return entry_point_; } |
83 RawCode* code() const { return code_; } | 78 RawCode* code() const { return code_; } |
84 intptr_t Size() const { return size_; } | 79 intptr_t Size() const { return size_; } |
85 | 80 |
86 // Visit all object pointers. | 81 // Visit all object pointers. |
87 void VisitObjectPointers(ObjectPointerVisitor* visitor); | 82 void VisitObjectPointers(ObjectPointerVisitor* visitor); |
88 | 83 |
89 private: | 84 private: |
90 RawCode* code_; | 85 RawCode* code_; |
91 uword entry_point_; | 86 uword entry_point_; |
92 intptr_t size_; | 87 intptr_t size_; |
93 ExternalLabel label_; | 88 ExternalLabel label_; |
94 | 89 |
95 DISALLOW_COPY_AND_ASSIGN(StubEntry); | 90 DISALLOW_COPY_AND_ASSIGN(StubEntry); |
96 }; | 91 }; |
97 | 92 |
98 | 93 |
99 // class StubCode is used to maintain the lifecycle of stubs. | 94 // class StubCode is used to maintain the lifecycle of stubs. |
100 class StubCode { | 95 class StubCode : public AllStatic { |
101 public: | 96 public: |
102 explicit StubCode(Isolate* isolate) | |
103 : | |
104 #define STUB_CODE_INITIALIZER(name) \ | |
105 name##_entry_(NULL), | |
106 STUB_CODE_LIST(STUB_CODE_INITIALIZER) | |
107 isolate_(isolate) {} | |
108 ~StubCode(); | |
109 | |
110 | |
111 // Generate all stubs which are shared across all isolates; this is done | 97 // Generate all stubs which are shared across all isolates; this is done |
112 // only once, and the stub code resides in the vm_isolate heap. | 98 // only once, and the stub code resides in the vm_isolate heap. |
113 static void InitOnce(); | 99 static void InitOnce(); |
114 | 100 |
115 // Generate all stubs which are created on a per-isolate basis, as they | 101 // Generate all stubs which are created on a per-isolate basis, as they |
116 // have embedded objects which are isolate-specific. | 102 // have embedded objects which are isolate-specific. |
117 static void Init(Isolate* isolate); | 103 static void Init(Isolate* isolate); |
118 | 104 |
119 static void VisitObjectPointers(ObjectPointerVisitor* visitor); | 105 static void VisitObjectPointers(ObjectPointerVisitor* visitor); |
120 | 106 |
121 // Check if the specified pc is in the dart invocation stub used for | 107 // Check if the specified pc is in the dart invocation stub used for |
122 // transitioning into dart code. | 108 // transitioning into dart code. |
123 static bool InInvocationStub(uword pc); | 109 static bool InInvocationStub(uword pc); |
124 | 110 |
125 static bool InInvocationStubForIsolate(Isolate* isolate, uword pc); | |
126 | |
127 // Check if the specified pc is in the jump to exception handler stub. | 111 // Check if the specified pc is in the jump to exception handler stub. |
128 static bool InJumpToExceptionHandlerStub(uword pc); | 112 static bool InJumpToExceptionHandlerStub(uword pc); |
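A hypothetical sketch of the range check such a predicate performs, written against the shared accessors generated below; the real implementation lives in stub_code.cc and is not part of this diff:

    bool StubCode::InInvocationStub(uword pc) {
      uword entry = InvokeDartCodeEntryPoint();  // InvokeDartCode is the
      uword size = InvokeDartCodeSize();         // dart invocation stub.
      return (pc >= entry) && (pc < entry + size);
    }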
129 | 113 |
130 // Returns NULL if no stub found. | 114 // Returns NULL if no stub found. |
131 static const char* NameOfStub(uword entry_point); | 115 static const char* NameOfStub(uword entry_point); |
132 | 116 |
133 // Define the shared stub code accessors. | 117 // Define the shared stub code accessors. |
134 #define STUB_CODE_ACCESSOR(name) \ | 118 #define STUB_CODE_ACCESSOR(name) \ |
135 static StubEntry* name##_entry() { \ | 119 static StubEntry* name##_entry() { \ |
136 return name##_entry_; \ | 120 return name##_entry_; \ |
137 } \ | 121 } \ |
138 static const ExternalLabel& name##Label() { \ | 122 static const ExternalLabel& name##Label() { \ |
139 return name##_entry()->label(); \ | 123 return name##_entry()->label(); \ |
140 } \ | 124 } \ |
141 static uword name##EntryPoint() { \ | 125 static uword name##EntryPoint() { \ |
142 return name##_entry()->EntryPoint(); \ | 126 return name##_entry()->EntryPoint(); \ |
143 } \ | 127 } \ |
144 static intptr_t name##Size() { \ | 128 static intptr_t name##Size() { \ |
145 return name##_entry()->Size(); \ | 129 return name##_entry()->Size(); \ |
146 } | 130 } |
147 VM_STUB_CODE_LIST(STUB_CODE_ACCESSOR); | 131 VM_STUB_CODE_LIST(STUB_CODE_ACCESSOR); |
148 #undef STUB_CODE_ACCESSOR | 132 #undef STUB_CODE_ACCESSOR |
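Instantiated for one name from the list, say InvokeDartCode, the accessor macro expands to:

    static StubEntry* InvokeDartCode_entry() {
      return InvokeDartCode_entry_;
    }
    static const ExternalLabel& InvokeDartCodeLabel() {
      return InvokeDartCode_entry()->label();
    }
    static uword InvokeDartCodeEntryPoint() {
      return InvokeDartCode_entry()->EntryPoint();
    }
    static intptr_t InvokeDartCodeSize() {
      return InvokeDartCode_entry()->Size();
    }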
149 | 133 |
150 // Define the per-isolate stub code accessors. | 134 static RawCode* GetAllocationStubForClass(const Class& cls); |
151 #define STUB_CODE_ACCESSOR(name) \ | |
152 StubEntry* name##_entry() { \ | |
153 return name##_entry_; \ | |
154 } \ | |
155 const ExternalLabel& name##Label() { \ | |
156 return name##_entry()->label(); \ | |
157 } \ | |
158 uword name##EntryPoint() { \ | |
159 return name##_entry()->EntryPoint(); \ | |
160 } \ | |
161 intptr_t name##Size() { \ | |
162 return name##_entry()->Size(); \ | |
163 } | |
164 STUB_CODE_LIST(STUB_CODE_ACCESSOR); | |
165 #undef STUB_CODE_ACCESSOR | |
166 | 135 |
167 static RawCode* GetAllocationStubForClass(const Class& cls); | 136 static uword UnoptimizedStaticCallEntryPoint(intptr_t num_args_tested); |
168 RawCode* GetAllocateArrayStub(); | |
169 | |
170 uword UnoptimizedStaticCallEntryPoint(intptr_t num_args_tested); | |
171 | 137 |
172 static const intptr_t kNoInstantiator = 0; | 138 static const intptr_t kNoInstantiator = 0; |
173 | 139 |
174 static void EmitMegamorphicLookup( | 140 static void EmitMegamorphicLookup( |
175 Assembler*, Register recv, Register cache, Register target); | 141 Assembler*, Register recv, Register cache, Register target); |
176 | 142 |
177 private: | 143 private: |
178 void GenerateStubsFor(Isolate* isolate); | |
179 | |
180 friend class MegamorphicCacheTable; | 144 friend class MegamorphicCacheTable; |
181 | 145 |
182 static const intptr_t kStubCodeSize = 4 * KB; | 146 static const intptr_t kStubCodeSize = 4 * KB; |
183 | 147 |
184 #define STUB_CODE_GENERATE(name) \ | 148 #define STUB_CODE_GENERATE(name) \ |
185 static void Generate##name##Stub(Assembler* assembler); | 149 static void Generate##name##Stub(Assembler* assembler); |
186 VM_STUB_CODE_LIST(STUB_CODE_GENERATE); | 150 VM_STUB_CODE_LIST(STUB_CODE_GENERATE); |
187 STUB_CODE_LIST(STUB_CODE_GENERATE); | |
188 #undef STUB_CODE_GENERATE | 151 #undef STUB_CODE_GENERATE |
189 | 152 |
190 #define STUB_CODE_ENTRY(name) \ | 153 #define STUB_CODE_ENTRY(name) \ |
191 static StubEntry* name##_entry_; | 154 static StubEntry* name##_entry_; |
192 VM_STUB_CODE_LIST(STUB_CODE_ENTRY); | 155 VM_STUB_CODE_LIST(STUB_CODE_ENTRY); |
193 #undef STUB_CODE_ENTRY | 156 #undef STUB_CODE_ENTRY |
194 | 157 |
195 #define STUB_CODE_ENTRY(name) \ | |
196 StubEntry* name##_entry_; | |
197 STUB_CODE_LIST(STUB_CODE_ENTRY); | |
198 #undef STUB_CODE_ENTRY | |
199 Isolate* isolate_; | |
200 | |
201 enum RangeCollectionMode { | 158 enum RangeCollectionMode { |
202 kCollectRanges, | 159 kCollectRanges, |
203 kIgnoreRanges | 160 kIgnoreRanges |
204 }; | 161 }; |
205 | 162 |
206 // Generate the stub and finalize the generated code into the stub | 163 // Generate the stub and finalize the generated code into the stub |
207 // code executable area. | 164 // code executable area. |
208 static RawCode* Generate(const char* name, | 165 static RawCode* Generate(const char* name, |
209 void (*GenerateStub)(Assembler* assembler)); | 166 void (*GenerateStub)(Assembler* assembler)); |
210 | 167 |
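A hypothetical sketch of how InitOnce might pair each list entry with its Generate##name##Stub routine through this helper; the "_stub_" name prefix and the handle named code are assumptions, since the actual loop is in stub_code.cc:

    // Assumes a Code& handle 'code' allocated before the expansion.
    #define STUB_CODE_GENERATE(name) \
      code ^= Generate("_stub_"#name, StubCode::Generate##name##Stub); \
      name##_entry_ = new StubEntry(code);
    VM_STUB_CODE_LIST(STUB_CODE_GENERATE);
    #undef STUB_CODE_GENERATE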
211 static void GenerateMegamorphicMissStub(Assembler* assembler); | 168 static void GenerateMegamorphicMissStub(Assembler* assembler); |
212 static void GenerateAllocationStubForClass( | 169 static void GenerateAllocationStubForClass( |
213 Assembler* assembler, const Class& cls, | 170 Assembler* assembler, const Class& cls, |
214 uword* entry_patch_offset, uword* patch_code_pc_offset); | 171 uword* entry_patch_offset, uword* patch_code_pc_offset); |
215 static void GeneratePatchableAllocateArrayStub(Assembler* assembler, | |
216 uword* entry_patch_offset, uword* patch_code_pc_offset); | |
217 static void GenerateNArgsCheckInlineCacheStub( | 172 static void GenerateNArgsCheckInlineCacheStub( |
218 Assembler* assembler, | 173 Assembler* assembler, |
219 intptr_t num_args, | 174 intptr_t num_args, |
220 const RuntimeEntry& handle_ic_miss, | 175 const RuntimeEntry& handle_ic_miss, |
221 Token::Kind kind, | 176 Token::Kind kind, |
222 RangeCollectionMode range_collection_mode, | 177 RangeCollectionMode range_collection_mode, |
223 bool optimized = false); | 178 bool optimized = false); |
224 static void GenerateUsageCounterIncrement(Assembler* assembler, | 179 static void GenerateUsageCounterIncrement(Assembler* assembler, |
225 Register temp_reg); | 180 Register temp_reg); |
226 static void GenerateOptimizedUsageCounterIncrement(Assembler* assembler); | 181 static void GenerateOptimizedUsageCounterIncrement(Assembler* assembler); |
227 | 182 |
228 static void GenerateIdenticalWithNumberCheckStub( | 183 static void GenerateIdenticalWithNumberCheckStub( |
229 Assembler* assembler, | 184 Assembler* assembler, |
230 const Register left, | 185 const Register left, |
231 const Register right, | 186 const Register right, |
232 const Register temp1 = kNoRegister, | 187 const Register temp1 = kNoRegister, |
233 const Register temp2 = kNoRegister); | 188 const Register temp2 = kNoRegister); |
234 }; | 189 }; |
235 | 190 |
236 } // namespace dart | 191 } // namespace dart |
237 | 192 |
238 #endif // VM_STUB_CODE_H_ | 193 #endif // VM_STUB_CODE_H_ |