// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.

#ifndef VM_FLOW_GRAPH_COMPILER_X64_H_
#define VM_FLOW_GRAPH_COMPILER_X64_H_

#ifndef VM_FLOW_GRAPH_COMPILER_H_
#error Include flow_graph_compiler.h instead of flow_graph_compiler_x64.h.
#endif

namespace dart {

class Code;
class FlowGraph;
template <typename T> class GrowableArray;
class ParsedFunction;

class FlowGraphCompiler : public ValueObject {
 private:
  struct BlockInfo : public ZoneAllocated {
   public:
    BlockInfo() : label() { }
    Label label;
  };

 public:
  FlowGraphCompiler(Assembler* assembler,
                    const FlowGraph& flow_graph,
                    bool is_optimizing);

  ~FlowGraphCompiler();

  static bool SupportsUnboxedMints();

  // Accessors.
  Assembler* assembler() const { return assembler_; }
  const ParsedFunction& parsed_function() const { return parsed_function_; }
  const GrowableArray<BlockEntryInstr*>& block_order() const {
    return block_order_;
  }
  DescriptorList* pc_descriptors_list() const {
    return pc_descriptors_list_;
  }
  BlockEntryInstr* current_block() const { return current_block_; }
  void set_current_block(BlockEntryInstr* value) {
    current_block_ = value;
  }
  static bool CanOptimize();
  bool CanOptimizeFunction() const;
  bool is_optimizing() const { return is_optimizing_; }

  const GrowableArray<BlockInfo*>& block_info() const { return block_info_; }
  ParallelMoveResolver* parallel_move_resolver() {
    return &parallel_move_resolver_;
  }

  // The constructor is lightweight; major initialization work should occur
  // here.  This makes it easier to measure time spent in the compiler.
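  //
  // An illustrative call sequence (a sketch only, not mandated by this
  // header):
  //
  //   FlowGraphCompiler compiler(assembler, flow_graph, is_optimizing);
  //   // ... start the compile timer here ...
  //   compiler.InitCompiler();
  //   compiler.CompileGraph();
  //   // ... stop the compile timer here ...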
  void InitCompiler();

  void CompileGraph();

  void VisitBlocks();

  // Bail out of the flow graph compiler. Does not return to the caller.
  void Bailout(const char* reason);

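  // Loads the value in 'reg' into XMM register 'result': a Smi is converted
  // to a double, a Double is unboxed, and anything else jumps to
  // 'not_double_or_smi'.  'temp' may be clobbered by the class id check.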
  void LoadDoubleOrSmiToXmm(XmmRegister result,
                            Register reg,
                            Register temp,
                            Label* not_double_or_smi);

  // Returns 'true' if code generation for this function is complete, i.e.,
  // no fall-through to regular code is needed.
  bool TryIntrinsify();

  void GenerateCallRuntime(intptr_t token_pos,
                           const RuntimeEntry& entry,
                           LocationSummary* locs);

  void GenerateCall(intptr_t token_pos,
                    const ExternalLabel* label,
                    PcDescriptors::Kind kind,
                    LocationSummary* locs);

  void GenerateDartCall(intptr_t deopt_id,
                        intptr_t token_pos,
                        const ExternalLabel* label,
                        PcDescriptors::Kind kind,
                        LocationSummary* locs);

  void GenerateAssertAssignable(intptr_t token_pos,
                                const AbstractType& dst_type,
                                const String& dst_name,
                                LocationSummary* locs);

  void GenerateInstanceOf(intptr_t token_pos,
                          const AbstractType& type,
                          bool negate_result,
                          LocationSummary* locs);

  void GenerateInstanceCall(intptr_t deopt_id,
                            intptr_t token_pos,
                            intptr_t argument_count,
                            const Array& argument_names,
                            LocationSummary* locs,
                            const ICData& ic_data);

  void GenerateStaticCall(intptr_t deopt_id,
                          intptr_t token_pos,
                          const Function& function,
                          intptr_t argument_count,
                          const Array& argument_names,
                          LocationSummary* locs);

  void GenerateNumberTypeCheck(Register kClassIdReg,
                               const AbstractType& type,
                               Label* is_instance_lbl,
                               Label* is_not_instance_lbl);
  void GenerateStringTypeCheck(Register kClassIdReg,
                               Label* is_instance_lbl,
                               Label* is_not_instance_lbl);
  void GenerateListTypeCheck(Register kClassIdReg,
                             Label* is_instance_lbl);

  void EmitComment(Instruction* instr);

  void EmitOptimizedInstanceCall(ExternalLabel* target_label,
                                 const ICData& ic_data,
                                 const Array& arguments_descriptor,
                                 intptr_t argument_count,
                                 intptr_t deopt_id,
                                 intptr_t token_pos,
                                 LocationSummary* locs);

  void EmitInstanceCall(ExternalLabel* target_label,
                        const ICData& ic_data,
                        const Array& arguments_descriptor,
                        intptr_t argument_count,
                        intptr_t deopt_id,
                        intptr_t token_pos,
                        LocationSummary* locs);

  void EmitMegamorphicInstanceCall(const ICData& ic_data,
                                   const Array& arguments_descriptor,
                                   intptr_t argument_count,
                                   intptr_t deopt_id,
                                   intptr_t token_pos,
                                   LocationSummary* locs);

  void EmitTestAndCall(const ICData& ic_data,
                       Register class_id_reg,
                       intptr_t arg_count,
                       const Array& arg_names,
                       Label* deopt,
                       intptr_t deopt_id,
                       intptr_t token_index,
                       LocationSummary* locs);

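  // Compare two unboxed doubles in XMM registers: EmitDoubleCompareBranch
  // branches on the outcome, while EmitDoubleCompareBool materializes it as
  // a Bool object in 'result'.  'true_condition' is the condition under
  // which the comparison yields true.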
  void EmitDoubleCompareBranch(Condition true_condition,
                               XmmRegister left,
                               XmmRegister right,
                               BranchInstr* branch);
  void EmitDoubleCompareBool(Condition true_condition,
                             XmmRegister left,
                             XmmRegister right,
                             Register result);

  void EmitEqualityRegConstCompare(Register reg,
                                   const Object& obj,
                                   bool needs_number_check);
  void EmitEqualityRegRegCompare(Register left,
                                 Register right,
                                 bool needs_number_check);
  void EmitEqualityRegConstCompare(Register reg, const Object& obj);
  // Implement equality: if either of the arguments is null, do an identity
  // check.  Fall-through calls super equality.
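  //
  // Roughly, the emitted prologue behaves like this sketch:
  //
  //   if (left == null || right == null) {
  //     result = (left == right) ? Bool::True() : Bool::False();
  //     goto skip_call;  // identity already answered the question
  //   }
  //   // fall through: emit the super '==' call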
  void EmitSuperEqualityCallPrologue(Register result, Label* skip_call);

  intptr_t StackSize() const;

  // Returns assembler label associated with the given block entry.
  Label* GetBlockLabel(BlockEntryInstr* block_entry) const;

  // Returns true if there is a next block after the current one in
  // the block order and if it is the given block.
  bool IsNextBlock(BlockEntryInstr* block_entry) const;

  void AddExceptionHandler(intptr_t try_index,
                           intptr_t outer_try_index,
                           intptr_t pc_offset,
                           const Array& handler_types);
  void AddCurrentDescriptor(PcDescriptors::Kind kind,
                            intptr_t deopt_id,
                            intptr_t token_pos);

  void RecordSafepoint(LocationSummary* locs);

  Label* AddDeoptStub(intptr_t deopt_id, DeoptReasonId reason);

  void AddDeoptIndexAtCall(intptr_t deopt_id, intptr_t token_pos);

  void AddSlowPathCode(SlowPathCode* slow_path);

  void FinalizeExceptionHandlers(const Code& code);
  void FinalizePcDescriptors(const Code& code);
  void FinalizeDeoptInfo(const Code& code);
  void FinalizeStackmaps(const Code& code);
  void FinalizeVarDescriptors(const Code& code);
  void FinalizeComments(const Code& code);
  void FinalizeStaticCallTargetsTable(const Code& code);

  const Class& double_class() const { return double_class_; }

  // Returns true if the compiled function has a finally clause.
  bool HasFinally() const;

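  // Offset of the first local variable slot, one word below the frame
  // pointer.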
  static const int kLocalsOffsetFromFP = (-1 * kWordSize);

  void SaveLiveRegisters(LocationSummary* locs);
  void RestoreLiveRegisters(LocationSummary* locs);

  intptr_t CurrentTryIndex() const {
    if (current_block_ == NULL) {
      return CatchClauseNode::kInvalidTryIndex;
    }
    return current_block_->try_index();
  }

  bool may_reoptimize() const { return may_reoptimize_; }

  static Condition FlipCondition(Condition condition);

  static bool EvaluateCondition(Condition condition, intptr_t l, intptr_t r);

  // Array/list element address computations.
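  // For instance, element 'i' of an array with class id 'cid' is expected
  // to live at
  //   FieldAddress(array, DataOffsetFor(cid) + i * ElementSizeFor(cid))
  // (an illustrative sketch of the address arithmetic, not a definition).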
238 static intptr_t DataOffsetFor(intptr_t cid); | |
239 static intptr_t ElementSizeFor(intptr_t cid); | |
240 static FieldAddress ElementAddressForIntIndex(intptr_t cid, | |
241 Register array, | |
242 intptr_t offset); | |
243 static FieldAddress ElementAddressForRegIndex(intptr_t cid, | |
244 Register array, | |
245 Register index); | |
246 static Address ExternalElementAddressForIntIndex(intptr_t cid, | |
247 Register array, | |
248 intptr_t offset); | |
249 static Address ExternalElementAddressForRegIndex(intptr_t cid, | |
250 Register array, | |
251 Register index); | |

 private:
  void EmitFrameEntry();

  void AddStaticCallTarget(const Function& function);

  void GenerateDeferredCode();

  void EmitInstructionPrologue(Instruction* instr);
  void EmitInstructionEpilogue(Instruction* instr);

  // Emit code to load a Value into register 'dst'.
  void LoadValue(Register dst, Value* value);

  void EmitStaticCall(const Function& function,
                      const Array& arguments_descriptor,
                      intptr_t argument_count,
                      intptr_t deopt_id,
                      intptr_t token_pos,
                      LocationSummary* locs);

  // Type checking helper methods.
  void CheckClassIds(Register class_id_reg,
                     const GrowableArray<intptr_t>& class_ids,
                     Label* is_instance_lbl,
                     Label* is_not_instance_lbl);

  RawSubtypeTestCache* GenerateInlineInstanceof(intptr_t token_pos,
                                                const AbstractType& type,
                                                Label* is_instance_lbl,
                                                Label* is_not_instance_lbl);

  RawSubtypeTestCache* GenerateInstantiatedTypeWithArgumentsTest(
      intptr_t token_pos,
      const AbstractType& dst_type,
      Label* is_instance_lbl,
      Label* is_not_instance_lbl);

  bool GenerateInstantiatedTypeNoArgumentsTest(intptr_t token_pos,
                                               const AbstractType& dst_type,
                                               Label* is_instance_lbl,
                                               Label* is_not_instance_lbl);

  RawSubtypeTestCache* GenerateUninstantiatedTypeTest(
      intptr_t token_pos,
      const AbstractType& dst_type,
      Label* is_instance_lbl,
      Label* is_not_instance_lbl);

  RawSubtypeTestCache* GenerateSubtype1TestCacheLookup(
      intptr_t token_pos,
      const Class& type_class,
      Label* is_instance_lbl,
      Label* is_not_instance_lbl);

  enum TypeTestStubKind {
    kTestTypeOneArg,
    kTestTypeTwoArgs,
    kTestTypeThreeArgs,
  };

  RawSubtypeTestCache* GenerateCallSubtypeTestStub(TypeTestStubKind test_kind,
                                                   Register instance_reg,
                                                   Register type_arguments_reg,
                                                   Register temp_reg,
                                                   Label* is_instance_lbl,
                                                   Label* is_not_instance_lbl);

  // Returns true if checking against this type is a direct class id
  // comparison.
  bool TypeCheckAsClassEquality(const AbstractType& type);

  void GenerateBoolToJump(Register bool_reg, Label* is_true, Label* is_false);

  void CopyParameters();

  void GenerateInlinedGetter(intptr_t offset);
  void GenerateInlinedSetter(intptr_t offset);

  // Map a block number in a forward iteration into the block number in the
  // corresponding reverse iteration.  Used to obtain an index into
  // block_order for reverse iterations.
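  // For example, with block_order_.length() == 5, reverse_index(0) == 4 and
  // reverse_index(4) == 0.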
  intptr_t reverse_index(intptr_t index) const {
    return block_order_.length() - index - 1;
  }

  // Perform a greedy local register allocation.  Consider all registers free.
  void AllocateRegistersLocally(Instruction* instr);

  class Assembler* assembler_;
  const ParsedFunction& parsed_function_;
  const GrowableArray<BlockEntryInstr*>& block_order_;

  BlockEntryInstr* current_block_;
  ExceptionHandlerList* exception_handlers_list_;
  DescriptorList* pc_descriptors_list_;
  StackmapTableBuilder* stackmap_table_builder_;
  // Compiler-specific per-block state.  Indexed by postorder block number
  // for convenience; this is not the block's index in the block order,
  // which is reverse postorder.
  GrowableArray<BlockInfo*> block_info_;
  GrowableArray<CompilerDeoptInfo*> deopt_infos_;
  GrowableArray<SlowPathCode*> slow_path_code_;
  // Stores: [code offset, function, null(code)].
  const GrowableObjectArray& static_calls_target_table_;
  const bool is_optimizing_;
  // Set to true if optimized code has IC calls.
  bool may_reoptimize_;

  const Class& double_class_;

  ParallelMoveResolver parallel_move_resolver_;

  // Currently, instructions generate deopt stubs internally by calling
  // AddDeoptStub.  The deoptimization environment that should be used when
  // deoptimizing is communicated by storing it in this variable.  In the
  // future, AddDeoptStub should be moved out of the instruction template.
  Environment* pending_deoptimization_env_;

  DISALLOW_COPY_AND_ASSIGN(FlowGraphCompiler);
};

}  // namespace dart

#endif  // VM_FLOW_GRAPH_COMPILER_X64_H_