OLD | NEW |
1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #ifndef VM_FLOW_GRAPH_COMPILER_ARM_H_ | 5 #ifndef VM_FLOW_GRAPH_COMPILER_ARM_H_ |
6 #define VM_FLOW_GRAPH_COMPILER_ARM_H_ | 6 #define VM_FLOW_GRAPH_COMPILER_ARM_H_ |
7 | 7 |
8 #ifndef VM_FLOW_GRAPH_COMPILER_H_ | 8 #ifndef VM_FLOW_GRAPH_COMPILER_H_ |
9 #error Include flow_graph_compiler.h instead of flow_graph_compiler_arm.h. | 9 #error Include flow_graph_compiler.h instead of flow_graph_compiler_arm.h. |
10 #endif | 10 #endif |
11 | 11 |
12 #include "vm/flow_graph.h" | |
13 #include "vm/intermediate_language.h" | |
14 | |
15 namespace dart { | 12 namespace dart { |
16 | 13 |
17 class Assembler; | |
18 class Code; | 14 class Code; |
| 15 class FlowGraph; |
19 template <typename T> class GrowableArray; | 16 template <typename T> class GrowableArray; |
20 class ParsedFunction; | 17 class ParsedFunction; |
21 | 18 |
22 // Stubbed out implementation of graph compiler, bails out immediately if | 19 class FlowGraphCompiler : public ValueObject { |
23 // CompileGraph is called. The rest of the public API is UNIMPLEMENTED. | 20 private: |
24 class FlowGraphCompiler : public FlowGraphVisitor { | 21 struct BlockInfo : public ZoneAllocated { |
| 22 public: |
| 23 BlockInfo() : label() { } |
| 24 Label label; |
| 25 }; |
| 26 |
25 public: | 27 public: |
26 FlowGraphCompiler(Assembler* assembler, | 28 FlowGraphCompiler(Assembler* assembler, |
27 const FlowGraph& flow_graph, | 29 const FlowGraph& flow_graph, |
28 bool is_optimizing) | 30 bool is_optimizing); |
29 : FlowGraphVisitor(flow_graph.reverse_postorder()), | 31 |
30 parsed_function_(flow_graph.parsed_function()), | 32 ~FlowGraphCompiler(); |
31 is_optimizing_(is_optimizing) { | 33 |
32 } | 34 static bool SupportsUnboxedMints(); |
33 | 35 |
34 virtual ~FlowGraphCompiler() { } | 36 // Accessors. |
35 | 37 Assembler* assembler() const { return assembler_; } |
| 38 const ParsedFunction& parsed_function() const { return parsed_function_; } |
| 39 const GrowableArray<BlockEntryInstr*>& block_order() const { |
| 40 return block_order_; |
| 41 } |
| 42 DescriptorList* pc_descriptors_list() const { |
| 43 return pc_descriptors_list_; |
| 44 } |
| 45 BlockEntryInstr* current_block() const { return current_block_; } |
| 46 void set_current_block(BlockEntryInstr* value) { |
| 47 current_block_ = value; |
| 48 } |
36 static bool CanOptimize(); | 49 static bool CanOptimize(); |
37 bool CanOptimizeFunction() const; | 50 bool CanOptimizeFunction() const; |
38 | 51 |
| 52 bool is_optimizing() const { return is_optimizing_; } |
| 53 |
| 54 const GrowableArray<BlockInfo*>& block_info() const { return block_info_; } |
| 55 ParallelMoveResolver* parallel_move_resolver() { |
| 56 return &parallel_move_resolver_; |
| 57 } |
| 58 |
| 59 // Constructor is lightweight; major initialization work should occur here. |
| 60 // This makes it easier to measure time spent in the compiler. |
| 61 void InitCompiler(); |
| 62 |
39 void CompileGraph(); | 63 void CompileGraph(); |
40 | 64 |
| 65 void VisitBlocks(); |
| 66 |
| 67 // Bail out of the flow graph compiler. Does not return to the caller. |
| 68 void Bailout(const char* reason); |
| 69 |
| 70 void LoadDoubleOrSmiToFpu(FpuRegister result, |
| 71 Register reg, |
| 72 Register temp, |
| 73 Label* not_double_or_smi); |
| 74 |
| 75 // Returns 'true' if code generation for this function is complete, i.e., |
| 76 // no fall-through to regular code is needed. |
| 77 bool TryIntrinsify(); |
| 78 |
| 79 void GenerateCallRuntime(intptr_t token_pos, |
| 80 const RuntimeEntry& entry, |
| 81 LocationSummary* locs); |
| 82 |
| 83 void GenerateCall(intptr_t token_pos, |
| 84 const ExternalLabel* label, |
| 85 PcDescriptors::Kind kind, |
| 86 LocationSummary* locs); |
| 87 |
| 88 void GenerateDartCall(intptr_t deopt_id, |
| 89 intptr_t token_pos, |
| 90 const ExternalLabel* label, |
| 91 PcDescriptors::Kind kind, |
| 92 LocationSummary* locs); |
| 93 |
| 94 void GenerateAssertAssignable(intptr_t token_pos, |
| 95 const AbstractType& dst_type, |
| 96 const String& dst_name, |
| 97 LocationSummary* locs); |
| 98 |
| 99 void GenerateInstanceOf(intptr_t token_pos, |
| 100 const AbstractType& type, |
| 101 bool negate_result, |
| 102 LocationSummary* locs); |
| 103 |
| 104 void GenerateInstanceCall(intptr_t deopt_id, |
| 105 intptr_t token_pos, |
| 106 intptr_t argument_count, |
| 107 const Array& argument_names, |
| 108 LocationSummary* locs, |
| 109 const ICData& ic_data); |
| 110 |
| 111 void GenerateStaticCall(intptr_t deopt_id, |
| 112 intptr_t token_pos, |
| 113 const Function& function, |
| 114 intptr_t argument_count, |
| 115 const Array& argument_names, |
| 116 LocationSummary* locs); |
| 117 |
| 118 void GenerateNumberTypeCheck(Register kClassIdReg, |
| 119 const AbstractType& type, |
| 120 Label* is_instance_lbl, |
| 121 Label* is_not_instance_lbl); |
| 122 void GenerateStringTypeCheck(Register kClassIdReg, |
| 123 Label* is_instance_lbl, |
| 124 Label* is_not_instance_lbl); |
| 125 void GenerateListTypeCheck(Register kClassIdReg, |
| 126 Label* is_instance_lbl); |
| 127 |
| 128 void EmitComment(Instruction* instr); |
| 129 |
| 130 void EmitOptimizedInstanceCall(ExternalLabel* target_label, |
| 131 const ICData& ic_data, |
| 132 const Array& arguments_descriptor, |
| 133 intptr_t argument_count, |
| 134 intptr_t deopt_id, |
| 135 intptr_t token_pos, |
| 136 LocationSummary* locs); |
| 137 |
| 138 void EmitInstanceCall(ExternalLabel* target_label, |
| 139 const ICData& ic_data, |
| 140 const Array& arguments_descriptor, |
| 141 intptr_t argument_count, |
| 142 intptr_t deopt_id, |
| 143 intptr_t token_pos, |
| 144 LocationSummary* locs); |
| 145 |
| 146 void EmitMegamorphicInstanceCall(const ICData& ic_data, |
| 147 const Array& arguments_descriptor, |
| 148 intptr_t argument_count, |
| 149 intptr_t deopt_id, |
| 150 intptr_t token_pos, |
| 151 LocationSummary* locs); |
| 152 |
| 153 void EmitTestAndCall(const ICData& ic_data, |
| 154 Register class_id_reg, |
| 155 intptr_t arg_count, |
| 156 const Array& arg_names, |
| 157 Label* deopt, |
| 158 intptr_t deopt_id, |
| 159 intptr_t token_index, |
| 160 LocationSummary* locs); |
| 161 |
| 162 void EmitDoubleCompareBranch(Condition true_condition, |
| 163 FpuRegister left, |
| 164 FpuRegister right, |
| 165 BranchInstr* branch); |
| 166 void EmitDoubleCompareBool(Condition true_condition, |
| 167 FpuRegister left, |
| 168 FpuRegister right, |
| 169 Register result); |
| 170 |
| 171 void EmitEqualityRegConstCompare(Register reg, |
| 172 const Object& obj, |
| 173 bool needs_number_check); |
| 174 void EmitEqualityRegRegCompare(Register left, |
| 175 Register right, |
| 176 bool needs_number_check); |
| 177 // Implement equality: if any of the arguments is null, do an identity check. |
| 178 // The fall-through path calls super equality. |
| 179 void EmitSuperEqualityCallPrologue(Register result, Label* skip_call); |
| 180 |
| 181 intptr_t StackSize() const; |
| 182 |
| 183 // Returns assembler label associated with the given block entry. |
| 184 Label* GetBlockLabel(BlockEntryInstr* block_entry) const; |
| 185 |
| 186 // Returns true if there is a next block after the current one in |
| 187 // the block order and if it is the given block. |
| 188 bool IsNextBlock(BlockEntryInstr* block_entry) const; |
| 189 |
| 190 void AddExceptionHandler(intptr_t try_index, |
| 191 intptr_t outer_try_index, |
| 192 intptr_t pc_offset, |
| 193 const Array& handler_types); |
| 194 void AddCurrentDescriptor(PcDescriptors::Kind kind, |
| 195 intptr_t deopt_id, |
| 196 intptr_t token_pos); |
| 197 |
| 198 void RecordSafepoint(LocationSummary* locs); |
| 199 |
| 200 Label* AddDeoptStub(intptr_t deopt_id, DeoptReasonId reason); |
| 201 |
| 202 void AddDeoptIndexAtCall(intptr_t deopt_id, intptr_t token_pos); |
| 203 |
| 204 void AddSlowPathCode(SlowPathCode* slow_path); |
| 205 |
| 206 void FinalizeExceptionHandlers(const Code& code); |
41 void FinalizePcDescriptors(const Code& code); | 207 void FinalizePcDescriptors(const Code& code); |
| 208 void FinalizeDeoptInfo(const Code& code); |
42 void FinalizeStackmaps(const Code& code); | 209 void FinalizeStackmaps(const Code& code); |
43 void FinalizeVarDescriptors(const Code& code); | 210 void FinalizeVarDescriptors(const Code& code); |
44 void FinalizeExceptionHandlers(const Code& code); | |
45 void FinalizeComments(const Code& code); | 211 void FinalizeComments(const Code& code); |
| 212 void FinalizeStaticCallTargetsTable(const Code& code); |
| 213 |
| 214 const Class& double_class() const { return double_class_; } |
| 215 |
| 216 void SaveLiveRegisters(LocationSummary* locs); |
| 217 void RestoreLiveRegisters(LocationSummary* locs); |
| 218 |
| 219 // Returns true if the compiled function has a finally clause. |
| 220 bool HasFinally() const; |
| 221 |
| 222 intptr_t CurrentTryIndex() const { |
| 223 if (current_block_ == NULL) { |
| 224 return CatchClauseNode::kInvalidTryIndex; |
| 225 } |
| 226 return current_block_->try_index(); |
| 227 } |
| 228 |
| 229 bool may_reoptimize() const { return may_reoptimize_; } |
| 230 |
| 231 static const int kLocalsOffsetFromFP = (-1 * kWordSize); |
| 232 |
| 233 static Condition FlipCondition(Condition condition); |
| 234 |
| 235 static bool EvaluateCondition(Condition condition, intptr_t l, intptr_t r); |
| 236 |
| 237 // Array/list element address computations. |
| 238 static intptr_t DataOffsetFor(intptr_t cid); |
| 239 static intptr_t ElementSizeFor(intptr_t cid); |
| 240 static FieldAddress ElementAddressForIntIndex(intptr_t cid, |
| 241 Register array, |
| 242 intptr_t offset); |
| 243 static FieldAddress ElementAddressForRegIndex(intptr_t cid, |
| 244 Register array, |
| 245 Register index); |
46 | 246 |
47 private: | 247 private: |
48 // Bail out of the flow graph compiler. Does not return to the caller. | 248 void EmitFrameEntry(); |
49 void Bailout(const char* reason); | 249 |
50 | 250 void AddStaticCallTarget(const Function& function); |
| 251 |
| 252 void GenerateDeferredCode(); |
| 253 |
| 254 void EmitInstructionPrologue(Instruction* instr); |
| 255 void EmitInstructionEpilogue(Instruction* instr); |
| 256 |
| 257 // Emit code to load a Value into register 'dst'. |
| 258 void LoadValue(Register dst, Value* value); |
| 259 |
| 260 void EmitStaticCall(const Function& function, |
| 261 const Array& arguments_descriptor, |
| 262 intptr_t argument_count, |
| 263 intptr_t deopt_id, |
| 264 intptr_t token_pos, |
| 265 LocationSummary* locs); |
| 266 |
| 267 // Type checking helper methods. |
| 268 void CheckClassIds(Register class_id_reg, |
| 269 const GrowableArray<intptr_t>& class_ids, |
| 270 Label* is_instance_lbl, |
| 271 Label* is_not_instance_lbl); |
| 272 |
| 273 RawSubtypeTestCache* GenerateInlineInstanceof(intptr_t token_pos, |
| 274 const AbstractType& type, |
| 275 Label* is_instance_lbl, |
| 276 Label* is_not_instance_lbl); |
| 277 |
| 278 RawSubtypeTestCache* GenerateInstantiatedTypeWithArgumentsTest( |
| 279 intptr_t token_pos, |
| 280 const AbstractType& dst_type, |
| 281 Label* is_instance_lbl, |
| 282 Label* is_not_instance_lbl); |
| 283 |
| 284 bool GenerateInstantiatedTypeNoArgumentsTest(intptr_t token_pos, |
| 285 const AbstractType& dst_type, |
| 286 Label* is_instance_lbl, |
| 287 Label* is_not_instance_lbl); |
| 288 |
| 289 RawSubtypeTestCache* GenerateUninstantiatedTypeTest( |
| 290 intptr_t token_pos, |
| 291 const AbstractType& dst_type, |
| 292 Label* is_instance_lbl, |
| 293 Label* is_not_instance_lbl); |
| 294 |
| 295 RawSubtypeTestCache* GenerateSubtype1TestCacheLookup( |
| 296 intptr_t token_pos, |
| 297 const Class& type_class, |
| 298 Label* is_instance_lbl, |
| 299 Label* is_not_instance_lbl); |
| 300 |
| 301 enum TypeTestStubKind { |
| 302 kTestTypeOneArg, |
| 303 kTestTypeTwoArgs, |
| 304 kTestTypeThreeArgs, |
| 305 }; |
| 306 |
| 307 RawSubtypeTestCache* GenerateCallSubtypeTestStub(TypeTestStubKind test_kind, |
| 308 Register instance_reg, |
| 309 Register type_arguments_reg, |
| 310 Register temp_reg, |
| 311 Label* is_instance_lbl, |
| 312 Label* is_not_instance_lbl); |
| 313 |
| 314 // Returns true if checking against this type is a direct class id comparison. |
| 315 bool TypeCheckAsClassEquality(const AbstractType& type); |
| 316 |
| 317 void GenerateBoolToJump(Register bool_reg, Label* is_true, Label* is_false); |
| 318 |
| 319 void CopyParameters(); |
| 320 |
| 321 void GenerateInlinedGetter(intptr_t offset); |
| 322 void GenerateInlinedSetter(intptr_t offset); |
| 323 |
| 324 // Perform a greedy local register allocation. Consider all registers free. |
| 325 void AllocateRegistersLocally(Instruction* instr); |
| 326 |
| 327 // Map a block number in a forward iteration into the block number in the |
| 328 // corresponding reverse iteration. Used to obtain an index into |
| 329 // block_order for reverse iterations. |
| 330 intptr_t reverse_index(intptr_t index) const { |
| 331 return block_order_.length() - index - 1; |
| 332 } |
| 333 |
| 334 class Assembler* assembler_; |
51 const ParsedFunction& parsed_function_; | 335 const ParsedFunction& parsed_function_; |
| 336 const GrowableArray<BlockEntryInstr*>& block_order_; |
| 337 |
| 338 // Compiler-specific per-block state. Indexed by postorder block number |
| 339 // for convenience. This is not the block's index in the block order, |
| 340 // which is reverse postorder. |
| 341 BlockEntryInstr* current_block_; |
| 342 ExceptionHandlerList* exception_handlers_list_; |
| 343 DescriptorList* pc_descriptors_list_; |
| 344 StackmapTableBuilder* stackmap_table_builder_; |
| 345 GrowableArray<BlockInfo*> block_info_; |
| 346 GrowableArray<CompilerDeoptInfo*> deopt_infos_; |
| 347 GrowableArray<SlowPathCode*> slow_path_code_; |
| 348 // Stores: [code offset, function, null(code)]. |
| 349 const GrowableObjectArray& static_calls_target_table_; |
52 const bool is_optimizing_; | 350 const bool is_optimizing_; |
| 351 // Set to true if optimized code has IC calls. |
| 352 bool may_reoptimize_; |
| 353 |
| 354 const Class& double_class_; |
| 355 |
| 356 ParallelMoveResolver parallel_move_resolver_; |
| 357 |
| 358 // Currently instructions generate deopt stubs internally by |
| 359 // calling AddDeoptStub. To communicate the deoptimization environment |
| 360 // that should be used when deoptimizing, we store it in this variable. |
| 361 // In the future, AddDeoptStub should be moved out of the instruction template. |
| 362 Environment* pending_deoptimization_env_; |
53 | 363 |
54 DISALLOW_COPY_AND_ASSIGN(FlowGraphCompiler); | 364 DISALLOW_COPY_AND_ASSIGN(FlowGraphCompiler); |
55 }; | 365 }; |
56 | 366 |
57 } // namespace dart | 367 } // namespace dart |
58 | 368 |
59 #endif // VM_FLOW_GRAPH_COMPILER_ARM_H_ | 369 #endif // VM_FLOW_GRAPH_COMPILER_ARM_H_ |