| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 11 matching lines...) |
| 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
| 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
| 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
| 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
| 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
| 27 | 27 |
| 28 #ifndef V8_ARM_LITHIUM_CODEGEN_ARM_H_ | 28 #ifndef V8_ARM_LITHIUM_CODEGEN_ARM_H_ |
| 29 #define V8_ARM_LITHIUM_CODEGEN_ARM_H_ | 29 #define V8_ARM_LITHIUM_CODEGEN_ARM_H_ |
| 30 | 30 |
| 31 #include "arm/lithium-arm.h" | 31 #include "arm/lithium-arm.h" |
| 32 | 32 #include "arm/lithium-gap-resolver-arm.h" |
| 33 #include "deoptimizer.h" | 33 #include "deoptimizer.h" |
| 34 #include "safepoint-table.h" | 34 #include "safepoint-table.h" |
| 35 #include "scopes.h" | 35 #include "scopes.h" |
| 36 | 36 |
| 37 namespace v8 { | 37 namespace v8 { |
| 38 namespace internal { | 38 namespace internal { |
| 39 | 39 |
| 40 // Forward declarations. | 40 // Forward declarations. |
| 41 class LDeferredCode; | 41 class LDeferredCode; |
| 42 class LGapNode; | |
| 43 class SafepointGenerator; | 42 class SafepointGenerator; |
| 44 | 43 |
| 45 class LGapResolver BASE_EMBEDDED { | |
| 46 public: | |
| 47 LGapResolver(); | |
| 48 const ZoneList<LMoveOperands>* Resolve(const ZoneList<LMoveOperands>* moves, | |
| 49 LOperand* marker_operand); | |
| 50 | |
| 51 private: | |
| 52 LGapNode* LookupNode(LOperand* operand); | |
| 53 bool CanReach(LGapNode* a, LGapNode* b, int visited_id); | |
| 54 bool CanReach(LGapNode* a, LGapNode* b); | |
| 55 void RegisterMove(LMoveOperands move); | |
| 56 void AddResultMove(LOperand* from, LOperand* to); | |
| 57 void AddResultMove(LGapNode* from, LGapNode* to); | |
| 58 void ResolveCycle(LGapNode* start, LOperand* marker_operand); | |
| 59 | |
| 60 ZoneList<LGapNode*> nodes_; | |
| 61 ZoneList<LGapNode*> identified_cycles_; | |
| 62 ZoneList<LMoveOperands> result_; | |
| 63 int next_visited_id_; | |
| 64 }; | |
| 65 | |
| 66 | |
| 67 class LCodeGen BASE_EMBEDDED { | 44 class LCodeGen BASE_EMBEDDED { |
| 68 public: | 45 public: |
| 69 LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info) | 46 LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info) |
| 70 : chunk_(chunk), | 47 : chunk_(chunk), |
| 71 masm_(assembler), | 48 masm_(assembler), |
| 72 info_(info), | 49 info_(info), |
| 73 current_block_(-1), | 50 current_block_(-1), |
| 74 current_instruction_(-1), | 51 current_instruction_(-1), |
| 75 instructions_(chunk->instructions()), | 52 instructions_(chunk->instructions()), |
| 76 deoptimizations_(4), | 53 deoptimizations_(4), |
| 77 deoptimization_literals_(8), | 54 deoptimization_literals_(8), |
| 78 inlined_function_count_(0), | 55 inlined_function_count_(0), |
| 79 scope_(chunk->graph()->info()->scope()), | 56 scope_(chunk->graph()->info()->scope()), |
| 80 status_(UNUSED), | 57 status_(UNUSED), |
| 81 deferred_(8), | 58 deferred_(8), |
| 82 osr_pc_offset_(-1) { | 59 osr_pc_offset_(-1), |
| 60 resolver_(this) { |
| 83 PopulateDeoptimizationLiteralsWithInlinedFunctions(); | 61 PopulateDeoptimizationLiteralsWithInlinedFunctions(); |
| 84 } | 62 } |
| 85 | 63 |
| 64 |
| 65 // Simple accessors. |
| 66 MacroAssembler* masm() const { return masm_; } |
| 67 |
| 68 // Support for converting LOperands to assembler types. |
| 69 // LOperand must be a register. |
| 70 Register ToRegister(LOperand* op) const; |
| 71 |
| 72 // LOperand is loaded into scratch, unless already a register. |
| 73 Register EmitLoadRegister(LOperand* op, Register scratch); |
| 74 |
| 75 // LOperand must be a double register. |
| 76 DoubleRegister ToDoubleRegister(LOperand* op) const; |
| 77 |
| 78 // LOperand is loaded into dbl_scratch, unless already a double register. |
| 79 DoubleRegister EmitLoadDoubleRegister(LOperand* op, |
| 80 SwVfpRegister flt_scratch, |
| 81 DoubleRegister dbl_scratch); |
| 82 int ToInteger32(LConstantOperand* op) const; |
| 83 Operand ToOperand(LOperand* op); |
| 84 MemOperand ToMemOperand(LOperand* op) const; |
| 85 // Returns a MemOperand pointing to the high word of a DoubleStackSlot. |
| 86 MemOperand ToHighMemOperand(LOperand* op) const; |
| 87 |
| 86 // Try to generate code for the entire chunk, but it may fail if the | 88 // Try to generate code for the entire chunk, but it may fail if the |
| 87 // chunk contains constructs we cannot handle. Returns true if the | 89 // chunk contains constructs we cannot handle. Returns true if the |
| 88 // code generation attempt succeeded. | 90 // code generation attempt succeeded. |
| 89 bool GenerateCode(); | 91 bool GenerateCode(); |
| 90 | 92 |
| 91 // Finish the code by setting stack height, safepoint, and bailout | 93 // Finish the code by setting stack height, safepoint, and bailout |
| 92 // information on it. | 94 // information on it. |
| 93 void FinishCode(Handle<Code> code); | 95 void FinishCode(Handle<Code> code); |
| 94 | 96 |
| 95 // Deferred code support. | 97 // Deferred code support. |
| (...skipping 33 matching lines...) |
| 129 bool is_done() const { return status_ == DONE; } | 131 bool is_done() const { return status_ == DONE; } |
| 130 bool is_aborted() const { return status_ == ABORTED; } | 132 bool is_aborted() const { return status_ == ABORTED; } |
| 131 | 133 |
| 132 int strict_mode_flag() const { | 134 int strict_mode_flag() const { |
| 133 return info_->is_strict() ? kStrictMode : kNonStrictMode; | 135 return info_->is_strict() ? kStrictMode : kNonStrictMode; |
| 134 } | 136 } |
| 135 | 137 |
| 136 LChunk* chunk() const { return chunk_; } | 138 LChunk* chunk() const { return chunk_; } |
| 137 Scope* scope() const { return scope_; } | 139 Scope* scope() const { return scope_; } |
| 138 HGraph* graph() const { return chunk_->graph(); } | 140 HGraph* graph() const { return chunk_->graph(); } |
| 139 MacroAssembler* masm() const { return masm_; } | |
| 140 | 141 |
| 141 Register scratch0() { return r9; } | 142 Register scratch0() { return r9; } |
| 142 DwVfpRegister double_scratch0() { return d0; } | 143 DwVfpRegister double_scratch0() { return d0; } |
| 143 | 144 |
| 144 int GetNextEmittedBlock(int block); | 145 int GetNextEmittedBlock(int block); |
| 145 LInstruction* GetNextInstruction(); | 146 LInstruction* GetNextInstruction(); |
| 146 | 147 |
| 147 void EmitClassOfTest(Label* if_true, | 148 void EmitClassOfTest(Label* if_true, |
| 148 Label* if_false, | 149 Label* if_false, |
| 149 Handle<String> class_name, | 150 Handle<String> class_name, |
| (...skipping 45 matching lines...) |
| 195 LOperand* op, | 196 LOperand* op, |
| 196 bool is_tagged); | 197 bool is_tagged); |
| 197 void PopulateDeoptimizationData(Handle<Code> code); | 198 void PopulateDeoptimizationData(Handle<Code> code); |
| 198 int DefineDeoptimizationLiteral(Handle<Object> literal); | 199 int DefineDeoptimizationLiteral(Handle<Object> literal); |
| 199 | 200 |
| 200 void PopulateDeoptimizationLiteralsWithInlinedFunctions(); | 201 void PopulateDeoptimizationLiteralsWithInlinedFunctions(); |
| 201 | 202 |
| 202 Register ToRegister(int index) const; | 203 Register ToRegister(int index) const; |
| 203 DoubleRegister ToDoubleRegister(int index) const; | 204 DoubleRegister ToDoubleRegister(int index) const; |
| 204 | 205 |
| 205 // LOperand must be a register. | |
| 206 Register ToRegister(LOperand* op) const; | |
| 207 | |
| 208 // LOperand is loaded into scratch, unless already a register. | |
| 209 Register EmitLoadRegister(LOperand* op, Register scratch); | |
| 210 | |
| 211 // LOperand must be a double register. | |
| 212 DoubleRegister ToDoubleRegister(LOperand* op) const; | |
| 213 | |
| 214 // LOperand is loaded into dbl_scratch, unless already a double register. | |
| 215 DoubleRegister EmitLoadDoubleRegister(LOperand* op, | |
| 216 SwVfpRegister flt_scratch, | |
| 217 DoubleRegister dbl_scratch); | |
| 218 | |
| 219 int ToInteger32(LConstantOperand* op) const; | |
| 220 Operand ToOperand(LOperand* op); | |
| 221 MemOperand ToMemOperand(LOperand* op) const; | |
| 222 | |
| 223 // Specific math operations - used from DoUnaryMathOperation. | 206 // Specific math operations - used from DoUnaryMathOperation. |
| 224 void EmitIntegerMathAbs(LUnaryMathOperation* instr); | 207 void EmitIntegerMathAbs(LUnaryMathOperation* instr); |
| 225 void DoMathAbs(LUnaryMathOperation* instr); | 208 void DoMathAbs(LUnaryMathOperation* instr); |
| 226 void EmitVFPTruncate(VFPRoundingMode rounding_mode, | 209 void EmitVFPTruncate(VFPRoundingMode rounding_mode, |
| 227 SwVfpRegister result, | 210 SwVfpRegister result, |
| 228 DwVfpRegister double_input, | 211 DwVfpRegister double_input, |
| 229 Register scratch1, | 212 Register scratch1, |
| 230 Register scratch2); | 213 Register scratch2); |
| 231 void DoMathFloor(LUnaryMathOperation* instr); | 214 void DoMathFloor(LUnaryMathOperation* instr); |
| 232 void DoMathSqrt(LUnaryMathOperation* instr); | 215 void DoMathSqrt(LUnaryMathOperation* instr); |
| (...skipping 90 matching lines...) |
| 323 private: | 306 private: |
| 324 LCodeGen* codegen_; | 307 LCodeGen* codegen_; |
| 325 Label entry_; | 308 Label entry_; |
| 326 Label exit_; | 309 Label exit_; |
| 327 Label* external_exit_; | 310 Label* external_exit_; |
| 328 }; | 311 }; |
| 329 | 312 |
| 330 } } // namespace v8::internal | 313 } } // namespace v8::internal |
| 331 | 314 |
| 332 #endif // V8_ARM_LITHIUM_CODEGEN_ARM_H_ | 315 #endif // V8_ARM_LITHIUM_CODEGEN_ARM_H_ |
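
For context on what the new include brings in: the patch deletes the node-based LGapResolver declaration from this header, adds `#include "arm/lithium-gap-resolver-arm.h"`, and initializes a `resolver_(this)` member in the LCodeGen constructor, which implies the relocated class now takes its owning code generator in its constructor. Below is a minimal sketch of what the relocated header might declare; the `Resolve(LParallelMove*)` entry point and the `cgen_` member name are assumptions for illustration, not taken from this diff (the old interface resolved a `ZoneList<LMoveOperands>`).

```cpp
// Hypothetical sketch of arm/lithium-gap-resolver-arm.h -- not verbatim
// from the patch; signatures other than the owner-taking constructor
// are assumed.
#include "arm/lithium-arm.h"

namespace v8 {
namespace internal {

// Forward declaration: the resolver emits code through its owning LCodeGen.
class LCodeGen;

class LGapResolver BASE_EMBEDDED {
 public:
  // resolver_(this) in the LCodeGen initializer list implies the
  // constructor receives the owning code generator.
  explicit LGapResolver(LCodeGen* owner);

  // Resolve a set of parallel moves, emitting the assembler instructions
  // directly instead of returning a reordered move list as the old
  // in-header resolver did. (Assumed signature.)
  void Resolve(LParallelMove* parallel_move);

 private:
  LCodeGen* cgen_;  // Assumed member name for the owning code generator.
};

} }  // namespace v8::internal
```

Moving the resolver behind its own header keeps lithium-codegen-arm.h limited to the LCodeGen declaration and lets the code generator own a resolver instance directly rather than sharing the free-standing, marker-operand-based implementation that previously lived here.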