| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| 11 // with the distribution. | 11 // with the distribution. |
| 12 // * Neither the name of Google Inc. nor the names of its | 12 // * Neither the name of Google Inc. nor the names of its |
| 13 // contributors may be used to endorse or promote products derived | 13 // contributors may be used to endorse or promote products derived |
| 14 // from this software without specific prior written permission. | 14 // from this software without specific prior written permission. |
| 15 // | 15 // |
| 16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | 16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS |
| 17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | 17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT |
| 18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | 18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR |
| 19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | 19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT |
| 20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | 20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, |
| 21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | 21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT |
| 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
| 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
| 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
| 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
| 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
| 27 | 27 |
| 28 #ifndef V8_ARM_LITHIUM_CODEGEN_ARM_H_ | 28 #ifndef V8_A64_LITHIUM_CODEGEN_A64_H_ |
| 29 #define V8_ARM_LITHIUM_CODEGEN_ARM_H_ | 29 #define V8_A64_LITHIUM_CODEGEN_A64_H_ |
| 30 | 30 |
| 31 #include "arm/lithium-arm.h" | 31 #include "a64/lithium-a64.h" |
| 32 | 32 |
| 33 #include "arm/lithium-gap-resolver-arm.h" | 33 #include "a64/lithium-gap-resolver-a64.h" |
| 34 #include "deoptimizer.h" | 34 #include "deoptimizer.h" |
| 35 #include "safepoint-table.h" | 35 #include "safepoint-table.h" |
| 36 #include "scopes.h" | 36 #include "scopes.h" |
| 37 #include "v8utils.h" | 37 #include "v8utils.h" |
| 38 | 38 |
| 39 namespace v8 { | 39 namespace v8 { |
| 40 namespace internal { | 40 namespace internal { |
| 41 | 41 |
| 42 // Forward declarations. | 42 // Forward declarations. |
| 43 class LDeferredCode; | 43 class LDeferredCode; |
| 44 class SafepointGenerator; | 44 class SafepointGenerator; |
| 45 class BranchGenerator; |
| 45 | 46 |
| 46 class LCodeGen BASE_EMBEDDED { | 47 class LCodeGen BASE_EMBEDDED { |
| 47 public: | 48 public: |
| 48 LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info) | 49 LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info) |
| 49 : zone_(info->zone()), | 50 : zone_(info->zone()), |
| 50 chunk_(static_cast<LPlatformChunk*>(chunk)), | 51 chunk_(static_cast<LPlatformChunk*>(chunk)), |
| 51 masm_(assembler), | 52 masm_(assembler), |
| 52 info_(info), | 53 info_(info), |
| 53 current_block_(-1), | 54 current_block_(-1), |
| 54 current_instruction_(-1), | 55 current_instruction_(-1), |
| 55 instructions_(chunk->instructions()), | 56 instructions_(chunk->instructions()), |
| 56 deoptimizations_(4, info->zone()), | 57 deoptimizations_(4, info->zone()), |
| 57 deopt_jump_table_(4, info->zone()), | 58 deopt_jump_table_(4, info->zone()), |
| 58 deoptimization_literals_(8, info->zone()), | 59 deoptimization_literals_(8, info->zone()), |
| 59 inlined_function_count_(0), | 60 inlined_function_count_(0), |
| 60 scope_(info->scope()), | 61 scope_(info->scope()), |
| 61 status_(UNUSED), | 62 status_(UNUSED), |
| 62 translations_(info->zone()), | 63 translations_(info->zone()), |
| 63 deferred_(8, info->zone()), | 64 deferred_(8, info->zone()), |
| 64 osr_pc_offset_(-1), | 65 osr_pc_offset_(-1), |
| 65 last_lazy_deopt_pc_(0), | 66 last_lazy_deopt_pc_(0), |
| 66 frame_is_built_(false), | 67 frame_is_built_(false), |
| 67 safepoints_(info->zone()), | 68 safepoints_(info->zone()), |
| 68 resolver_(this), | 69 resolver_(this), |
| 69 expected_safepoint_kind_(Safepoint::kSimple) { | 70 expected_safepoint_kind_(Safepoint::kSimple) { |
| 70 PopulateDeoptimizationLiteralsWithInlinedFunctions(); | 71 PopulateDeoptimizationLiteralsWithInlinedFunctions(); |
| 71 } | 72 } |
| 72 | 73 |
| 73 | |
| 74 // Simple accessors. | 74 // Simple accessors. |
| 75 MacroAssembler* masm() const { return masm_; } | 75 MacroAssembler* masm() const { return masm_; } |
| 76 CompilationInfo* info() const { return info_; } | 76 CompilationInfo* info() const { return info_; } |
| 77 Zone* zone() const { return zone_; } |
| 78 HGraph* graph() const { return chunk()->graph(); } |
| 79 LPlatformChunk* chunk() const { return chunk_; } |
| 77 Isolate* isolate() const { return info_->isolate(); } | 80 Isolate* isolate() const { return info_->isolate(); } |
| 78 Factory* factory() const { return isolate()->factory(); } | 81 Factory* factory() const { return isolate()->factory(); } |
| 82 Scope* scope() const { return scope_; } |
| 79 Heap* heap() const { return isolate()->heap(); } | 83 Heap* heap() const { return isolate()->heap(); } |
| 80 Zone* zone() const { return zone_; } | |
| 81 | 84 |
| 82 // TODO(svenpanne) Use this consistently. | 85 // TODO(svenpanne) Use this consistently. |
| 83 int LookupDestination(int block_id) const { | 86 int LookupDestination(int block_id) const { |
| 84 return chunk()->LookupDestination(block_id); | 87 return chunk()->LookupDestination(block_id); |
| 85 } | 88 } |
| 86 | 89 |
| 87 bool IsNextEmittedBlock(int block_id) const { | 90 bool IsNextEmittedBlock(int block_id) const { |
| 88 return LookupDestination(block_id) == GetNextEmittedBlock(); | 91 return LookupDestination(block_id) == GetNextEmittedBlock(); |
| 89 } | 92 } |
| 90 | 93 |
| 91 bool NeedsEagerFrame() const { | 94 bool NeedsEagerFrame() const { |
| 92 return GetStackSlotCount() > 0 || | 95 return GetStackSlotCount() > 0 || |
| 93 info()->is_non_deferred_calling() || | 96 info()->is_non_deferred_calling() || |
| 94 !info()->IsStub() || | 97 !info()->IsStub() || |
| 95 info()->requires_frame(); | 98 info()->requires_frame(); |
| 96 } | 99 } |
| 97 bool NeedsDeferredFrame() const { | 100 bool NeedsDeferredFrame() const { |
| 98 return !NeedsEagerFrame() && info()->is_deferred_calling(); | 101 return !NeedsEagerFrame() && info()->is_deferred_calling(); |
| 99 } | 102 } |
| 100 | 103 |
| 101 LinkRegisterStatus GetLinkRegisterState() const { | 104 LinkRegisterStatus GetLinkRegisterState() const { |
| 102 return frame_is_built_ ? kLRHasBeenSaved : kLRHasNotBeenSaved; | 105 return frame_is_built_ ? kLRHasBeenSaved : kLRHasNotBeenSaved; |
| 103 } | 106 } |
| 104 | 107 |
| 105 // Support for converting LOperands to assembler types. | |
| 106 // LOperand must be a register. | |
| 107 Register ToRegister(LOperand* op) const; | |
| 108 | |
| 109 // LOperand is loaded into scratch, unless already a register. | |
| 110 Register EmitLoadRegister(LOperand* op, Register scratch); | |
| 111 | |
| 112 // LOperand must be a double register. | |
| 113 DwVfpRegister ToDoubleRegister(LOperand* op) const; | |
| 114 | |
| 115 // LOperand is loaded into dbl_scratch, unless already a double register. | |
| 116 DwVfpRegister EmitLoadDoubleRegister(LOperand* op, | |
| 117 SwVfpRegister flt_scratch, | |
| 118 DwVfpRegister dbl_scratch); | |
| 119 int ToInteger32(LConstantOperand* op) const; | |
| 120 Smi* ToSmi(LConstantOperand* op) const; | |
| 121 double ToDouble(LConstantOperand* op) const; | |
| 122 Operand ToOperand(LOperand* op); | |
| 123 MemOperand ToMemOperand(LOperand* op) const; | |
| 124 // Returns a MemOperand pointing to the high word of a DoubleStackSlot. | |
| 125 MemOperand ToHighMemOperand(LOperand* op) const; | |
| 126 | |
| 127 bool IsInteger32(LConstantOperand* op) const; | |
| 128 bool IsSmi(LConstantOperand* op) const; | |
| 129 Handle<Object> ToHandle(LConstantOperand* op) const; | |
| 130 | |
| 131 // Try to generate code for the entire chunk, but it may fail if the | 108 // Try to generate code for the entire chunk, but it may fail if the |
| 132 // chunk contains constructs we cannot handle. Returns true if the | 109 // chunk contains constructs we cannot handle. Returns true if the |
| 133 // code generation attempt succeeded. | 110 // code generation attempt succeeded. |
| 134 bool GenerateCode(); | 111 bool GenerateCode(); |
| 135 | 112 |
| 136 // Finish the code by setting stack height, safepoint, and bailout | 113 // Finish the code by setting stack height, safepoint, and bailout |
| 137 // information on it. | 114 // information on it. |
| 138 void FinishCode(Handle<Code> code); | 115 void FinishCode(Handle<Code> code); |
| 139 | 116 |
| 140 // Deferred code support. | 117 // Support for converting LOperands to assembler types. |
| 141 void DoDeferredNumberTagD(LNumberTagD* instr); | 118 // LOperand must be a register. |
| 119 Register ToRegister(LOperand* op) const; |
| 120 Register ToRegister32(LOperand* op) const; |
| 121 Operand ToOperand(LOperand* op); |
| 122 Operand ToOperand32(LOperand* op); |
| 123 MemOperand ToMemOperand(LOperand* op) const; |
| 124 Handle<Object> ToHandle(LConstantOperand* op) const; |
| 142 | 125 |
| 143 enum IntegerSignedness { SIGNED_INT32, UNSIGNED_INT32 }; | 126 // TODO(jbramley): Examine these helpers and check that they make sense. |
| 144 void DoDeferredNumberTagI(LInstruction* instr, | 127 // IsInteger32Constant returns true for smi constants, for example. |
| 145 LOperand* value, | 128 bool IsInteger32Constant(LConstantOperand* op) const; |
| 146 IntegerSignedness signedness); | 129 bool IsSmi(LConstantOperand* op) const; |
| 147 | 130 |
| 148 void DoDeferredTaggedToI(LTaggedToI* instr); | 131 int ToInteger32(LConstantOperand* op) const; |
| 149 void DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr); | 132 Smi* ToSmi(LConstantOperand* op) const; |
| 150 void DoDeferredStackCheck(LStackCheck* instr); | 133 double ToDouble(LConstantOperand* op) const; |
| 151 void DoDeferredRandom(LRandom* instr); | 134 DoubleRegister ToDoubleRegister(LOperand* op) const; |
| 152 void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr); | |
| 153 void DoDeferredStringCharFromCode(LStringCharFromCode* instr); | |
| 154 void DoDeferredAllocate(LAllocate* instr); | |
| 155 void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr, | |
| 156 Label* map_check); | |
| 157 | |
| 158 void DoCheckMapCommon(Register map_reg, Handle<Map> map, LEnvironment* env); | |
| 159 | |
| 160 // Parallel move support. | |
| 161 void DoParallelMove(LParallelMove* move); | |
| 162 void DoGap(LGap* instr); | |
| 163 | |
| 164 MemOperand PrepareKeyedOperand(Register key, | |
| 165 Register base, | |
| 166 bool key_is_constant, | |
| 167 int constant_key, | |
| 168 int element_size, | |
| 169 int shift_size, | |
| 170 int additional_index, | |
| 171 int additional_offset); | |
| 172 | |
| 173 // Emit frame translation commands for an environment. | |
| 174 void WriteTranslation(LEnvironment* environment, | |
| 175 Translation* translation, | |
| 176 int* arguments_index, | |
| 177 int* arguments_count); | |
| 178 | 135 |
| 179 // Declare methods that deal with the individual node types. | 136 // Declare methods that deal with the individual node types. |
| 180 #define DECLARE_DO(type) void Do##type(L##type* node); | 137 #define DECLARE_DO(type) void Do##type(L##type* node); |
| 181 LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO) | 138 LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO) |
| 182 #undef DECLARE_DO | 139 #undef DECLARE_DO |
| 183 | 140 |
| 184 private: | 141 private: |
| 185 enum Status { | 142 enum Status { |
| 186 UNUSED, | 143 UNUSED, |
| 187 GENERATING, | 144 GENERATING, |
| 188 DONE, | 145 DONE, |
| 189 ABORTED | 146 ABORTED |
| 190 }; | 147 }; |
| 191 | 148 |
| 192 bool is_unused() const { return status_ == UNUSED; } | 149 bool is_unused() const { return status_ == UNUSED; } |
| 193 bool is_generating() const { return status_ == GENERATING; } | 150 bool is_generating() const { return status_ == GENERATING; } |
| 194 bool is_done() const { return status_ == DONE; } | 151 bool is_done() const { return status_ == DONE; } |
| 195 bool is_aborted() const { return status_ == ABORTED; } | 152 bool is_aborted() const { return status_ == ABORTED; } |
| 196 | 153 |
| 197 StrictModeFlag strict_mode_flag() const { | 154 // Return a double scratch register which can be used locally |
| 198 return info()->is_classic_mode() ? kNonStrictMode : kStrictMode; | 155 // when generating code for a lithium instruction. |
| 199 } | 156 DoubleRegister double_scratch() { return crankshaft_fp_scratch; } |
| 200 | 157 |
| 201 LPlatformChunk* chunk() const { return chunk_; } | 158 // Deferred code support. |
| 202 Scope* scope() const { return scope_; } | 159 void DoDeferredNumberTagD(LNumberTagD* instr); |
| 203 HGraph* graph() const { return chunk()->graph(); } | 160 void DoDeferredStackCheck(LStackCheck* instr); |
| 161 void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr); |
| 162 void DoDeferredStringCharFromCode(LStringCharFromCode* instr); |
| 163 void DoDeferredMathAbsTagged(LMathAbsTagged* instr, |
| 164 Label* exit, |
| 165 Label* allocation_entry); |
| 204 | 166 |
| 205 Register scratch0() { return r9; } | 167 enum IntegerSignedness { SIGNED_INT32, UNSIGNED_INT32 }; |
| 206 DwVfpRegister double_scratch0() { return kScratchDoubleReg; } | 168 void DoDeferredNumberTagI(LInstruction* instr, |
| 169 LOperand* value, |
| 170 LOperand* temp1, |
| 171 LOperand* temp2, |
| 172 IntegerSignedness signedness); |
| 173 void DoDeferredTaggedToI(LTaggedToI* instr, |
| 174 LOperand* value, |
| 175 LOperand* temp1, |
| 176 LOperand* temp2); |
| 177 void DoDeferredAllocate(LAllocate* instr); |
| 207 | 178 |
| 179 void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr, |
| 180 Label* map_check); |
| 181 |
| 182 static Condition TokenToCondition(Token::Value op, bool is_unsigned); |
| 183 void EmitGoto(int block); |
| 208 int GetNextEmittedBlock() const; | 184 int GetNextEmittedBlock() const; |
| 209 LInstruction* GetNextInstruction(); | 185 void DoGap(LGap* instr); |
| 210 | 186 |
| 211 void EmitClassOfTest(Label* if_true, | 187 // Generic version of EmitBranch. It contains some code to avoid emitting a |
| 212 Label* if_false, | 188 // branch on the next emitted basic block where we could just fall-through. |
| 213 Handle<String> class_name, | 189 // You shouldn't use that directly but rather consider one of the helper like |
| 214 Register input, | 190 // LCodeGen::EmitBranch, LCodeGen::EmitCompareAndBranch... |
| 215 Register temporary, | 191 template<class InstrType> |
| 216 Register temporary2); | 192 void EmitBranchGeneric(InstrType instr, |
| 193 const BranchGenerator& branch); |
| 194 |
| 195 template<class InstrType> |
| 196 void EmitBranch(InstrType instr, Condition condition); |
| 197 |
| 198 template<class InstrType> |
| 199 void EmitCompareAndBranch(InstrType instr, |
| 200 Condition condition, |
| 201 const Register& lhs, |
| 202 const Operand& rhs); |
| 203 |
| 204 template<class InstrType> |
| 205 void EmitTestAndBranch(InstrType instr, |
| 206 Condition condition, |
| 207 const Register& value, |
| 208 uint64_t mask); |
| 209 |
| 210 // Emits optimized code to deep-copy the contents of statically known object |
| 211 // graphs (e.g. object literal boilerplate). Expects a pointer to the |
| 212 // allocated destination object in the result register, and a pointer to the |
| 213 // source object in the source register. |
| 214 void EmitDeepCopy(Handle<JSObject> object, |
| 215 Register result, |
| 216 Register source, |
| 217 Register scratch, |
| 218 int* offset, |
| 219 AllocationSiteMode mode); |
| 220 |
| 221 // Emits optimized code for %_IsString(x). Preserves input register. |
| 222 // Returns the condition on which a final split to |
| 223 // true and false label should be made, to optimize fallthrough. |
| 224 Condition EmitIsString(Register input, Register temp1, Label* is_not_string); |
| 225 |
| 226 void EmitLoadFieldOrConstantFunction(Register result, |
| 227 Register object, |
| 228 Handle<Map> type, |
| 229 Handle<String> name, |
| 230 LEnvironment* env); |
| 231 |
| 232 void RegisterDependentCodeForEmbeddedMaps(Handle<Code> code); |
| 233 int DefineDeoptimizationLiteral(Handle<Object> literal); |
| 234 void PopulateDeoptimizationData(Handle<Code> code); |
| 235 void PopulateDeoptimizationLiteralsWithInlinedFunctions(); |
| 236 |
| 237 void Deoptimize(LEnvironment* environment); |
| 238 void Deoptimize(LEnvironment* environment, |
| 239 Deoptimizer::BailoutType bailout_type); |
| 240 void SoftDeoptimize(LEnvironment* environment); |
| 241 void DeoptimizeIf(Condition cc, LEnvironment* environment); |
| 242 void DeoptimizeIfZero(Register rt, LEnvironment* environment); |
| 243 void DeoptimizeIfNegative(Register rt, LEnvironment* environment); |
| 244 void DeoptimizeIfSmi(Register rt, LEnvironment* environment); |
| 245 void DeoptimizeIfNotSmi(Register rt, LEnvironment* environment); |
| 246 void DeoptimizeIfRoot(Register rt, |
| 247 Heap::RootListIndex index, |
| 248 LEnvironment* environment); |
| 249 void DeoptimizeIfNotRoot(Register rt, |
| 250 Heap::RootListIndex index, |
| 251 LEnvironment* environment); |
| 252 |
| 253 MemOperand PrepareKeyedExternalArrayOperand(Register key, |
| 254 Register base, |
| 255 Register scratch, |
| 256 bool key_is_smi, |
| 257 bool key_is_constant, |
| 258 int constant_key, |
| 259 int element_size_shift, |
| 260 int additional_index); |
| 261 void CalcKeyedArrayBaseRegister(Register base, |
| 262 Register elements, |
| 263 Register key, |
| 264 bool key_is_tagged, |
| 265 ElementsKind elements_kind); |
| 266 |
| 267 void RegisterEnvironmentForDeoptimization(LEnvironment* environment, |
| 268 Safepoint::DeoptMode mode); |
| 217 | 269 |
| 218 int GetStackSlotCount() const { return chunk()->spill_slot_count(); } | 270 int GetStackSlotCount() const { return chunk()->spill_slot_count(); } |
| 219 | 271 |
| 220 void Abort(const char* reason); | 272 void Abort(const char* reason); |
| 221 void FPRINTF_CHECKING Comment(const char* format, ...); | 273 void FPRINTF_CHECKING Comment(const char* format, ...); |
| 222 | 274 |
| 223 void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); } | 275 void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); } |
| 224 | 276 |
| 225 // Code generation passes. Returns true if code generation should | 277 // Emit frame translation commands for an environment. |
| 226 // continue. | 278 void WriteTranslation(LEnvironment* environment, |
| 279 Translation* translation, |
| 280 int* arguments_index, |
| 281 int* arguments_count); |
| 282 |
| 283 void AddToTranslation(Translation* translation, |
| 284 LOperand* op, |
| 285 bool is_tagged, |
| 286 bool is_uint32, |
| 287 bool arguments_known, |
| 288 int arguments_index, |
| 289 int arguments_count); |
| 290 |
| 291 // Code generation steps. Returns true if code generation should continue. |
| 227 bool GeneratePrologue(); | 292 bool GeneratePrologue(); |
| 228 bool GenerateBody(); | 293 bool GenerateBody(); |
| 229 bool GenerateDeferredCode(); | 294 bool GenerateDeferredCode(); |
| 230 bool GenerateDeoptJumpTable(); | 295 bool GenerateDeoptJumpTable(); |
| 231 bool GenerateSafepointTable(); | 296 bool GenerateSafepointTable(); |
| 232 | 297 |
| 233 enum SafepointMode { | 298 enum SafepointMode { |
| 234 RECORD_SIMPLE_SAFEPOINT, | 299 RECORD_SIMPLE_SAFEPOINT, |
| 235 RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS | 300 RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS |
| 236 }; | 301 }; |
| 237 | 302 |
| 238 void CallCode( | 303 void CallCode(Handle<Code> code, |
| 239 Handle<Code> code, | 304 RelocInfo::Mode mode, |
| 240 RelocInfo::Mode mode, | 305 LInstruction* instr); |
| 241 LInstruction* instr, | |
| 242 TargetAddressStorageMode storage_mode = CAN_INLINE_TARGET_ADDRESS); | |
| 243 | 306 |
| 244 void CallCodeGeneric( | 307 void CallCodeGeneric(Handle<Code> code, |
| 245 Handle<Code> code, | 308 RelocInfo::Mode mode, |
| 246 RelocInfo::Mode mode, | 309 LInstruction* instr, |
| 247 LInstruction* instr, | 310 SafepointMode safepoint_mode); |
| 248 SafepointMode safepoint_mode, | |
| 249 TargetAddressStorageMode storage_mode = CAN_INLINE_TARGET_ADDRESS); | |
| 250 | 311 |
| 251 void CallRuntime(const Runtime::Function* function, | 312 void CallRuntime(const Runtime::Function* function, |
| 252 int num_arguments, | 313 int num_arguments, |
| 253 LInstruction* instr); | 314 LInstruction* instr); |
| 254 | 315 |
| 255 void CallRuntime(Runtime::FunctionId id, | 316 void CallRuntime(Runtime::FunctionId id, |
| 256 int num_arguments, | 317 int num_arguments, |
| 257 LInstruction* instr) { | 318 LInstruction* instr) { |
| 258 const Runtime::Function* function = Runtime::FunctionForId(id); | 319 const Runtime::Function* function = Runtime::FunctionForId(id); |
| 259 CallRuntime(function, num_arguments, instr); | 320 CallRuntime(function, num_arguments, instr); |
| 260 } | 321 } |
| 261 | 322 |
| 262 void CallRuntimeFromDeferred(Runtime::FunctionId id, | 323 void CallRuntimeFromDeferred(Runtime::FunctionId id, |
| 263 int argc, | 324 int argc, |
| 264 LInstruction* instr); | 325 LInstruction* instr); |
| 265 | 326 |
| 266 enum R1State { | 327 // Generate a direct call to a known function. |
| 267 R1_UNINITIALIZED, | 328 // If the function is already loaded into x1 by the caller, function_reg may |
| 268 R1_CONTAINS_TARGET | 329 // be set to x1. Otherwise, it must be NoReg, and CallKnownFunction will |
| 269 }; | 330 // automatically load it. |
| 270 | |
| 271 // Generate a direct call to a known function. Expects the function | |
| 272 // to be in r1. | |
| 273 void CallKnownFunction(Handle<JSFunction> function, | 331 void CallKnownFunction(Handle<JSFunction> function, |
| 274 int formal_parameter_count, | 332 int formal_parameter_count, |
| 275 int arity, | 333 int arity, |
| 276 LInstruction* instr, | 334 LInstruction* instr, |
| 277 CallKind call_kind, | 335 CallKind call_kind, |
| 278 R1State r1_state); | 336 Register function_reg = NoReg); |
| 279 | |
| 280 void LoadHeapObject(Register result, Handle<HeapObject> object); | |
| 281 | |
| 282 void RecordSafepointWithLazyDeopt(LInstruction* instr, | |
| 283 SafepointMode safepoint_mode); | |
| 284 | |
| 285 void RegisterEnvironmentForDeoptimization(LEnvironment* environment, | |
| 286 Safepoint::DeoptMode mode); | |
| 287 void DeoptimizeIf(Condition cc, | |
| 288 LEnvironment* environment, | |
| 289 Deoptimizer::BailoutType bailout_type); | |
| 290 void DeoptimizeIf(Condition cc, LEnvironment* environment); | |
| 291 void SoftDeoptimize(LEnvironment* environment); | |
| 292 | |
| 293 void AddToTranslation(Translation* translation, | |
| 294 LOperand* op, | |
| 295 bool is_tagged, | |
| 296 bool is_uint32, | |
| 297 bool arguments_known, | |
| 298 int arguments_index, | |
| 299 int arguments_count); | |
| 300 void RegisterDependentCodeForEmbeddedMaps(Handle<Code> code); | |
| 301 void PopulateDeoptimizationData(Handle<Code> code); | |
| 302 int DefineDeoptimizationLiteral(Handle<Object> literal); | |
| 303 | |
| 304 void PopulateDeoptimizationLiteralsWithInlinedFunctions(); | |
| 305 | |
| 306 Register ToRegister(int index) const; | |
| 307 DwVfpRegister ToDoubleRegister(int index) const; | |
| 308 | |
| 309 void EmitIntegerMathAbs(LMathAbs* instr); | |
| 310 | 337 |
| 311 // Support for recording safepoint and position information. | 338 // Support for recording safepoint and position information. |
| 339 void RecordPosition(int position); |
| 312 void RecordSafepoint(LPointerMap* pointers, | 340 void RecordSafepoint(LPointerMap* pointers, |
| 313 Safepoint::Kind kind, | 341 Safepoint::Kind kind, |
| 314 int arguments, | 342 int arguments, |
| 315 Safepoint::DeoptMode mode); | 343 Safepoint::DeoptMode mode); |
| 316 void RecordSafepoint(LPointerMap* pointers, Safepoint::DeoptMode mode); | 344 void RecordSafepoint(LPointerMap* pointers, Safepoint::DeoptMode mode); |
| 317 void RecordSafepoint(Safepoint::DeoptMode mode); | 345 void RecordSafepoint(Safepoint::DeoptMode mode); |
| 318 void RecordSafepointWithRegisters(LPointerMap* pointers, | 346 void RecordSafepointWithRegisters(LPointerMap* pointers, |
| 319 int arguments, | 347 int arguments, |
| 320 Safepoint::DeoptMode mode); | 348 Safepoint::DeoptMode mode); |
| 321 void RecordSafepointWithRegistersAndDoubles(LPointerMap* pointers, | 349 void RecordSafepointWithLazyDeopt(LInstruction* instr, |
| 322 int arguments, | 350 SafepointMode safepoint_mode); |
| 323 Safepoint::DeoptMode mode); | |
| 324 void RecordPosition(int position); | |
| 325 | |
| 326 static Condition TokenToCondition(Token::Value op, bool is_unsigned); | |
| 327 void EmitGoto(int block); | |
| 328 void EmitBranch(int left_block, int right_block, Condition cc); | |
| 329 void EmitNumberUntagD(Register input, | |
| 330 DwVfpRegister result, | |
| 331 bool allow_undefined_as_nan, | |
| 332 bool deoptimize_on_minus_zero, | |
| 333 LEnvironment* env, | |
| 334 NumberUntagDMode mode); | |
| 335 | |
| 336 // Emits optimized code for typeof x == "y". Modifies input register. | |
| 337 // Returns the condition on which a final split to | |
| 338 // true and false label should be made, to optimize fallthrough. | |
| 339 Condition EmitTypeofIs(Label* true_label, | |
| 340 Label* false_label, | |
| 341 Register input, | |
| 342 Handle<String> type_name); | |
| 343 | |
| 344 // Emits optimized code for %_IsObject(x). Preserves input register. | |
| 345 // Returns the condition on which a final split to | |
| 346 // true and false label should be made, to optimize fallthrough. | |
| 347 Condition EmitIsObject(Register input, | |
| 348 Register temp1, | |
| 349 Label* is_not_object, | |
| 350 Label* is_object); | |
| 351 | |
| 352 // Emits optimized code for %_IsString(x). Preserves input register. | |
| 353 // Returns the condition on which a final split to | |
| 354 // true and false label should be made, to optimize fallthrough. | |
| 355 Condition EmitIsString(Register input, | |
| 356 Register temp1, | |
| 357 Label* is_not_string); | |
| 358 | |
| 359 // Emits optimized code for %_IsConstructCall(). | |
| 360 // Caller should branch on equal condition. | |
| 361 void EmitIsConstructCall(Register temp1, Register temp2); | |
| 362 | |
| 363 void EmitLoadFieldOrConstantFunction(Register result, | |
| 364 Register object, | |
| 365 Handle<Map> type, | |
| 366 Handle<String> name, | |
| 367 LEnvironment* env); | |
| 368 | |
| 369 // Emits optimized code to deep-copy the contents of statically known | |
| 370 // object graphs (e.g. object literal boilerplate). | |
| 371 void EmitDeepCopy(Handle<JSObject> object, | |
| 372 Register result, | |
| 373 Register source, | |
| 374 int* offset, | |
| 375 AllocationSiteMode mode); | |
| 376 | |
| 377 // Emit optimized code for integer division. | |
| 378 // Inputs are signed. | |
| 379 // All registers are clobbered. | |
| 380 // If 'remainder' is no_reg, it is not computed. | |
| 381 void EmitSignedIntegerDivisionByConstant(Register result, | |
| 382 Register dividend, | |
| 383 int32_t divisor, | |
| 384 Register remainder, | |
| 385 Register scratch, | |
| 386 LEnvironment* environment); | |
| 387 | 351 |
| 388 void EnsureSpaceForLazyDeopt(); | 352 void EnsureSpaceForLazyDeopt(); |
| 389 void DoLoadKeyedExternalArray(LLoadKeyed* instr); | |
| 390 void DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr); | |
| 391 void DoLoadKeyedFixedArray(LLoadKeyed* instr); | |
| 392 void DoStoreKeyedExternalArray(LStoreKeyed* instr); | |
| 393 void DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr); | |
| 394 void DoStoreKeyedFixedArray(LStoreKeyed* instr); | |
| 395 | 353 |
| 396 Zone* zone_; | 354 Zone* zone_; |
| 397 LPlatformChunk* const chunk_; | 355 LPlatformChunk* const chunk_; |
| 398 MacroAssembler* const masm_; | 356 MacroAssembler* const masm_; |
| 399 CompilationInfo* const info_; | 357 CompilationInfo* const info_; |
| 400 | 358 |
| 401 int current_block_; | 359 int current_block_; |
| 402 int current_instruction_; | 360 int current_instruction_; |
| 403 const ZoneList<LInstruction*>* instructions_; | 361 const ZoneList<LInstruction*>* instructions_; |
| 404 ZoneList<LEnvironment*> deoptimizations_; | 362 ZoneList<LEnvironment*> deoptimizations_; |
| 405 ZoneList<Deoptimizer::JumpTableEntry> deopt_jump_table_; | 363 ZoneList<Deoptimizer::JumpTableEntry> deopt_jump_table_; |
| 406 ZoneList<Handle<Object> > deoptimization_literals_; | 364 ZoneList<Handle<Object> > deoptimization_literals_; |
| 407 int inlined_function_count_; | 365 int inlined_function_count_; |
| 408 Scope* const scope_; | 366 Scope* const scope_; |
| 409 Status status_; | 367 Status status_; |
| 410 TranslationBuffer translations_; | 368 TranslationBuffer translations_; |
| 411 ZoneList<LDeferredCode*> deferred_; | 369 ZoneList<LDeferredCode*> deferred_; |
| 412 int osr_pc_offset_; | 370 int osr_pc_offset_; |
| 413 int last_lazy_deopt_pc_; | 371 int last_lazy_deopt_pc_; |
| 414 bool frame_is_built_; | 372 bool frame_is_built_; |
| 415 | 373 |
| 416 // Builder that keeps track of safepoints in the code. The table | 374 // Builder that keeps track of safepoints in the code. The table itself is |
| 417 // itself is emitted at the end of the generated code. | 375 // emitted at the end of the generated code. |
| 418 SafepointTableBuilder safepoints_; | 376 SafepointTableBuilder safepoints_; |
| 419 | 377 |
| 420 // Compiler from a set of parallel moves to a sequential list of moves. | 378 // Compiler from a set of parallel moves to a sequential list of moves. |
| 421 LGapResolver resolver_; | 379 LGapResolver resolver_; |
| 422 | 380 |
| 423 Safepoint::Kind expected_safepoint_kind_; | 381 Safepoint::Kind expected_safepoint_kind_; |
| 424 | 382 |
| 425 class PushSafepointRegistersScope BASE_EMBEDDED { | 383 class PushSafepointRegistersScope BASE_EMBEDDED { |
| 426 public: | 384 public: |
| 427 PushSafepointRegistersScope(LCodeGen* codegen, | 385 PushSafepointRegistersScope(LCodeGen* codegen, |
| 428 Safepoint::Kind kind) | 386 Safepoint::Kind kind) |
| 429 : codegen_(codegen) { | 387 : codegen_(codegen) { |
| 430 ASSERT(codegen_->info()->is_calling()); | 388 ASSERT(codegen_->info()->is_calling()); |
| 431 ASSERT(codegen_->expected_safepoint_kind_ == Safepoint::kSimple); | 389 ASSERT(codegen_->expected_safepoint_kind_ == Safepoint::kSimple); |
| 432 codegen_->expected_safepoint_kind_ = kind; | 390 codegen_->expected_safepoint_kind_ = kind; |
| 433 | 391 |
| 434 switch (codegen_->expected_safepoint_kind_) { | 392 switch (codegen_->expected_safepoint_kind_) { |
| 435 case Safepoint::kWithRegisters: | 393 case Safepoint::kWithRegisters: |
| 436 codegen_->masm_->PushSafepointRegisters(); | 394 codegen_->masm_->PushSafepointRegisters(); |
| 437 break; | 395 break; |
| 438 case Safepoint::kWithRegistersAndDoubles: | 396 case Safepoint::kWithRegistersAndDoubles: |
| 439 codegen_->masm_->PushSafepointRegistersAndDoubles(); | 397 UNIMPLEMENTED(); |
| 440 break; | 398 break; |
| 441 default: | 399 default: |
| 442 UNREACHABLE(); | 400 UNREACHABLE(); |
| 443 } | 401 } |
| 444 } | 402 } |
| 445 | 403 |
| 446 ~PushSafepointRegistersScope() { | 404 ~PushSafepointRegistersScope() { |
| 447 Safepoint::Kind kind = codegen_->expected_safepoint_kind_; | 405 Safepoint::Kind kind = codegen_->expected_safepoint_kind_; |
| 448 ASSERT((kind & Safepoint::kWithRegisters) != 0); | 406 ASSERT((kind & Safepoint::kWithRegisters) != 0); |
| 449 switch (kind) { | 407 switch (kind) { |
| 450 case Safepoint::kWithRegisters: | 408 case Safepoint::kWithRegisters: |
| 451 codegen_->masm_->PopSafepointRegisters(); | 409 codegen_->masm_->PopSafepointRegisters(); |
| 452 break; | 410 break; |
| 453 case Safepoint::kWithRegistersAndDoubles: | 411 case Safepoint::kWithRegistersAndDoubles: |
| 454 codegen_->masm_->PopSafepointRegistersAndDoubles(); | 412 UNIMPLEMENTED(); |
| 455 break; | 413 break; |
| 456 default: | 414 default: |
| 457 UNREACHABLE(); | 415 UNREACHABLE(); |
| 458 } | 416 } |
| 459 codegen_->expected_safepoint_kind_ = Safepoint::kSimple; | 417 codegen_->expected_safepoint_kind_ = Safepoint::kSimple; |
| 460 } | 418 } |
| 461 | 419 |
| 462 private: | 420 private: |
| 463 LCodeGen* codegen_; | 421 LCodeGen* codegen_; |
| 464 }; | 422 }; |
| 465 | 423 |
| 466 friend class LDeferredCode; | 424 friend class LDeferredCode; |
| 467 friend class LEnvironment; | |
| 468 friend class SafepointGenerator; | 425 friend class SafepointGenerator; |
| 469 DISALLOW_COPY_AND_ASSIGN(LCodeGen); | 426 DISALLOW_COPY_AND_ASSIGN(LCodeGen); |
| 470 }; | 427 }; |
| 471 | 428 |
| 472 | 429 |
| 473 class LDeferredCode: public ZoneObject { | 430 class LDeferredCode: public ZoneObject { |
| 474 public: | 431 public: |
| 475 explicit LDeferredCode(LCodeGen* codegen) | 432 explicit LDeferredCode(LCodeGen* codegen) |
| 476 : codegen_(codegen), | 433 : codegen_(codegen), |
| 477 external_exit_(NULL), | 434 external_exit_(NULL), |
| 478 instruction_index_(codegen->current_instruction_) { | 435 instruction_index_(codegen->current_instruction_) { |
| 479 codegen->AddDeferredCode(this); | 436 codegen->AddDeferredCode(this); |
| 480 } | 437 } |
| 481 | 438 |
| 482 virtual ~LDeferredCode() { } | 439 virtual ~LDeferredCode() { } |
| 483 virtual void Generate() = 0; | 440 virtual void Generate() = 0; |
| 484 virtual LInstruction* instr() = 0; | 441 virtual LInstruction* instr() = 0; |
| 485 | 442 |
| 486 void SetExit(Label* exit) { external_exit_ = exit; } | 443 void SetExit(Label* exit) { external_exit_ = exit; } |
| 487 Label* entry() { return &entry_; } | 444 Label* entry() { return &entry_; } |
| 488 Label* exit() { return external_exit_ != NULL ? external_exit_ : &exit_; } | 445 Label* exit() { return (external_exit_ != NULL) ? external_exit_ : &exit_; } |
| 489 int instruction_index() const { return instruction_index_; } | 446 int instruction_index() const { return instruction_index_; } |
| 490 | 447 |
| 491 protected: | 448 protected: |
| 492 LCodeGen* codegen() const { return codegen_; } | 449 LCodeGen* codegen() const { return codegen_; } |
| 493 MacroAssembler* masm() const { return codegen_->masm(); } | 450 MacroAssembler* masm() const { return codegen_->masm(); } |
| 494 | 451 |
| 495 private: | 452 private: |
| 496 LCodeGen* codegen_; | 453 LCodeGen* codegen_; |
| 497 Label entry_; | 454 Label entry_; |
| 498 Label exit_; | 455 Label exit_; |
| 499 Label* external_exit_; | 456 Label* external_exit_; |
| 500 int instruction_index_; | 457 int instruction_index_; |
| 501 }; | 458 }; |
| 502 | 459 |
| 460 |
// This is the abstract class used by EmitBranchGeneric.
// It is used to emit code for conditional branching. The Emit() function
// emits code to branch when the condition holds and EmitInverted() emits
// the branch when the inverted condition is verified.
//
// For actual examples of condition see the concrete implementation in
// lithium-codegen-a64.cc (e.g. BranchOnCondition, CompareAndBranch).
class BranchGenerator BASE_EMBEDDED {
 public:
  explicit BranchGenerator(LCodeGen* codegen)
    : codegen_(codegen) { }

  virtual ~BranchGenerator() { }

  // Branch to |label| when the condition holds.
  virtual void Emit(Label* label) const = 0;
  // Branch to |label| when the condition does NOT hold.
  virtual void EmitInverted(Label* label) const = 0;

 protected:
  MacroAssembler* masm() const { return codegen_->masm(); }

  LCodeGen* codegen_;
};
| 483 |
| 503 } } // namespace v8::internal | 484 } } // namespace v8::internal |
| 504 | 485 |
| 505 #endif // V8_ARM_LITHIUM_CODEGEN_ARM_H_ | 486 #endif // V8_A64_LITHIUM_CODEGEN_A64_H_ |
| OLD | NEW |