// Copyright 2013 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_A64_LITHIUM_CODEGEN_A64_H_
#define V8_A64_LITHIUM_CODEGEN_A64_H_

#include "a64/lithium-a64.h"

#include "a64/lithium-gap-resolver-a64.h"
#include "deoptimizer.h"
#include "lithium-codegen.h"
#include "safepoint-table.h"
#include "scopes.h"
#include "v8utils.h"

namespace v8 {
namespace internal {

// Forward declarations.
class LDeferredCode;
class SafepointGenerator;
class BranchGenerator;

class LCodeGen: public LCodeGenBase {
 public:
  LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
      : LCodeGenBase(chunk, assembler, info),
        deoptimizations_(4, info->zone()),
        deopt_jump_table_(4, info->zone()),
        deoptimization_literals_(8, info->zone()),
        inlined_function_count_(0),
        scope_(info->scope()),
        translations_(info->zone()),
        deferred_(8, info->zone()),
        osr_pc_offset_(-1),
        frame_is_built_(false),
        safepoints_(info->zone()),
        resolver_(this),
        expected_safepoint_kind_(Safepoint::kSimple) {
    PopulateDeoptimizationLiteralsWithInlinedFunctions();
  }

  // Simple accessors.
  Scope* scope() const { return scope_; }

  int LookupDestination(int block_id) const {
    return chunk()->LookupDestination(block_id);
  }

  bool IsNextEmittedBlock(int block_id) const {
    return LookupDestination(block_id) == GetNextEmittedBlock();
  }

  bool NeedsEagerFrame() const {
    return GetStackSlotCount() > 0 ||
        info()->is_non_deferred_calling() ||
        !info()->IsStub() ||
        info()->requires_frame();
  }
  bool NeedsDeferredFrame() const {
    return !NeedsEagerFrame() && info()->is_deferred_calling();
  }

  LinkRegisterStatus GetLinkRegisterState() const {
    return frame_is_built_ ? kLRHasBeenSaved : kLRHasNotBeenSaved;
  }

  // Try to generate code for the entire chunk, but it may fail if the
  // chunk contains constructs we cannot handle. Returns true if the
  // code generation attempt succeeded.
  bool GenerateCode();

  // Finish the code by setting stack height, safepoint, and bailout
  // information on it.
  void FinishCode(Handle<Code> code);

  // Support for converting LOperands to assembler types.
  // LOperand must be a register.
  Register ToRegister(LOperand* op) const;
  Register ToRegister32(LOperand* op) const;
  Operand ToOperand(LOperand* op);
  Operand ToOperand32I(LOperand* op);
  Operand ToOperand32U(LOperand* op);
  MemOperand ToMemOperand(LOperand* op) const;
  Handle<Object> ToHandle(LConstantOperand* op) const;

  // TODO(jbramley): Examine these helpers and check that they make sense.
  // IsInteger32Constant returns true for smi constants, for example.
  bool IsInteger32Constant(LConstantOperand* op) const;
  bool IsSmi(LConstantOperand* op) const;

  int32_t ToInteger32(LConstantOperand* op) const;
  Smi* ToSmi(LConstantOperand* op) const;
  double ToDouble(LConstantOperand* op) const;
  DoubleRegister ToDoubleRegister(LOperand* op) const;

  // Declare methods that deal with the individual node types.
#define DECLARE_DO(type) void Do##type(L##type* node);
  LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO)
#undef DECLARE_DO
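  // For illustration: each DECLARE_DO(type) above expands to one handler
  // declaration per concrete Lithium instruction; for example,
  // DECLARE_DO(Goto) yields:
  //
  //   void DoGoto(LGoto* node);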

 private:
  // Return a double scratch register which can be used locally
  // when generating code for a lithium instruction.
  DoubleRegister double_scratch() { return crankshaft_fp_scratch; }

  // Deferred code support.
  void DoDeferredNumberTagD(LNumberTagD* instr);
  void DoDeferredStackCheck(LStackCheck* instr);
  void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
  void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
  void DoDeferredMathAbsTagged(LMathAbsTagged* instr,
                               Label* exit,
                               Label* allocation_entry);

  enum IntegerSignedness { SIGNED_INT32, UNSIGNED_INT32 };
  void DoDeferredNumberTagU(LInstruction* instr,
                            LOperand* value,
                            LOperand* temp1,
                            LOperand* temp2);
  void DoDeferredTaggedToI(LTaggedToI* instr,
                           LOperand* value,
                           LOperand* temp1,
                           LOperand* temp2);
  void DoDeferredAllocate(LAllocate* instr);
  void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr);
  void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);

  Operand ToOperand32(LOperand* op, IntegerSignedness signedness);

  static Condition TokenToCondition(Token::Value op, bool is_unsigned);
  void EmitGoto(int block);
  void DoGap(LGap* instr);

  // Generic version of EmitBranch. It contains logic to avoid emitting a
  // branch to the next emitted basic block, where we could simply fall
  // through. Don't use it directly; prefer one of the helpers below, such as
  // LCodeGen::EmitBranch or LCodeGen::EmitCompareAndBranch.
  template<class InstrType>
  void EmitBranchGeneric(InstrType instr,
                         const BranchGenerator& branch);

  template<class InstrType>
  void EmitBranch(InstrType instr, Condition condition);

  template<class InstrType>
  void EmitCompareAndBranch(InstrType instr,
                            Condition condition,
                            const Register& lhs,
                            const Operand& rhs);

  template<class InstrType>
  void EmitTestAndBranch(InstrType instr,
                         Condition condition,
                         const Register& value,
                         uint64_t mask);

  template<class InstrType>
  void EmitBranchIfNonZeroNumber(InstrType instr,
                                 const FPRegister& value,
                                 const FPRegister& scratch);

  template<class InstrType>
  void EmitBranchIfHeapNumber(InstrType instr,
                              const Register& value);

  template<class InstrType>
  void EmitBranchIfRoot(InstrType instr,
                        const Register& value,
                        Heap::RootListIndex index);

  // Emits optimized code to deep-copy the contents of statically known object
  // graphs (e.g. object literal boilerplate). Expects a pointer to the
  // allocated destination object in the result register, and a pointer to the
  // source object in the source register.
  void EmitDeepCopy(Handle<JSObject> object,
                    Register result,
                    Register source,
                    Register scratch,
                    int* offset,
                    AllocationSiteMode mode);

  // Emits optimized code for %_IsString(x). Preserves the input register.
  // Returns the condition on which a final split into true and false labels
  // should be made, to optimize fallthrough.
  Condition EmitIsString(Register input, Register temp1, Label* is_not_string,
                         SmiCheck check_needed);

  int DefineDeoptimizationLiteral(Handle<Object> literal);
  void PopulateDeoptimizationData(Handle<Code> code);
  void PopulateDeoptimizationLiteralsWithInlinedFunctions();

  MemOperand BuildSeqStringOperand(Register string,
                                   Register temp,
                                   LOperand* index,
                                   String::Encoding encoding);
  void DeoptimizeBranch(
      LEnvironment* environment,
      BranchType branch_type, Register reg = NoReg, int bit = -1,
      Deoptimizer::BailoutType* override_bailout_type = NULL);
  void Deoptimize(LEnvironment* environment,
                  Deoptimizer::BailoutType* override_bailout_type = NULL);
  void DeoptimizeIf(Condition cc, LEnvironment* environment);
  void DeoptimizeIfZero(Register rt, LEnvironment* environment);
  void DeoptimizeIfNegative(Register rt, LEnvironment* environment);
  void DeoptimizeIfSmi(Register rt, LEnvironment* environment);
  void DeoptimizeIfNotSmi(Register rt, LEnvironment* environment);
  void DeoptimizeIfRoot(Register rt,
                        Heap::RootListIndex index,
                        LEnvironment* environment);
  void DeoptimizeIfNotRoot(Register rt,
                           Heap::RootListIndex index,
                           LEnvironment* environment);
  void ApplyCheckIf(Condition cc, LBoundsCheck* check);

  MemOperand PrepareKeyedExternalArrayOperand(Register key,
                                              Register base,
                                              Register scratch,
                                              bool key_is_smi,
                                              bool key_is_constant,
                                              int constant_key,
                                              ElementsKind elements_kind,
                                              int additional_index);
  void CalcKeyedArrayBaseRegister(Register base,
                                  Register elements,
                                  Register key,
                                  bool key_is_tagged,
                                  ElementsKind elements_kind);

  void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
                                            Safepoint::DeoptMode mode);

  int GetStackSlotCount() const { return chunk()->spill_slot_count(); }

  void Abort(BailoutReason reason);

  void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }

  // Emit frame translation commands for an environment.
  void WriteTranslation(LEnvironment* environment, Translation* translation);

  void AddToTranslation(LEnvironment* environment,
                        Translation* translation,
                        LOperand* op,
                        bool is_tagged,
                        bool is_uint32,
                        int* object_index_pointer,
                        int* dematerialized_index_pointer);

  void SaveCallerDoubles();
  void RestoreCallerDoubles();

  // Code generation steps. Returns true if code generation should continue.
  bool GeneratePrologue();
  bool GenerateDeferredCode();
  bool GenerateDeoptJumpTable();
  bool GenerateSafepointTable();

  // Generates the custom OSR entrypoint and sets the osr_pc_offset.
  void GenerateOsrPrologue();

  enum SafepointMode {
    RECORD_SIMPLE_SAFEPOINT,
    RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS
  };

  void CallCode(Handle<Code> code,
                RelocInfo::Mode mode,
                LInstruction* instr);

  void CallCodeGeneric(Handle<Code> code,
                       RelocInfo::Mode mode,
                       LInstruction* instr,
                       SafepointMode safepoint_mode);

  void CallRuntime(const Runtime::Function* function,
                   int num_arguments,
                   LInstruction* instr,
                   SaveFPRegsMode save_doubles = kDontSaveFPRegs);

  void CallRuntime(Runtime::FunctionId id,
                   int num_arguments,
                   LInstruction* instr) {
    const Runtime::Function* function = Runtime::FunctionForId(id);
    CallRuntime(function, num_arguments, instr);
  }

  void LoadContextFromDeferred(LOperand* context);
  void CallRuntimeFromDeferred(Runtime::FunctionId id,
                               int argc,
                               LInstruction* instr,
                               LOperand* context);

  // Generate a direct call to a known function.
  // If the function is already loaded into x1 by the caller, function_reg may
  // be set to x1. Otherwise, it must be NoReg, and CallKnownFunction will
  // automatically load it.
  void CallKnownFunction(Handle<JSFunction> function,
                         int formal_parameter_count,
                         int arity,
                         LInstruction* instr,
                         Register function_reg = NoReg);

  // Support for recording safepoint and position information.
  void RecordAndWritePosition(int position) V8_OVERRIDE;
  void RecordSafepoint(LPointerMap* pointers,
                       Safepoint::Kind kind,
                       int arguments,
                       Safepoint::DeoptMode mode);
  void RecordSafepoint(LPointerMap* pointers, Safepoint::DeoptMode mode);
  void RecordSafepoint(Safepoint::DeoptMode mode);
  void RecordSafepointWithRegisters(LPointerMap* pointers,
                                    int arguments,
                                    Safepoint::DeoptMode mode);
  void RecordSafepointWithRegistersAndDoubles(LPointerMap* pointers,
                                              int arguments,
                                              Safepoint::DeoptMode mode);
  void RecordSafepointWithLazyDeopt(LInstruction* instr,
                                    SafepointMode safepoint_mode);

  void EnsureSpaceForLazyDeopt(int space_needed) V8_OVERRIDE;

  ZoneList<LEnvironment*> deoptimizations_;
  ZoneList<Deoptimizer::JumpTableEntry> deopt_jump_table_;
  ZoneList<Handle<Object> > deoptimization_literals_;
  int inlined_function_count_;
  Scope* const scope_;
  TranslationBuffer translations_;
  ZoneList<LDeferredCode*> deferred_;
  int osr_pc_offset_;
  bool frame_is_built_;

  // Builder that keeps track of safepoints in the code. The table itself is
  // emitted at the end of the generated code.
  SafepointTableBuilder safepoints_;

  // Compiles a set of parallel moves into a sequential list of moves.
  LGapResolver resolver_;

  Safepoint::Kind expected_safepoint_kind_;

  int old_position_;

  class PushSafepointRegistersScope BASE_EMBEDDED {
   public:
    PushSafepointRegistersScope(LCodeGen* codegen,
                                Safepoint::Kind kind)
        : codegen_(codegen) {
      ASSERT(codegen_->info()->is_calling());
      ASSERT(codegen_->expected_safepoint_kind_ == Safepoint::kSimple);
      codegen_->expected_safepoint_kind_ = kind;

      switch (codegen_->expected_safepoint_kind_) {
        case Safepoint::kWithRegisters:
          codegen_->masm_->PushSafepointRegisters();
          break;
        case Safepoint::kWithRegistersAndDoubles:
          codegen_->masm_->PushSafepointRegisters();
          codegen_->masm_->PushSafepointFPRegisters();
          break;
        default:
          UNREACHABLE();
      }
    }

    ~PushSafepointRegistersScope() {
      Safepoint::Kind kind = codegen_->expected_safepoint_kind_;
      ASSERT((kind & Safepoint::kWithRegisters) != 0);
      switch (kind) {
        case Safepoint::kWithRegisters:
          codegen_->masm_->PopSafepointRegisters();
          break;
        case Safepoint::kWithRegistersAndDoubles:
          codegen_->masm_->PopSafepointFPRegisters();
          codegen_->masm_->PopSafepointRegisters();
          break;
        default:
          UNREACHABLE();
      }
      codegen_->expected_safepoint_kind_ = Safepoint::kSimple;
    }

   private:
    LCodeGen* codegen_;
  };
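
  // Illustrative usage (a sketch; the actual call sites live in
  // lithium-codegen-a64.cc): the scope pushes the safepoint registers around
  // a call so they can be recorded, and its destructor pops them and resets
  // expected_safepoint_kind_ to Safepoint::kSimple. Roughly:
  //
  //   {
  //     PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
  //     // ... emit the call ...
  //     RecordSafepointWithRegisters(
  //         instr->pointer_map(), 0, Safepoint::kNoLazyDeopt);
  //   }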

  friend class LDeferredCode;
  friend class SafepointGenerator;
  DISALLOW_COPY_AND_ASSIGN(LCodeGen);
};


class LDeferredCode: public ZoneObject {
 public:
  explicit LDeferredCode(LCodeGen* codegen)
      : codegen_(codegen),
        external_exit_(NULL),
        instruction_index_(codegen->current_instruction_) {
    codegen->AddDeferredCode(this);
  }

  virtual ~LDeferredCode() { }
  virtual void Generate() = 0;
  virtual LInstruction* instr() = 0;

  void SetExit(Label* exit) { external_exit_ = exit; }
  Label* entry() { return &entry_; }
  Label* exit() { return (external_exit_ != NULL) ? external_exit_ : &exit_; }
  int instruction_index() const { return instruction_index_; }

 protected:
  LCodeGen* codegen() const { return codegen_; }
  MacroAssembler* masm() const { return codegen_->masm(); }

 private:
  LCodeGen* codegen_;
  Label entry_;
  Label exit_;
  Label* external_exit_;
  int instruction_index_;
};
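
// Illustrative sketch (not part of the original header): concrete deferred
// code is typically declared as a local class in lithium-codegen-a64.cc and
// registers itself via the LDeferredCode constructor, along these lines:
//
//   class DeferredNumberTagD : public LDeferredCode {
//    public:
//     DeferredNumberTagD(LCodeGen* codegen, LNumberTagD* instr)
//         : LDeferredCode(codegen), instr_(instr) { }
//     virtual void Generate() { codegen()->DoDeferredNumberTagD(instr_); }
//     virtual LInstruction* instr() { return instr_; }
//    private:
//     LNumberTagD* instr_;
//   };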


// This is the abstract class used by EmitBranchGeneric. It is used to emit
// code for conditional branching. The Emit() function emits code to branch
// when the condition holds, and EmitInverted() emits the branch when the
// inverted condition holds.
//
// For concrete examples, see the implementations in lithium-codegen-a64.cc
// (e.g. BranchOnCondition, CompareAndBranch).
class BranchGenerator BASE_EMBEDDED {
 public:
  explicit BranchGenerator(LCodeGen* codegen)
      : codegen_(codegen) { }

  virtual ~BranchGenerator() { }

  virtual void Emit(Label* label) const = 0;
  virtual void EmitInverted(Label* label) const = 0;

 protected:
  MacroAssembler* masm() const { return codegen_->masm(); }

  LCodeGen* codegen_;
};
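
// A minimal sketch of a concrete BranchGenerator, modelled on
// BranchOnCondition from lithium-codegen-a64.cc (details are illustrative;
// __ is the usual masm()-> shorthand defined in the .cc file):
//
//   class BranchOnCondition : public BranchGenerator {
//    public:
//     BranchOnCondition(LCodeGen* codegen, Condition cond)
//         : BranchGenerator(codegen), cond_(cond) { }
//
//     virtual void Emit(Label* label) const {
//       __ B(cond_, label);
//     }
//
//     virtual void EmitInverted(Label* label) const {
//       if (cond_ != al) __ B(InvertCondition(cond_), label);
//     }
//
//    private:
//     Condition cond_;
//   };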

} }  // namespace v8::internal

#endif  // V8_A64_LITHIUM_CODEGEN_A64_H_