| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #ifndef V8_ARM64_LITHIUM_CODEGEN_ARM64_H_ | 5 #ifndef V8_ARM64_LITHIUM_CODEGEN_ARM64_H_ |
| 6 #define V8_ARM64_LITHIUM_CODEGEN_ARM64_H_ | 6 #define V8_ARM64_LITHIUM_CODEGEN_ARM64_H_ |
| 7 | 7 |
| 8 #include "src/arm64/lithium-arm64.h" | 8 #include "src/arm64/lithium-arm64.h" |
| 9 | 9 |
| 10 #include "src/arm64/lithium-gap-resolver-arm64.h" | 10 #include "src/arm64/lithium-gap-resolver-arm64.h" |
| (...skipping 19 matching lines...) |
| 30 jump_table_(4, info->zone()), | 30 jump_table_(4, info->zone()), |
| 31 deoptimization_literals_(8, info->zone()), | 31 deoptimization_literals_(8, info->zone()), |
| 32 inlined_function_count_(0), | 32 inlined_function_count_(0), |
| 33 scope_(info->scope()), | 33 scope_(info->scope()), |
| 34 translations_(info->zone()), | 34 translations_(info->zone()), |
| 35 deferred_(8, info->zone()), | 35 deferred_(8, info->zone()), |
| 36 osr_pc_offset_(-1), | 36 osr_pc_offset_(-1), |
| 37 frame_is_built_(false), | 37 frame_is_built_(false), |
| 38 safepoints_(info->zone()), | 38 safepoints_(info->zone()), |
| 39 resolver_(this), | 39 resolver_(this), |
| 40 expected_safepoint_kind_(Safepoint::kSimple), | 40 expected_safepoint_kind_(Safepoint::kSimple) { |
| 41 after_push_argument_(false), | |
| 42 inlined_arguments_(false) { | |
| 43 PopulateDeoptimizationLiteralsWithInlinedFunctions(); | 41 PopulateDeoptimizationLiteralsWithInlinedFunctions(); |
| 44 } | 42 } |
| 45 | 43 |
| 46 ~LCodeGen() { | |
| 47 DCHECK(!after_push_argument_ || inlined_arguments_); | |
| 48 } | |
| 49 | |
| 50 // Simple accessors. | 44 // Simple accessors. |
| 51 Scope* scope() const { return scope_; } | 45 Scope* scope() const { return scope_; } |
| 52 | 46 |
| 53 int LookupDestination(int block_id) const { | 47 int LookupDestination(int block_id) const { |
| 54 return chunk()->LookupDestination(block_id); | 48 return chunk()->LookupDestination(block_id); |
| 55 } | 49 } |
| 56 | 50 |
| 57 bool IsNextEmittedBlock(int block_id) const { | 51 bool IsNextEmittedBlock(int block_id) const { |
| 58 return LookupDestination(block_id) == GetNextEmittedBlock(); | 52 return LookupDestination(block_id) == GetNextEmittedBlock(); |
| 59 } | 53 } |
| (...skipping 20 matching lines...) |
| 80 // Finish the code by setting stack height, safepoint, and bailout | 74 // Finish the code by setting stack height, safepoint, and bailout |
| 81 // information on it. | 75 // information on it. |
| 82 void FinishCode(Handle<Code> code); | 76 void FinishCode(Handle<Code> code); |
| 83 | 77 |
| 84 enum IntegerSignedness { SIGNED_INT32, UNSIGNED_INT32 }; | 78 enum IntegerSignedness { SIGNED_INT32, UNSIGNED_INT32 }; |
| 85 // Support for converting LOperands to assembler types. | 79 // Support for converting LOperands to assembler types. |
| 86 Register ToRegister(LOperand* op) const; | 80 Register ToRegister(LOperand* op) const; |
| 87 Register ToRegister32(LOperand* op) const; | 81 Register ToRegister32(LOperand* op) const; |
| 88 Operand ToOperand(LOperand* op); | 82 Operand ToOperand(LOperand* op); |
| 89 Operand ToOperand32(LOperand* op); | 83 Operand ToOperand32(LOperand* op); |
| 90 enum StackMode { kMustUseFramePointer, kCanUseStackPointer }; | 84 MemOperand ToMemOperand(LOperand* op) const; |
| 91 MemOperand ToMemOperand(LOperand* op, | |
| 92 StackMode stack_mode = kCanUseStackPointer) const; | |
| 93 Handle<Object> ToHandle(LConstantOperand* op) const; | 85 Handle<Object> ToHandle(LConstantOperand* op) const; |
| 94 | 86 |
| 95 template <class LI> | 87 template <class LI> |
| 96 Operand ToShiftedRightOperand32(LOperand* right, LI* shift_info); | 88 Operand ToShiftedRightOperand32(LOperand* right, LI* shift_info); |
| 97 | 89 |
| 98 int JSShiftAmountFromLConstant(LOperand* constant) { | 90 int JSShiftAmountFromLConstant(LOperand* constant) { |
| 99 return ToInteger32(LConstantOperand::cast(constant)) & 0x1f; | 91 return ToInteger32(LConstantOperand::cast(constant)) & 0x1f; |
| 100 } | 92 } |
| 101 | 93 |
| 102 // TODO(jbramley): Examine these helpers and check that they make sense. | 94 // TODO(jbramley): Examine these helpers and check that they make sense. |
| (...skipping 256 matching lines...) |
| 359 | 351 |
| 360 // Builder that keeps track of safepoints in the code. The table itself is | 352 // Builder that keeps track of safepoints in the code. The table itself is |
| 361 // emitted at the end of the generated code. | 353 // emitted at the end of the generated code. |
| 362 SafepointTableBuilder safepoints_; | 354 SafepointTableBuilder safepoints_; |
| 363 | 355 |
| 364 // Compiler from a set of parallel moves to a sequential list of moves. | 356 // Compiler from a set of parallel moves to a sequential list of moves. |
| 365 LGapResolver resolver_; | 357 LGapResolver resolver_; |
| 366 | 358 |
| 367 Safepoint::Kind expected_safepoint_kind_; | 359 Safepoint::Kind expected_safepoint_kind_; |
| 368 | 360 |
| 369 // This flag is true when we are after a push (but before a call). | |
| 370 // In this situation, jssp no longer references the end of the stack slots so, | |
| 371 // we can only reference a stack slot via fp. | |
| 372 bool after_push_argument_; | |
| 373 // If we have inlined arguments, we are no longer able to use jssp because | |
| 374 // jssp is modified and we never know if we are in a block after or before | |
| 375 // the pop of the arguments (which restores jssp). | |
| 376 bool inlined_arguments_; | |
| 377 | |
| 378 int old_position_; | 361 int old_position_; |
| 379 | 362 |
| 380 class PushSafepointRegistersScope BASE_EMBEDDED { | 363 class PushSafepointRegistersScope BASE_EMBEDDED { |
| 381 public: | 364 public: |
| 382 explicit PushSafepointRegistersScope(LCodeGen* codegen) | 365 explicit PushSafepointRegistersScope(LCodeGen* codegen) |
| 383 : codegen_(codegen) { | 366 : codegen_(codegen) { |
| 384 DCHECK(codegen_->info()->is_calling()); | 367 DCHECK(codegen_->info()->is_calling()); |
| 385 DCHECK(codegen_->expected_safepoint_kind_ == Safepoint::kSimple); | 368 DCHECK(codegen_->expected_safepoint_kind_ == Safepoint::kSimple); |
| 386 codegen_->expected_safepoint_kind_ = Safepoint::kWithRegisters; | 369 codegen_->expected_safepoint_kind_ = Safepoint::kWithRegisters; |
| 387 | 370 |
| (...skipping 74 matching lines...) |
| 462 | 445 |
| 463 protected: | 446 protected: |
| 464 MacroAssembler* masm() const { return codegen_->masm(); } | 447 MacroAssembler* masm() const { return codegen_->masm(); } |
| 465 | 448 |
| 466 LCodeGen* codegen_; | 449 LCodeGen* codegen_; |
| 467 }; | 450 }; |
| 468 | 451 |
| 469 } } // namespace v8::internal | 452 } } // namespace v8::internal |
| 470 | 453 |
| 471 #endif // V8_ARM64_LITHIUM_CODEGEN_ARM64_H_ | 454 #endif // V8_ARM64_LITHIUM_CODEGEN_ARM64_H_ |
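
As an aside on the `& 0x1f` in `JSShiftAmountFromLConstant` above: JavaScript's shift operators only consume the low five bits of the shift count, so a constant shift amount is masked into the range 0–31 before it is emitted. A minimal standalone sketch of that masking (the `JSShiftAmount` helper name here is hypothetical, not part of the patch):

```cpp
#include <cassert>
#include <cstdint>

// JS shift operators use only the low 5 bits of the shift count (0..31),
// so a constant amount is masked with 0x1f, mirroring the helper above.
static int JSShiftAmount(int32_t constant) { return constant & 0x1f; }

int main() {
  assert(JSShiftAmount(1) == 1);
  assert(JSShiftAmount(32) == 0);   // (x << 32) === x for an int32 x in JS
  assert(JSShiftAmount(-1) == 31);  // low five bits of 0xffffffff
  return 0;
}
```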