| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #ifndef V8_ARM64_LITHIUM_CODEGEN_ARM64_H_ | 5 #ifndef V8_ARM64_LITHIUM_CODEGEN_ARM64_H_ |
| 6 #define V8_ARM64_LITHIUM_CODEGEN_ARM64_H_ | 6 #define V8_ARM64_LITHIUM_CODEGEN_ARM64_H_ |
| 7 | 7 |
| 8 #include "arm64/lithium-arm64.h" | 8 #include "arm64/lithium-arm64.h" |
| 9 | 9 |
| 10 #include "arm64/lithium-gap-resolver-arm64.h" | 10 #include "arm64/lithium-gap-resolver-arm64.h" |
| (...skipping 19 matching lines...) |
| 30 deopt_jump_table_(4, info->zone()), | 30 deopt_jump_table_(4, info->zone()), |
| 31 deoptimization_literals_(8, info->zone()), | 31 deoptimization_literals_(8, info->zone()), |
| 32 inlined_function_count_(0), | 32 inlined_function_count_(0), |
| 33 scope_(info->scope()), | 33 scope_(info->scope()), |
| 34 translations_(info->zone()), | 34 translations_(info->zone()), |
| 35 deferred_(8, info->zone()), | 35 deferred_(8, info->zone()), |
| 36 osr_pc_offset_(-1), | 36 osr_pc_offset_(-1), |
| 37 frame_is_built_(false), | 37 frame_is_built_(false), |
| 38 safepoints_(info->zone()), | 38 safepoints_(info->zone()), |
| 39 resolver_(this), | 39 resolver_(this), |
| 40 expected_safepoint_kind_(Safepoint::kSimple) { | 40 expected_safepoint_kind_(Safepoint::kSimple), |
| 41 after_push_argument_(false), |
| 42 inlined_arguments_(false) { |
| 41 PopulateDeoptimizationLiteralsWithInlinedFunctions(); | 43 PopulateDeoptimizationLiteralsWithInlinedFunctions(); |
| 42 } | 44 } |
| 43 | 45 |
| 46 ~LCodeGen() { |
| 47 ASSERT(!after_push_argument_ || inlined_arguments_); |
| 48 } |
| 49 |
| 44 // Simple accessors. | 50 // Simple accessors. |
| 45 Scope* scope() const { return scope_; } | 51 Scope* scope() const { return scope_; } |
| 46 | 52 |
| 47 int LookupDestination(int block_id) const { | 53 int LookupDestination(int block_id) const { |
| 48 return chunk()->LookupDestination(block_id); | 54 return chunk()->LookupDestination(block_id); |
| 49 } | 55 } |
| 50 | 56 |
| 51 bool IsNextEmittedBlock(int block_id) const { | 57 bool IsNextEmittedBlock(int block_id) const { |
| 52 return LookupDestination(block_id) == GetNextEmittedBlock(); | 58 return LookupDestination(block_id) == GetNextEmittedBlock(); |
| 53 } | 59 } |
| (...skipping 21 matching lines...) |
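The constructor above now initializes two bookkeeping flags, and the new destructor asserts that `after_push_argument_` may only remain set when `inlined_arguments_` is also set. The standalone sketch below illustrates that lifecycle under stated assumptions: the class and the `DoPushArguments`/`DoCall`/`MarkInlinedArguments` names are hypothetical stand-ins, not the real LCodeGen; only the asserted invariant (`!after_push_argument_ || inlined_arguments_`, i.e. a pending push implies inlined arguments) comes from this change.

```cpp
#include <cassert>

// Hypothetical, simplified stand-in for the flag bookkeeping added in this
// patch. Method names are illustrative assumptions; only the destructor
// invariant mirrors the CL.
class FlagSketch {
 public:
  FlagSketch() : after_push_argument_(false), inlined_arguments_(false) {}

  ~FlagSketch() {
    // Invariant from the patch: when code generation finishes, a pending
    // push may only be outstanding if the function has inlined arguments.
    assert(!after_push_argument_ || inlined_arguments_);
  }

  void DoPushArguments() { after_push_argument_ = true; }   // jssp moved.
  void DoCall()          { after_push_argument_ = false; }  // Pushes consumed.
  void MarkInlinedArguments() { inlined_arguments_ = true; }

 private:
  bool after_push_argument_;
  bool inlined_arguments_;
};

int main() {
  FlagSketch gen;
  gen.DoPushArguments();
  gen.DoCall();  // Balanced push/call: the destructor assertion holds.
  return 0;
}
```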
| 75 // information on it. | 81 // information on it. |
| 76 void FinishCode(Handle<Code> code); | 82 void FinishCode(Handle<Code> code); |
| 77 | 83 |
| 78 // Support for converting LOperands to assembler types. | 84 // Support for converting LOperands to assembler types. |
| 79 // LOperand must be a register. | 85 // LOperand must be a register. |
| 80 Register ToRegister(LOperand* op) const; | 86 Register ToRegister(LOperand* op) const; |
| 81 Register ToRegister32(LOperand* op) const; | 87 Register ToRegister32(LOperand* op) const; |
| 82 Operand ToOperand(LOperand* op); | 88 Operand ToOperand(LOperand* op); |
| 83 Operand ToOperand32I(LOperand* op); | 89 Operand ToOperand32I(LOperand* op); |
| 84 Operand ToOperand32U(LOperand* op); | 90 Operand ToOperand32U(LOperand* op); |
| 85 MemOperand ToMemOperand(LOperand* op) const; | 91 enum StackMode { kMustUseFramePointer, kCanUseStackPointer }; |
| 92 MemOperand ToMemOperand(LOperand* op, |
| 93 StackMode stack_mode = kCanUseStackPointer) const; |
| 86 Handle<Object> ToHandle(LConstantOperand* op) const; | 94 Handle<Object> ToHandle(LConstantOperand* op) const; |
| 87 | 95 |
| 88 // TODO(jbramley): Examine these helpers and check that they make sense. | 96 // TODO(jbramley): Examine these helpers and check that they make sense. |
| 89 // IsInteger32Constant returns true for smi constants, for example. | 97 // IsInteger32Constant returns true for smi constants, for example. |
| 90 bool IsInteger32Constant(LConstantOperand* op) const; | 98 bool IsInteger32Constant(LConstantOperand* op) const; |
| 91 bool IsSmi(LConstantOperand* op) const; | 99 bool IsSmi(LConstantOperand* op) const; |
| 92 | 100 |
| 93 int32_t ToInteger32(LConstantOperand* op) const; | 101 int32_t ToInteger32(LConstantOperand* op) const; |
| 94 Smi* ToSmi(LConstantOperand* op) const; | 102 Smi* ToSmi(LConstantOperand* op) const; |
| 95 double ToDouble(LConstantOperand* op) const; | 103 double ToDouble(LConstantOperand* op) const; |
| (...skipping 245 matching lines...) |
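`ToMemOperand` now takes a `StackMode` argument defaulting to `kCanUseStackPointer`. The sketch below shows one plausible way such a mode could steer the choice of base register between the frame pointer and jssp; the stand-in types, the `jssp_is_reliable` parameter, and the offsets are assumptions for illustration, not the real arm64 implementation.

```cpp
#include <cstdint>
#include <iostream>
#include <string>

// Hypothetical stand-ins; the real ToMemOperand works on LOperand and
// MacroAssembler registers. All offsets here are illustrative only.
enum StackMode { kMustUseFramePointer, kCanUseStackPointer };

struct MemOperandSketch {
  std::string base;  // "fp" or "jssp".
  int32_t offset;
};

// A sketch of the decision ToMemOperand might make: fp-relative addressing is
// always valid, while jssp-relative addressing is only safe when the caller
// allows it and jssp still points at the end of the stack slots.
MemOperandSketch ToMemOperandSketch(int slot_index,
                                    StackMode stack_mode,
                                    bool jssp_is_reliable) {
  const int32_t kSlotSize = 8;         // One pointer-sized stack slot.
  const int32_t kFpToFirstSlot = -16;  // Assumed fp-relative start of slots.
  if (stack_mode == kMustUseFramePointer || !jssp_is_reliable) {
    return {"fp", kFpToFirstSlot - slot_index * kSlotSize};
  }
  return {"jssp", slot_index * kSlotSize};
}

int main() {
  // With jssp unreliable (e.g. after a push), the sketch falls back to fp.
  MemOperandSketch m = ToMemOperandSketch(2, kCanUseStackPointer, false);
  std::cout << "[" << m.base << ", #" << m.offset << "]\n";
  return 0;
}
```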
| 341 | 349 |
| 342 // Builder that keeps track of safepoints in the code. The table itself is | 350 // Builder that keeps track of safepoints in the code. The table itself is |
| 343 // emitted at the end of the generated code. | 351 // emitted at the end of the generated code. |
| 344 SafepointTableBuilder safepoints_; | 352 SafepointTableBuilder safepoints_; |
| 345 | 353 |
| 346 // Compiler from a set of parallel moves to a sequential list of moves. | 354 // Compiler from a set of parallel moves to a sequential list of moves. |
| 347 LGapResolver resolver_; | 355 LGapResolver resolver_; |
| 348 | 356 |
| 349 Safepoint::Kind expected_safepoint_kind_; | 357 Safepoint::Kind expected_safepoint_kind_; |
| 350 | 358 |
| 359 // This flag is true when we are after a push (but before a call). |
| 360 // In this situation, jssp no longer references the end of the stack slots, so |
| 361 // we can only reference a stack slot via fp. |
| 362 bool after_push_argument_; |
| 363 // If we have inlined arguments, we are no longer able to use jssp because |
| 364 // jssp is modified and we never know if we are in a block after or before |
| 365 // the pop of the arguments (which restores jssp). |
| 366 bool inlined_arguments_; |
| 367 |
| 351 int old_position_; | 368 int old_position_; |
| 352 | 369 |
| 353 class PushSafepointRegistersScope BASE_EMBEDDED { | 370 class PushSafepointRegistersScope BASE_EMBEDDED { |
| 354 public: | 371 public: |
| 355 PushSafepointRegistersScope(LCodeGen* codegen, | 372 PushSafepointRegistersScope(LCodeGen* codegen, |
| 356 Safepoint::Kind kind) | 373 Safepoint::Kind kind) |
| 357 : codegen_(codegen) { | 374 : codegen_(codegen) { |
| 358 ASSERT(codegen_->info()->is_calling()); | 375 ASSERT(codegen_->info()->is_calling()); |
| 359 ASSERT(codegen_->expected_safepoint_kind_ == Safepoint::kSimple); | 376 ASSERT(codegen_->expected_safepoint_kind_ == Safepoint::kSimple); |
| 360 codegen_->expected_safepoint_kind_ = kind; | 377 codegen_->expected_safepoint_kind_ = kind; |
| (...skipping 100 matching lines...) |
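The comments on the new members explain why fp has to be used once jssp has moved. A small worked example, with made-up addresses, of the reasoning: an offset computed relative to jssp goes stale as soon as arguments are pushed, while an fp-relative offset is unaffected because fp stays fixed for the whole frame.

```cpp
#include <cassert>
#include <cstdint>

// Worked example (numbers are assumptions) of the comment above: a slot
// address recovered from jssp breaks after a push; recovered from fp it
// stays correct.
int main() {
  uint64_t fp   = 0x8000;  // Frame pointer: fixed for the frame.
  uint64_t jssp = 0x7fc0;  // End of the stack slots before any push.
  uint64_t slot = 0x7fd0;  // Address of some spill slot in between.

  uint64_t off_from_fp   = fp - slot;    // 0x30: valid for the whole frame.
  uint64_t off_from_jssp = slot - jssp;  // 0x10: valid only right now.

  jssp -= 16;  // Push two pointer-sized arguments: jssp moves down.

  assert(fp - off_from_fp == slot);      // fp-relative access still correct.
  assert(jssp + off_from_jssp != slot);  // jssp-relative access is now stale.
  return 0;
}
```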
| 461 | 478 |
| 462 protected: | 479 protected: |
| 463 MacroAssembler* masm() const { return codegen_->masm(); } | 480 MacroAssembler* masm() const { return codegen_->masm(); } |
| 464 | 481 |
| 465 LCodeGen* codegen_; | 482 LCodeGen* codegen_; |
| 466 }; | 483 }; |
| 467 | 484 |
| 468 } } // namespace v8::internal | 485 } } // namespace v8::internal |
| 469 | 486 |
| 470 #endif // V8_ARM64_LITHIUM_CODEGEN_ARM64_H_ | 487 #endif // V8_ARM64_LITHIUM_CODEGEN_ARM64_H_ |