| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #ifndef V8_ARM64_LITHIUM_CODEGEN_ARM64_H_ | 5 #ifndef V8_ARM64_LITHIUM_CODEGEN_ARM64_H_ |
| 6 #define V8_ARM64_LITHIUM_CODEGEN_ARM64_H_ | 6 #define V8_ARM64_LITHIUM_CODEGEN_ARM64_H_ |
| 7 | 7 |
| 8 #include "src/arm64/lithium-arm64.h" | 8 #include "src/arm64/lithium-arm64.h" |
| 9 | 9 |
| 10 #include "src/arm64/lithium-gap-resolver-arm64.h" | 10 #include "src/arm64/lithium-gap-resolver-arm64.h" |
| (...skipping 330 matching lines...) | |
| 341 void RecordAndWritePosition(int position) V8_OVERRIDE; | 341 void RecordAndWritePosition(int position) V8_OVERRIDE; |
| 342 void RecordSafepoint(LPointerMap* pointers, | 342 void RecordSafepoint(LPointerMap* pointers, |
| 343 Safepoint::Kind kind, | 343 Safepoint::Kind kind, |
| 344 int arguments, | 344 int arguments, |
| 345 Safepoint::DeoptMode mode); | 345 Safepoint::DeoptMode mode); |
| 346 void RecordSafepoint(LPointerMap* pointers, Safepoint::DeoptMode mode); | 346 void RecordSafepoint(LPointerMap* pointers, Safepoint::DeoptMode mode); |
| 347 void RecordSafepoint(Safepoint::DeoptMode mode); | 347 void RecordSafepoint(Safepoint::DeoptMode mode); |
| 348 void RecordSafepointWithRegisters(LPointerMap* pointers, | 348 void RecordSafepointWithRegisters(LPointerMap* pointers, |
| 349 int arguments, | 349 int arguments, |
| 350 Safepoint::DeoptMode mode); | 350 Safepoint::DeoptMode mode); |
| 351 void RecordSafepointWithRegistersAndDoubles(LPointerMap* pointers, | |
| 352 int arguments, | |
| 353 Safepoint::DeoptMode mode); | |
| 354 void RecordSafepointWithLazyDeopt(LInstruction* instr, | 351 void RecordSafepointWithLazyDeopt(LInstruction* instr, |
| 355 SafepointMode safepoint_mode); | 352 SafepointMode safepoint_mode); |
| 356 | 353 |
| 357 void EnsureSpaceForLazyDeopt(int space_needed) V8_OVERRIDE; | 354 void EnsureSpaceForLazyDeopt(int space_needed) V8_OVERRIDE; |
| 358 | 355 |
| 359 ZoneList<LEnvironment*> deoptimizations_; | 356 ZoneList<LEnvironment*> deoptimizations_; |
| 360 ZoneList<Deoptimizer::JumpTableEntry*> deopt_jump_table_; | 357 ZoneList<Deoptimizer::JumpTableEntry*> deopt_jump_table_; |
| 361 ZoneList<Handle<Object> > deoptimization_literals_; | 358 ZoneList<Handle<Object> > deoptimization_literals_; |
| 362 int inlined_function_count_; | 359 int inlined_function_count_; |
| 363 Scope* const scope_; | 360 Scope* const scope_; |
| (...skipping 17 matching lines...) | |
| 381 bool after_push_argument_; | 378 bool after_push_argument_; |
| 382 // If we have inlined arguments, we are no longer able to use jssp because | 379 // If we have inlined arguments, we are no longer able to use jssp because |
| 383 // jssp is modified and we never know if we are in a block after or before | 380 // jssp is modified and we never know if we are in a block after or before |
| 384 // the pop of the arguments (which restores jssp). | 381 // the pop of the arguments (which restores jssp). |
| 385 bool inlined_arguments_; | 382 bool inlined_arguments_; |
| 386 | 383 |
| 387 int old_position_; | 384 int old_position_; |
| 388 | 385 |
| 389 class PushSafepointRegistersScope BASE_EMBEDDED { | 386 class PushSafepointRegistersScope BASE_EMBEDDED { |
| 390 public: | 387 public: |
| 391 PushSafepointRegistersScope(LCodeGen* codegen, | 388 explicit PushSafepointRegistersScope(LCodeGen* codegen) |
| 392 Safepoint::Kind kind) | |
| 393 : codegen_(codegen) { | 389 : codegen_(codegen) { |
| 394 ASSERT(codegen_->info()->is_calling()); | 390 ASSERT(codegen_->info()->is_calling()); |
| 395 ASSERT(codegen_->expected_safepoint_kind_ == Safepoint::kSimple); | 391 ASSERT(codegen_->expected_safepoint_kind_ == Safepoint::kSimple); |
| 396 codegen_->expected_safepoint_kind_ = kind; | 392 codegen_->expected_safepoint_kind_ = Safepoint::kWithRegisters; |
| 397 | 393 |
| 398 UseScratchRegisterScope temps(codegen_->masm_); | 394 UseScratchRegisterScope temps(codegen_->masm_); |
| 399 // Preserve the value of lr which must be saved on the stack (the call to | 395 // Preserve the value of lr which must be saved on the stack (the call to |
| 400 // the stub will clobber it). | 396 // the stub will clobber it). |
| 401 Register to_be_pushed_lr = | 397 Register to_be_pushed_lr = |
| 402 temps.UnsafeAcquire(StoreRegistersStateStub::to_be_pushed_lr()); | 398 temps.UnsafeAcquire(StoreRegistersStateStub::to_be_pushed_lr()); |
| 403 codegen_->masm_->Mov(to_be_pushed_lr, lr); | 399 codegen_->masm_->Mov(to_be_pushed_lr, lr); |
| 404 switch (codegen_->expected_safepoint_kind_) { | 400 StoreRegistersStateStub stub(codegen_->isolate()); |
| 405 case Safepoint::kWithRegisters: { | 401 codegen_->masm_->CallStub(&stub); |
| 406 StoreRegistersStateStub stub(codegen_->isolate(), kDontSaveFPRegs); | |
| 407 codegen_->masm_->CallStub(&stub); | |
| 408 break; | |
| 409 } | |
| 410 case Safepoint::kWithRegistersAndDoubles: { | |
| 411 StoreRegistersStateStub stub(codegen_->isolate(), kSaveFPRegs); | |
| 412 codegen_->masm_->CallStub(&stub); | |
| 413 break; | |
| 414 } | |
| 415 default: | |
| 416 UNREACHABLE(); | |
| 417 } | |
| 418 } | 402 } |
| 419 | 403 |
| 420 ~PushSafepointRegistersScope() { | 404 ~PushSafepointRegistersScope() { |
| 421 Safepoint::Kind kind = codegen_->expected_safepoint_kind_; | 405 ASSERT(codegen_->expected_safepoint_kind_ == Safepoint::kWithRegisters); |
| 422 ASSERT((kind & Safepoint::kWithRegisters) != 0); | 406 RestoreRegistersStateStub stub(codegen_->isolate()); |
| 423 switch (kind) { | 407 codegen_->masm_->CallStub(&stub); |
| 424 case Safepoint::kWithRegisters: { | |
| 425 RestoreRegistersStateStub stub(codegen_->isolate(), kDontSaveFPRegs); | |
| 426 codegen_->masm_->CallStub(&stub); | |
| 427 break; | |
| 428 } | |
| 429 case Safepoint::kWithRegistersAndDoubles: { | |
| 430 RestoreRegistersStateStub stub(codegen_->isolate(), kSaveFPRegs); | |
| 431 codegen_->masm_->CallStub(&stub); | |
| 432 break; | |
| 433 } | |
| 434 default: | |
| 435 UNREACHABLE(); | |
| 436 } | |
| 437 codegen_->expected_safepoint_kind_ = Safepoint::kSimple; | 408 codegen_->expected_safepoint_kind_ = Safepoint::kSimple; |
| 438 } | 409 } |
| 439 | 410 |
| 440 private: | 411 private: |
| 441 LCodeGen* codegen_; | 412 LCodeGen* codegen_; |
| 442 }; | 413 }; |
| 443 | 414 |
| 444 friend class LDeferredCode; | 415 friend class LDeferredCode; |
| 445 friend class SafepointGenerator; | 416 friend class SafepointGenerator; |
| 446 DISALLOW_COPY_AND_ASSIGN(LCodeGen); | 417 DISALLOW_COPY_AND_ASSIGN(LCodeGen); |
| (...skipping 50 matching lines...) | |
| 497 | 468 |
| 498 protected: | 469 protected: |
| 499 MacroAssembler* masm() const { return codegen_->masm(); } | 470 MacroAssembler* masm() const { return codegen_->masm(); } |
| 500 | 471 |
| 501 LCodeGen* codegen_; | 472 LCodeGen* codegen_; |
| 502 }; | 473 }; |
| 503 | 474 |
| 504 } } // namespace v8::internal | 475 } } // namespace v8::internal |
| 505 | 476 |
| 506 #endif // V8_ARM64_LITHIUM_CODEGEN_ARM64_H_ | 477 #endif // V8_ARM64_LITHIUM_CODEGEN_ARM64_H_ |
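
For readers skimming the API change above: the scope's constructor no longer takes a `Safepoint::Kind`, the `...WithRegistersAndDoubles` record path is gone, and the store/restore stubs no longer take a save-FP-registers mode. Below is a minimal usage sketch of the simplified RAII scope at a deferred-code call site. Only `PushSafepointRegistersScope`, `RecordSafepointWithRegisters`, and the two stubs appear in this diff; `DoDeferredExample`, the `pointer_map()` accessor, and `Safepoint::kNoLazyDeopt` are assumptions drawn from the surrounding V8 code of this era, not from this change.

```cpp
// Hypothetical deferred-call helper inside LCodeGen (illustrative only).
void LCodeGen::DoDeferredExample(LInstruction* instr) {
  // Constructing the scope now always switches expected_safepoint_kind_ to
  // Safepoint::kWithRegisters and emits a call to StoreRegistersStateStub;
  // there is no longer a kind argument to choose a "with doubles" variant.
  PushSafepointRegistersScope scope(this);

  // ... emit the runtime or stub call that may clobber registers here ...

  // Record the safepoint with registers; RecordSafepointWithRegistersAndDoubles
  // has been removed in this change.
  RecordSafepointWithRegisters(
      instr->pointer_map(), 0, Safepoint::kNoLazyDeopt);

  // Leaving the scope calls RestoreRegistersStateStub and resets the expected
  // safepoint kind back to Safepoint::kSimple.
}
```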