OLD | NEW |
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/compiler/code-generator.h" | 5 #include "src/compiler/code-generator.h" |
6 | 6 |
7 #include "src/arm64/frames-arm64.h" | 7 #include "src/arm64/frames-arm64.h" |
8 #include "src/arm64/macro-assembler-arm64.h" | 8 #include "src/arm64/macro-assembler-arm64.h" |
9 #include "src/ast/scopes.h" | 9 #include "src/ast/scopes.h" |
10 #include "src/compiler/code-generator-impl.h" | 10 #include "src/compiler/code-generator-impl.h" |
(...skipping 261 matching lines...) |
272 public: | 272 public: |
273 OutOfLineRecordWrite(CodeGenerator* gen, Register object, Operand index, | 273 OutOfLineRecordWrite(CodeGenerator* gen, Register object, Operand index, |
274 Register value, Register scratch0, Register scratch1, | 274 Register value, Register scratch0, Register scratch1, |
275 RecordWriteMode mode) | 275 RecordWriteMode mode) |
276 : OutOfLineCode(gen), | 276 : OutOfLineCode(gen), |
277 object_(object), | 277 object_(object), |
278 index_(index), | 278 index_(index), |
279 value_(value), | 279 value_(value), |
280 scratch0_(scratch0), | 280 scratch0_(scratch0), |
281 scratch1_(scratch1), | 281 scratch1_(scratch1), |
282 mode_(mode) {} | 282 mode_(mode), |
| 283 must_save_lr_(!gen->frame_access_state()->has_frame()) {} |
283 | 284 |
284 void Generate() final { | 285 void Generate() final { |
285 if (mode_ > RecordWriteMode::kValueIsPointer) { | 286 if (mode_ > RecordWriteMode::kValueIsPointer) { |
286 __ JumpIfSmi(value_, exit()); | 287 __ JumpIfSmi(value_, exit()); |
287 } | 288 } |
288 __ CheckPageFlagClear(value_, scratch0_, | 289 __ CheckPageFlagClear(value_, scratch0_, |
289 MemoryChunk::kPointersToHereAreInterestingMask, | 290 MemoryChunk::kPointersToHereAreInterestingMask, |
290 exit()); | 291 exit()); |
291 RememberedSetAction const remembered_set_action = | 292 RememberedSetAction const remembered_set_action = |
292 mode_ > RecordWriteMode::kValueIsMap ? EMIT_REMEMBERED_SET | 293 mode_ > RecordWriteMode::kValueIsMap ? EMIT_REMEMBERED_SET |
293 : OMIT_REMEMBERED_SET; | 294 : OMIT_REMEMBERED_SET; |
294 SaveFPRegsMode const save_fp_mode = | 295 SaveFPRegsMode const save_fp_mode = |
295 frame()->DidAllocateDoubleRegisters() ? kSaveFPRegs : kDontSaveFPRegs; | 296 frame()->DidAllocateDoubleRegisters() ? kSaveFPRegs : kDontSaveFPRegs; |
296 if (!frame()->needs_frame()) { | 297 if (must_save_lr_) { |
297 // We need to save and restore lr if the frame was elided. | 298 // We need to save and restore lr if the frame was elided. |
298 __ Push(lr); | 299 __ Push(lr); |
299 } | 300 } |
300 RecordWriteStub stub(isolate(), object_, scratch0_, scratch1_, | 301 RecordWriteStub stub(isolate(), object_, scratch0_, scratch1_, |
301 remembered_set_action, save_fp_mode); | 302 remembered_set_action, save_fp_mode); |
302 __ Add(scratch1_, object_, index_); | 303 __ Add(scratch1_, object_, index_); |
303 __ CallStub(&stub); | 304 __ CallStub(&stub); |
304 if (!frame()->needs_frame()) { | 305 if (must_save_lr_) { |
305 __ Pop(lr); | 306 __ Pop(lr); |
306 } | 307 } |
307 } | 308 } |
308 | 309 |
309 private: | 310 private: |
310 Register const object_; | 311 Register const object_; |
311 Operand const index_; | 312 Operand const index_; |
312 Register const value_; | 313 Register const value_; |
313 Register const scratch0_; | 314 Register const scratch0_; |
314 Register const scratch1_; | 315 Register const scratch1_; |
315 RecordWriteMode const mode_; | 316 RecordWriteMode const mode_; |
| 317 bool must_save_lr_; |
316 }; | 318 }; |
317 | 319 |
318 | 320 |
319 Condition FlagsConditionToCondition(FlagsCondition condition) { | 321 Condition FlagsConditionToCondition(FlagsCondition condition) { |
320 switch (condition) { | 322 switch (condition) { |
321 case kEqual: | 323 case kEqual: |
322 return eq; | 324 return eq; |
323 case kNotEqual: | 325 case kNotEqual: |
324 return ne; | 326 return ne; |
325 case kSignedLessThan: | 327 case kSignedLessThan: |
(...skipping 133 matching lines...) |
459 __ asm_instr(i.OutputRegister##width(), i.InputRegister##width(0), \ | 461 __ asm_instr(i.OutputRegister##width(), i.InputRegister##width(0), \ |
460 i.InputRegister##width(1)); \ | 462 i.InputRegister##width(1)); \ |
461 } else { \ | 463 } else { \ |
462 uint32_t imm = \ | 464 uint32_t imm = \ |
463 static_cast<uint32_t>(i.InputOperand##width(1).ImmediateValue()); \ | 465 static_cast<uint32_t>(i.InputOperand##width(1).ImmediateValue()); \ |
464 __ asm_instr(i.OutputRegister##width(), i.InputRegister##width(0), \ | 466 __ asm_instr(i.OutputRegister##width(), i.InputRegister##width(0), \ |
465 imm % (width)); \ | 467 imm % (width)); \ |
466 } \ | 468 } \ |
467 } while (0) | 469 } while (0) |
468 | 470 |
| 471 void CodeGenerator::AssembleDeconstructFrame() { |
| 472 const CallDescriptor* descriptor = linkage()->GetIncomingDescriptor(); |
| 473 if (descriptor->IsCFunctionCall() || descriptor->UseNativeStack()) { |
| 474 __ Mov(csp, fp); |
| 475 } else { |
| 476 __ Mov(jssp, fp); |
| 477 } |
| 478 __ Pop(fp, lr); |
| 479 } |
469 | 480 |
470 void CodeGenerator::AssembleDeconstructActivationRecord(int stack_param_delta) { | 481 void CodeGenerator::AssembleDeconstructActivationRecord(int stack_param_delta) { |
471 int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta); | 482 int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta); |
472 if (sp_slot_delta > 0) { | 483 if (sp_slot_delta > 0) { |
473 __ Drop(sp_slot_delta); | 484 __ Drop(sp_slot_delta); |
474 } | 485 } |
475 frame_access_state()->SetFrameAccessToDefault(); | 486 frame_access_state()->SetFrameAccessToDefault(); |
476 } | 487 } |
477 | 488 |
478 | 489 |
479 void CodeGenerator::AssemblePrepareTailCall(int stack_param_delta) { | 490 void CodeGenerator::AssemblePrepareTailCall(int stack_param_delta) { |
480 int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta); | 491 int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta); |
481 if (sp_slot_delta < 0) { | 492 if (sp_slot_delta < 0) { |
482 __ Claim(-sp_slot_delta); | 493 __ Claim(-sp_slot_delta); |
483 frame_access_state()->IncreaseSPDelta(-sp_slot_delta); | 494 frame_access_state()->IncreaseSPDelta(-sp_slot_delta); |
484 } | 495 } |
485 if (frame()->needs_frame()) { | 496 if (frame_access_state()->has_frame()) { |
486 __ Ldr(lr, MemOperand(fp, StandardFrameConstants::kCallerPCOffset)); | 497 __ Ldr(lr, MemOperand(fp, StandardFrameConstants::kCallerPCOffset)); |
487 __ Ldr(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); | 498 __ Ldr(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); |
488 } | 499 } |
489 frame_access_state()->SetFrameAccessToSP(); | 500 frame_access_state()->SetFrameAccessToSP(); |
490 } | 501 } |
491 | 502 |
492 void CodeGenerator::AssemblePopArgumentsAdaptorFrame(Register args_reg, | 503 void CodeGenerator::AssemblePopArgumentsAdaptorFrame(Register args_reg, |
493 Register scratch1, | 504 Register scratch1, |
494 Register scratch2, | 505 Register scratch2, |
495 Register scratch3) { | 506 Register scratch3) { |
(...skipping 169 matching lines...) |
665 case kArchRet: | 676 case kArchRet: |
666 AssembleReturn(); | 677 AssembleReturn(); |
667 break; | 678 break; |
668 case kArchStackPointer: | 679 case kArchStackPointer: |
669 __ mov(i.OutputRegister(), masm()->StackPointer()); | 680 __ mov(i.OutputRegister(), masm()->StackPointer()); |
670 break; | 681 break; |
671 case kArchFramePointer: | 682 case kArchFramePointer: |
672 __ mov(i.OutputRegister(), fp); | 683 __ mov(i.OutputRegister(), fp); |
673 break; | 684 break; |
674 case kArchParentFramePointer: | 685 case kArchParentFramePointer: |
675 if (frame_access_state()->frame()->needs_frame()) { | 686 if (frame_access_state()->has_frame()) { |
676 __ ldr(i.OutputRegister(), MemOperand(fp, 0)); | 687 __ ldr(i.OutputRegister(), MemOperand(fp, 0)); |
677 } else { | 688 } else { |
678 __ mov(i.OutputRegister(), fp); | 689 __ mov(i.OutputRegister(), fp); |
679 } | 690 } |
680 break; | 691 break; |
681 case kArchTruncateDoubleToI: | 692 case kArchTruncateDoubleToI: |
682 __ TruncateDoubleToI(i.OutputRegister(), i.InputDoubleRegister(0)); | 693 __ TruncateDoubleToI(i.OutputRegister(), i.InputDoubleRegister(0)); |
683 break; | 694 break; |
684 case kArchStoreWithWriteBarrier: { | 695 case kArchStoreWithWriteBarrier: { |
685 RecordWriteMode mode = | 696 RecordWriteMode mode = |
(...skipping 799 matching lines...) |
1485 } | 1496 } |
1486 | 1497 |
1487 | 1498 |
1488 void CodeGenerator::AssembleDeoptimizerCall( | 1499 void CodeGenerator::AssembleDeoptimizerCall( |
1489 int deoptimization_id, Deoptimizer::BailoutType bailout_type) { | 1500 int deoptimization_id, Deoptimizer::BailoutType bailout_type) { |
1490 Address deopt_entry = Deoptimizer::GetDeoptimizationEntry( | 1501 Address deopt_entry = Deoptimizer::GetDeoptimizationEntry( |
1491 isolate(), deoptimization_id, bailout_type); | 1502 isolate(), deoptimization_id, bailout_type); |
1492 __ Call(deopt_entry, RelocInfo::RUNTIME_ENTRY); | 1503 __ Call(deopt_entry, RelocInfo::RUNTIME_ENTRY); |
1493 } | 1504 } |
1494 | 1505 |
| 1506 void CodeGenerator::AssembleSetupStackPointer() { |
| 1507 const CallDescriptor* descriptor = linkage()->GetIncomingDescriptor(); |
| 1508 if (descriptor->UseNativeStack() || descriptor->IsCFunctionCall()) { |
| 1509 __ SetStackPointer(csp); |
| 1510 } else { |
| 1511 __ SetStackPointer(jssp); |
| 1512 } |
| 1513 } |
1495 | 1514 |
1496 void CodeGenerator::AssemblePrologue() { | 1515 void CodeGenerator::AssemblePrologue() { |
1497 CallDescriptor* descriptor = linkage()->GetIncomingDescriptor(); | 1516 CallDescriptor* descriptor = linkage()->GetIncomingDescriptor(); |
1498 if (descriptor->UseNativeStack()) { | 1517 if (descriptor->UseNativeStack()) { |
1499 __ AssertCspAligned(); | 1518 __ AssertCspAligned(); |
1500 } | 1519 } |
1501 | 1520 |
1502 frame()->AlignFrame(16); | |
1503 int stack_shrink_slots = frame()->GetSpillSlotCount(); | 1521 int stack_shrink_slots = frame()->GetSpillSlotCount(); |
1504 if (frame()->needs_frame()) { | 1522 if (frame_access_state()->has_frame()) { |
1505 if (descriptor->IsJSFunctionCall()) { | 1523 if (descriptor->IsJSFunctionCall()) { |
1506 DCHECK(!descriptor->UseNativeStack()); | 1524 DCHECK(!descriptor->UseNativeStack()); |
1507 __ SetStackPointer(jssp); | |
1508 __ Prologue(this->info()->GeneratePreagedPrologue()); | 1525 __ Prologue(this->info()->GeneratePreagedPrologue()); |
1509 } else { | 1526 } else { |
1510 if (descriptor->UseNativeStack() || descriptor->IsCFunctionCall()) { | |
1511 __ SetStackPointer(csp); | |
1512 } else { | |
1513 __ SetStackPointer(jssp); | |
1514 } | |
1515 if (descriptor->IsCFunctionCall()) { | 1527 if (descriptor->IsCFunctionCall()) { |
1516 __ Push(lr, fp); | 1528 __ Push(lr, fp); |
1517 __ Mov(fp, masm_.StackPointer()); | 1529 __ Mov(fp, masm_.StackPointer()); |
1518 __ Claim(stack_shrink_slots); | 1530 __ Claim(stack_shrink_slots); |
1519 } else { | 1531 } else { |
1520 __ StubPrologue(info()->GetOutputStackFrameType(), | 1532 __ StubPrologue(info()->GetOutputStackFrameType(), |
1521 frame()->GetTotalFrameSlotCount()); | 1533 frame()->GetTotalFrameSlotCount()); |
1522 } | 1534 } |
1523 } | 1535 } |
1524 } else { | |
1525 if (descriptor->UseNativeStack()) { | |
1526 __ SetStackPointer(csp); | |
1527 } else { | |
1528 __ SetStackPointer(jssp); | |
1529 } | |
1530 frame()->SetElidedFrameSizeInSlots(0); | |
1531 } | 1536 } |
1532 frame_access_state()->SetFrameAccessToDefault(); | 1537 |
1533 if (info()->is_osr()) { | 1538 if (info()->is_osr()) { |
1534 // TurboFan OSR-compiled functions cannot be entered directly. | 1539 // TurboFan OSR-compiled functions cannot be entered directly. |
1535 __ Abort(kShouldNotDirectlyEnterOsrFunction); | 1540 __ Abort(kShouldNotDirectlyEnterOsrFunction); |
1536 | 1541 |
1537 // Unoptimized code jumps directly to this entrypoint while the unoptimized | 1542 // Unoptimized code jumps directly to this entrypoint while the unoptimized |
1538 // frame is still on the stack. Optimized code uses OSR values directly from | 1543 // frame is still on the stack. Optimized code uses OSR values directly from |
1539 // the unoptimized frame. Thus, all that needs to be done is to allocate the | 1544 // the unoptimized frame. Thus, all that needs to be done is to allocate the |
1540 // remaining stack slots. | 1545 // remaining stack slots. |
1541 if (FLAG_code_comments) __ RecordComment("-- OSR entrypoint --"); | 1546 if (FLAG_code_comments) __ RecordComment("-- OSR entrypoint --"); |
1542 osr_pc_offset_ = __ pc_offset(); | 1547 osr_pc_offset_ = __ pc_offset(); |
(...skipping 40 matching lines...) |
1583 | 1588 |
1584 // Restore fp registers. | 1589 // Restore fp registers. |
1585 CPURegList saves_fp = CPURegList(CPURegister::kFPRegister, kDRegSizeInBits, | 1590 CPURegList saves_fp = CPURegList(CPURegister::kFPRegister, kDRegSizeInBits, |
1586 descriptor->CalleeSavedFPRegisters()); | 1591 descriptor->CalleeSavedFPRegisters()); |
1587 if (saves_fp.Count() != 0) { | 1592 if (saves_fp.Count() != 0) { |
1588 __ PopCPURegList(saves_fp); | 1593 __ PopCPURegList(saves_fp); |
1589 } | 1594 } |
1590 | 1595 |
1591 int pop_count = static_cast<int>(descriptor->StackParameterCount()); | 1596 int pop_count = static_cast<int>(descriptor->StackParameterCount()); |
1592 if (descriptor->IsCFunctionCall()) { | 1597 if (descriptor->IsCFunctionCall()) { |
1593 __ Mov(csp, fp); | 1598 AssembleDeconstructFrame(); |
1594 __ Pop(fp, lr); | 1599 } else if (frame_access_state()->has_frame()) { |
1595 } else if (frame()->needs_frame()) { | |
1596 // Canonicalize JSFunction return sites for now. | 1600 // Canonicalize JSFunction return sites for now. |
1597 if (return_label_.is_bound()) { | 1601 if (return_label_.is_bound()) { |
1598 __ B(&return_label_); | 1602 __ B(&return_label_); |
1599 return; | 1603 return; |
1600 } else { | 1604 } else { |
1601 __ Bind(&return_label_); | 1605 __ Bind(&return_label_); |
| 1606 AssembleDeconstructFrame(); |
1602 if (descriptor->UseNativeStack()) { | 1607 if (descriptor->UseNativeStack()) { |
1603 __ Mov(csp, fp); | |
1604 pop_count += (pop_count & 1); // align | 1608 pop_count += (pop_count & 1); // align |
1605 } else { | |
1606 __ Mov(jssp, fp); | |
1607 } | 1609 } |
1608 __ Pop(fp, lr); | |
1609 } | 1610 } |
1610 } else if (descriptor->UseNativeStack()) { | 1611 } else if (descriptor->UseNativeStack()) { |
1611 pop_count += (pop_count & 1); // align | 1612 pop_count += (pop_count & 1); // align |
1612 } | 1613 } |
1613 __ Drop(pop_count); | 1614 __ Drop(pop_count); |
1614 | 1615 |
1615 if (descriptor->UseNativeStack()) { | 1616 if (descriptor->UseNativeStack()) { |
1616 __ AssertCspAligned(); | 1617 __ AssertCspAligned(); |
1617 } | 1618 } |
1618 __ Ret(); | 1619 __ Ret(); |
(...skipping 182 matching lines...) |
1801 padding_size -= kInstructionSize; | 1802 padding_size -= kInstructionSize; |
1802 } | 1803 } |
1803 } | 1804 } |
1804 } | 1805 } |
1805 | 1806 |
1806 #undef __ | 1807 #undef __ |
1807 | 1808 |
1808 } // namespace compiler | 1809 } // namespace compiler |
1809 } // namespace internal | 1810 } // namespace internal |
1810 } // namespace v8 | 1811 } // namespace v8 |