| OLD | NEW |
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include <assert.h> // For assert | 5 #include <assert.h> // For assert |
| 6 #include <limits.h> // For LONG_MIN, LONG_MAX. | 6 #include <limits.h> // For LONG_MIN, LONG_MAX. |
| 7 | 7 |
| 8 #include "src/v8.h" | 8 #include "src/v8.h" |
| 9 | 9 |
| 10 #if V8_TARGET_ARCH_PPC | 10 #if V8_TARGET_ARCH_PPC |
| (...skipping 85 matching lines...) |
| 96 | 96 |
| 97 | 97 |
| 98 void MacroAssembler::CallJSEntry(Register target) { | 98 void MacroAssembler::CallJSEntry(Register target) { |
| 99 DCHECK(target.is(ip)); | 99 DCHECK(target.is(ip)); |
| 100 Call(target); | 100 Call(target); |
| 101 } | 101 } |
| 102 | 102 |
| 103 | 103 |
| 104 int MacroAssembler::CallSize(Address target, RelocInfo::Mode rmode, | 104 int MacroAssembler::CallSize(Address target, RelocInfo::Mode rmode, |
| 105 Condition cond) { | 105 Condition cond) { |
| 106 return (2 + kMovInstructions) * kInstrSize; | 106 Operand mov_operand = Operand(reinterpret_cast<intptr_t>(target), rmode); |
| 107 return (2 + instructions_required_for_mov(ip, mov_operand)) * kInstrSize; |
| 107 } | 108 } |
| 108 | 109 |
| 109 | 110 |
| 110 int MacroAssembler::CallSizeNotPredictableCodeSize(Address target, | 111 int MacroAssembler::CallSizeNotPredictableCodeSize(Address target, |
| 111 RelocInfo::Mode rmode, | 112 RelocInfo::Mode rmode, |
| 112 Condition cond) { | 113 Condition cond) { |
| 113 return (2 + kMovInstructions) * kInstrSize; | 114 return (2 + kMovInstructionsNoConstantPool) * kInstrSize; |
| 114 } | 115 } |
| 115 | 116 |
| 116 | 117 |
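Note: the CallSize change above stops using the fixed kMovInstructions estimate and instead asks the assembler how many instructions the mov of the call target will actually take, since a constant-pool-based mov can be a single load while CallSizeNotPredictableCodeSize must still budget the full immediate sequence. The following is a standalone toy model of that computation, not V8 code; the instruction counts (1 for a pool load, 5 for a 64-bit immediate sequence, 2 for a 32-bit one, plus 2 for the call itself) are assumptions stated in the comments.

```cpp
// Toy model (not V8 code) of the call-size computation above, assuming:
//   - kInstrSize = 4 bytes (fixed-width PPC instructions)
//   - a mov of an arbitrary address costs 1 instruction when it can be a
//     constant-pool load, otherwise a full immediate sequence
//     (assumed 5 instructions on 64-bit targets, 2 on 32-bit)
//   - the call itself (mtctr/bctrl-style) is assumed to cost 2 instructions
#include <cstdio>

constexpr int kInstrSize = 4;
constexpr int kMovInstructionsConstantPool = 1;
constexpr int kMovInstructionsNoConstantPool = sizeof(void*) == 8 ? 5 : 2;

int InstructionsRequiredForMov(bool constant_pool_available) {
  return constant_pool_available ? kMovInstructionsConstantPool
                                 : kMovInstructionsNoConstantPool;
}

// Mirrors CallSize(): 2 call instructions + however many the mov needs.
int CallSize(bool constant_pool_available) {
  return (2 + InstructionsRequiredForMov(constant_pool_available)) * kInstrSize;
}

// Mirrors CallSizeNotPredictableCodeSize(): always budget the worst case.
int CallSizeNotPredictableCodeSize() {
  return (2 + kMovInstructionsNoConstantPool) * kInstrSize;
}

int main() {
  std::printf("call size with pool:    %d bytes\n", CallSize(true));
  std::printf("call size without pool: %d bytes\n", CallSize(false));
  std::printf("not-predictable size:   %d bytes\n",
              CallSizeNotPredictableCodeSize());
}
```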
| 117 void MacroAssembler::Call(Address target, RelocInfo::Mode rmode, | 118 void MacroAssembler::Call(Address target, RelocInfo::Mode rmode, |
| 118 Condition cond) { | 119 Condition cond) { |
| 119 BlockTrampolinePoolScope block_trampoline_pool(this); | 120 BlockTrampolinePoolScope block_trampoline_pool(this); |
| 120 DCHECK(cond == al); | 121 DCHECK(cond == al); |
| 121 | 122 |
| 122 #ifdef DEBUG | 123 #ifdef DEBUG |
| 123 // Check the expected size before generating code to ensure we assume the same | 124 // Check the expected size before generating code to ensure we assume the same |
| (...skipping 382 matching lines...) |
| 506 mtlr(r0); | 507 mtlr(r0); |
| 507 bind(&done); | 508 bind(&done); |
| 508 if (and_then == kReturnAtEnd) { | 509 if (and_then == kReturnAtEnd) { |
| 509 Ret(); | 510 Ret(); |
| 510 } | 511 } |
| 511 } | 512 } |
| 512 | 513 |
| 513 | 514 |
| 514 void MacroAssembler::PushFixedFrame(Register marker_reg) { | 515 void MacroAssembler::PushFixedFrame(Register marker_reg) { |
| 515 mflr(r0); | 516 mflr(r0); |
| 516 if (marker_reg.is_valid()) { | 517 if (FLAG_enable_embedded_constant_pool) { |
| 517 Push(r0, fp, cp, marker_reg); | 518 if (marker_reg.is_valid()) { |
| 519 Push(r0, fp, kConstantPoolRegister, cp, marker_reg); |
| 520 } else { |
| 521 Push(r0, fp, kConstantPoolRegister, cp); |
| 522 } |
| 518 } else { | 523 } else { |
| 519 Push(r0, fp, cp); | 524 if (marker_reg.is_valid()) { |
| 525 Push(r0, fp, cp, marker_reg); |
| 526 } else { |
| 527 Push(r0, fp, cp); |
| 528 } |
| 520 } | 529 } |
| 521 } | 530 } |
| 522 | 531 |
| 523 | 532 |
| 524 void MacroAssembler::PopFixedFrame(Register marker_reg) { | 533 void MacroAssembler::PopFixedFrame(Register marker_reg) { |
| 525 if (marker_reg.is_valid()) { | 534 if (FLAG_enable_embedded_constant_pool) { |
| 526 Pop(r0, fp, cp, marker_reg); | 535 if (marker_reg.is_valid()) { |
| 536 Pop(r0, fp, kConstantPoolRegister, cp, marker_reg); |
| 537 } else { |
| 538 Pop(r0, fp, kConstantPoolRegister, cp); |
| 539 } |
| 527 } else { | 540 } else { |
| 528 Pop(r0, fp, cp); | 541 if (marker_reg.is_valid()) { |
| 542 Pop(r0, fp, cp, marker_reg); |
| 543 } else { |
| 544 Pop(r0, fp, cp); |
| 545 } |
| 529 } | 546 } |
| 530 mtlr(r0); | 547 mtlr(r0); |
| 531 } | 548 } |
| 532 | 549 |
| 533 | 550 |
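Note: with FLAG_enable_embedded_constant_pool, PushFixedFrame/PopFixedFrame above insert kConstantPoolRegister between fp and cp, so the fixed frame grows by one slot. The sketch below only illustrates the layout implied by the push order; the authoritative offsets live in the frame-constants headers, and the slot positions shown here are assumptions derived from the hunk (first Push argument ending up at the highest address).

```cpp
// Illustrative only: slot layout implied by the push order in
// PushFixedFrame() above, assuming Push(a, b, ...) leaves its first
// argument at the highest stack address and each slot is pointer-sized.
// The real offsets come from StandardFrameConstants; these are assumptions.
#include <cstdio>
#include <string>
#include <vector>

void PrintLayout(bool embedded_constant_pool) {
  std::vector<std::string> slots = {"saved lr (r0)", "saved fp"};
  if (embedded_constant_pool) slots.push_back("constant pool pointer");
  slots.push_back("context (cp)");
  slots.push_back("frame marker");

  const int kPointerSize = static_cast<int>(sizeof(void*));
  std::printf("%s embedded constant pool:\n",
              embedded_constant_pool ? "with" : "without");
  // Slot 0 is pushed first, so it sits closest to the pre-push sp.
  for (size_t i = 0; i < slots.size(); ++i) {
    int offset_below_prepush_sp = -static_cast<int>(i + 1) * kPointerSize;
    std::printf("  [pre-push sp %4d]  %s\n", offset_below_prepush_sp,
                slots[i].c_str());
  }
}

int main() {
  PrintLayout(false);
  PrintLayout(true);
}
```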
| 534 const RegList MacroAssembler::kSafepointSavedRegisters = Register::kAllocatable; | 551 const RegList MacroAssembler::kSafepointSavedRegisters = Register::kAllocatable; |
| 535 const int MacroAssembler::kNumSafepointSavedRegisters = | 552 const int MacroAssembler::kNumSafepointSavedRegisters = |
| 536 Register::kMaxNumAllocatableRegisters; | 553 Register::kMaxNumAllocatableRegisters; |
| 537 | 554 |
| 538 // Push and pop all registers that can hold pointers. | 555 // Push and pop all registers that can hold pointers. |
| (...skipping 105 matching lines...) |
| 644 } | 661 } |
| 645 | 662 |
| 646 MovDoubleToInt64( | 663 MovDoubleToInt64( |
| 647 #if !V8_TARGET_ARCH_PPC64 | 664 #if !V8_TARGET_ARCH_PPC64 |
| 648 dst_hi, | 665 dst_hi, |
| 649 #endif | 666 #endif |
| 650 dst, double_dst); | 667 dst, double_dst); |
| 651 } | 668 } |
| 652 | 669 |
| 653 | 670 |
| 671 void MacroAssembler::LoadTargetConstantPoolPointerRegister(Register target) { |
| 672 lwz(kConstantPoolRegister, |
| 673 MemOperand(target, Code::kConstantPoolOffset - Code::kHeaderSize)); |
| 674 add(kConstantPoolRegister, kConstantPoolRegister, target); |
| 675 } |
| 676 |
| 677 |
| 678 void MacroAssembler::LoadConstantPoolPointerRegister(Register base, |
| 679 int code_start_delta) { |
| 680 add_label_offset(kConstantPoolRegister, base, ConstantPoolPosition(), |
| 681 code_start_delta); |
| 682 } |
| 683 |
| 684 |
| 685 void MacroAssembler::LoadConstantPoolPointerRegister() { |
| 686 mov_label_addr(kConstantPoolRegister, ConstantPoolPosition()); |
| 687 } |
| 688 |
| 689 |
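Note: the three helpers added here materialize kConstantPoolRegister in different situations: from a code entry address (reading the pool offset out of the Code header and adding it to the entry), from a base register plus a known delta back to the code start, or directly via mov_label_addr. The sketch below models only the first, address-arithmetic variant as a standalone program; the header layout, field width, and placement immediately before the entry point are assumptions for illustration, loosely following the lwz in the hunk. StubPrologue/Prologue below use the base+delta variant with ip and -prologue_offset to recover the code start.

```cpp
// Toy model (not V8 code) of LoadTargetConstantPoolPointerRegister():
// the code object stores a 32-bit offset from the code entry to the
// embedded constant pool, and the pool pointer is entry + that offset.
// The fake header below sits directly before the entry point, which is a
// simplification of the real Code object layout.
#include <cstdint>
#include <cstdio>

struct FakeCodeHeader {
  int32_t constant_pool_offset;  // offset from code entry to the pool
};

uintptr_t LoadConstantPoolPointer(uintptr_t code_entry) {
  // Mirrors: lwz  kConstantPoolRegister, <pool offset field>(target)
  //          add  kConstantPoolRegister, kConstantPoolRegister, target
  const auto* header = reinterpret_cast<const FakeCodeHeader*>(
      code_entry - sizeof(FakeCodeHeader));
  return code_entry + header->constant_pool_offset;
}

int main() {
  // Fake a code object: header immediately followed by the code body.
  struct { FakeCodeHeader header; unsigned char body[64]; } fake = {{32}, {}};
  uintptr_t entry = reinterpret_cast<uintptr_t>(fake.body);
  std::printf("pool = entry + %d -> %p\n",
              static_cast<int>(fake.header.constant_pool_offset),
              reinterpret_cast<void*>(LoadConstantPoolPointer(entry)));
}
```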
| 654 void MacroAssembler::StubPrologue(int prologue_offset) { | 690 void MacroAssembler::StubPrologue(int prologue_offset) { |
| 655 LoadSmiLiteral(r11, Smi::FromInt(StackFrame::STUB)); | 691 LoadSmiLiteral(r11, Smi::FromInt(StackFrame::STUB)); |
| 656 PushFixedFrame(r11); | 692 PushFixedFrame(r11); |
| 657 // Adjust FP to point to saved FP. | 693 // Adjust FP to point to saved FP. |
| 658 addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp)); | 694 addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp)); |
| 695 if (FLAG_enable_embedded_constant_pool) { |
| 696 // ip contains prologue address |
| 697 LoadConstantPoolPointerRegister(ip, -prologue_offset); |
| 698 set_constant_pool_available(true); |
| 699 } |
| 659 } | 700 } |
| 660 | 701 |
| 661 | 702 |
| 662 void MacroAssembler::Prologue(bool code_pre_aging, int prologue_offset) { | 703 void MacroAssembler::Prologue(bool code_pre_aging, int prologue_offset) { |
| 663 { | 704 { |
| 664 PredictableCodeSizeScope predictible_code_size_scope( | 705 PredictableCodeSizeScope predictible_code_size_scope( |
| 665 this, kNoCodeAgeSequenceLength); | 706 this, kNoCodeAgeSequenceLength); |
| 666 Assembler::BlockTrampolinePoolScope block_trampoline_pool(this); | 707 Assembler::BlockTrampolinePoolScope block_trampoline_pool(this); |
| 667 // The following instructions must remain together and unmodified | 708 // The following instructions must remain together and unmodified |
| 668 // for code aging to work properly. | 709 // for code aging to work properly. |
| (...skipping 12 matching lines...) |
| 681 } else { | 722 } else { |
| 682 // This matches the code found in GetNoCodeAgeSequence() | 723 // This matches the code found in GetNoCodeAgeSequence() |
| 683 PushFixedFrame(r4); | 724 PushFixedFrame(r4); |
| 684 // Adjust fp to point to saved fp. | 725 // Adjust fp to point to saved fp. |
| 685 addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp)); | 726 addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp)); |
| 686 for (int i = 0; i < kNoCodeAgeSequenceNops; i++) { | 727 for (int i = 0; i < kNoCodeAgeSequenceNops; i++) { |
| 687 nop(); | 728 nop(); |
| 688 } | 729 } |
| 689 } | 730 } |
| 690 } | 731 } |
| 732 if (FLAG_enable_embedded_constant_pool) { |
| 733 // ip contains prologue address |
| 734 LoadConstantPoolPointerRegister(ip, -prologue_offset); |
| 735 set_constant_pool_available(true); |
| 736 } |
| 691 } | 737 } |
| 692 | 738 |
| 693 | 739 |
| 694 void MacroAssembler::EnterFrame(StackFrame::Type type, | 740 void MacroAssembler::EnterFrame(StackFrame::Type type, |
| 695 bool load_constant_pool_pointer_reg) { | 741 bool load_constant_pool_pointer_reg) { |
| 696 LoadSmiLiteral(ip, Smi::FromInt(type)); | 742 if (FLAG_enable_embedded_constant_pool && load_constant_pool_pointer_reg) { |
| 697 PushFixedFrame(ip); | 743 PushFixedFrame(); |
| 744 // This path should not rely on ip containing code entry. |
| 745 LoadConstantPoolPointerRegister(); |
| 746 LoadSmiLiteral(ip, Smi::FromInt(type)); |
| 747 push(ip); |
| 748 } else { |
| 749 LoadSmiLiteral(ip, Smi::FromInt(type)); |
| 750 PushFixedFrame(ip); |
| 751 } |
| 698 // Adjust FP to point to saved FP. | 752 // Adjust FP to point to saved FP. |
| 699 addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp)); | 753 addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp)); |
| 700 | 754 |
| 701 mov(r0, Operand(CodeObject())); | 755 mov(r0, Operand(CodeObject())); |
| 702 push(r0); | 756 push(r0); |
| 703 } | 757 } |
| 704 | 758 |
| 705 | 759 |
| 706 int MacroAssembler::LeaveFrame(StackFrame::Type type, int stack_adjustment) { | 760 int MacroAssembler::LeaveFrame(StackFrame::Type type, int stack_adjustment) { |
| 761 ConstantPoolUnavailableScope constant_pool_unavailable(this); |
| 707 // r3: preserved | 762 // r3: preserved |
| 708 // r4: preserved | 763 // r4: preserved |
| 709 // r5: preserved | 764 // r5: preserved |
| 710 | 765 |
| 711 // Drop the execution stack down to the frame pointer and restore | 766 // Drop the execution stack down to the frame pointer and restore |
| 712 // the caller's state. | 767 // the caller's state. |
| 713 int frame_ends; | 768 int frame_ends; |
| 714 LoadP(r0, MemOperand(fp, StandardFrameConstants::kCallerPCOffset)); | 769 LoadP(r0, MemOperand(fp, StandardFrameConstants::kCallerPCOffset)); |
| 715 LoadP(ip, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); | 770 LoadP(ip, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); |
| 771 if (FLAG_enable_embedded_constant_pool) { |
| 772 const int exitOffset = ExitFrameConstants::kConstantPoolOffset; |
| 773 const int standardOffset = StandardFrameConstants::kConstantPoolOffset; |
| 774 const int offset = |
| 775 ((type == StackFrame::EXIT) ? exitOffset : standardOffset); |
| 776 LoadP(kConstantPoolRegister, MemOperand(fp, offset)); |
| 777 } |
| 716 mtlr(r0); | 778 mtlr(r0); |
| 717 frame_ends = pc_offset(); | 779 frame_ends = pc_offset(); |
| 718 Add(sp, fp, StandardFrameConstants::kCallerSPOffset + stack_adjustment, r0); | 780 Add(sp, fp, StandardFrameConstants::kCallerSPOffset + stack_adjustment, r0); |
| 719 mr(fp, ip); | 781 mr(fp, ip); |
| 720 return frame_ends; | 782 return frame_ends; |
| 721 } | 783 } |
| 722 | 784 |
| 723 | 785 |
| 724 // ExitFrame layout (probably wrongish.. needs updating) | 786 // ExitFrame layout (probably wrongish.. needs updating) |
| 725 // | 787 // |
| (...skipping 26 matching lines...) |
| 752 mflr(r0); | 814 mflr(r0); |
| 753 Push(r0, fp); | 815 Push(r0, fp); |
| 754 mr(fp, sp); | 816 mr(fp, sp); |
| 755 // Reserve room for saved entry sp and code object. | 817 // Reserve room for saved entry sp and code object. |
| 756 subi(sp, sp, Operand(ExitFrameConstants::kFrameSize)); | 818 subi(sp, sp, Operand(ExitFrameConstants::kFrameSize)); |
| 757 | 819 |
| 758 if (emit_debug_code()) { | 820 if (emit_debug_code()) { |
| 759 li(r8, Operand::Zero()); | 821 li(r8, Operand::Zero()); |
| 760 StoreP(r8, MemOperand(fp, ExitFrameConstants::kSPOffset)); | 822 StoreP(r8, MemOperand(fp, ExitFrameConstants::kSPOffset)); |
| 761 } | 823 } |
| 824 if (FLAG_enable_embedded_constant_pool) { |
| 825 StoreP(kConstantPoolRegister, |
| 826 MemOperand(fp, ExitFrameConstants::kConstantPoolOffset)); |
| 827 } |
| 762 mov(r8, Operand(CodeObject())); | 828 mov(r8, Operand(CodeObject())); |
| 763 StoreP(r8, MemOperand(fp, ExitFrameConstants::kCodeOffset)); | 829 StoreP(r8, MemOperand(fp, ExitFrameConstants::kCodeOffset)); |
| 764 | 830 |
| 765 // Save the frame pointer and the context in top. | 831 // Save the frame pointer and the context in top. |
| 766 mov(r8, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate()))); | 832 mov(r8, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate()))); |
| 767 StoreP(fp, MemOperand(r8)); | 833 StoreP(fp, MemOperand(r8)); |
| 768 mov(r8, Operand(ExternalReference(Isolate::kContextAddress, isolate()))); | 834 mov(r8, Operand(ExternalReference(Isolate::kContextAddress, isolate()))); |
| 769 StoreP(cp, MemOperand(r8)); | 835 StoreP(cp, MemOperand(r8)); |
| 770 | 836 |
| 771 // Optionally save all volatile double registers. | 837 // Optionally save all volatile double registers. |
| (...skipping 49 matching lines...) |
| 821 // if the target platform will need alignment, so this is controlled from a | 887 // if the target platform will need alignment, so this is controlled from a |
| 822 // flag. | 888 // flag. |
| 823 return FLAG_sim_stack_alignment; | 889 return FLAG_sim_stack_alignment; |
| 824 #endif | 890 #endif |
| 825 } | 891 } |
| 826 | 892 |
| 827 | 893 |
| 828 void MacroAssembler::LeaveExitFrame(bool save_doubles, Register argument_count, | 894 void MacroAssembler::LeaveExitFrame(bool save_doubles, Register argument_count, |
| 829 bool restore_context, | 895 bool restore_context, |
| 830 bool argument_count_is_length) { | 896 bool argument_count_is_length) { |
| 897 ConstantPoolUnavailableScope constant_pool_unavailable(this); |
| 831 // Optionally restore all double registers. | 898 // Optionally restore all double registers. |
| 832 if (save_doubles) { | 899 if (save_doubles) { |
| 833 // Calculate the stack location of the saved doubles and restore them. | 900 // Calculate the stack location of the saved doubles and restore them. |
| 834 const int kNumRegs = DoubleRegister::kNumVolatileRegisters; | 901 const int kNumRegs = DoubleRegister::kNumVolatileRegisters; |
| 835 const int offset = | 902 const int offset = |
| 836 (ExitFrameConstants::kFrameSize + kNumRegs * kDoubleSize); | 903 (ExitFrameConstants::kFrameSize + kNumRegs * kDoubleSize); |
| 837 addi(r6, fp, Operand(-offset)); | 904 addi(r6, fp, Operand(-offset)); |
| 838 RestoreFPRegs(r6, 0, kNumRegs); | 905 RestoreFPRegs(r6, 0, kNumRegs); |
| 839 } | 906 } |
| 840 | 907 |
| (...skipping 2327 matching lines...) |
| 3168 isync(); | 3235 isync(); |
| 3169 | 3236 |
| 3170 bind(&done); | 3237 bind(&done); |
| 3171 } | 3238 } |
| 3172 | 3239 |
| 3173 | 3240 |
| 3174 void MacroAssembler::SetRelocatedValue(Register location, Register scratch, | 3241 void MacroAssembler::SetRelocatedValue(Register location, Register scratch, |
| 3175 Register new_value) { | 3242 Register new_value) { |
| 3176 lwz(scratch, MemOperand(location)); | 3243 lwz(scratch, MemOperand(location)); |
| 3177 | 3244 |
| 3245 if (FLAG_enable_embedded_constant_pool) { |
| 3246 if (emit_debug_code()) { |
| 3247 // Check that the instruction sequence is a load from the constant pool |
| 3248 ExtractBitMask(scratch, scratch, 0x1f * B16); |
| 3249 cmpi(scratch, Operand(kConstantPoolRegister.code())); |
| 3250 Check(eq, kTheInstructionToPatchShouldBeALoadFromConstantPool); |
| 3251 // Scratch was clobbered. Restore it. |
| 3252 lwz(scratch, MemOperand(location)); |
| 3253 } |
| 3254 // Get the address of the constant and patch it. |
| 3255 andi(scratch, scratch, Operand(kImm16Mask)); |
| 3256 StorePX(new_value, MemOperand(kConstantPoolRegister, scratch)); |
| 3257 return; |
| 3258 } |
| 3259 |
| 3178 // This code assumes a FIXED_SEQUENCE for lis/ori | 3260 // This code assumes a FIXED_SEQUENCE for lis/ori |
| 3179 | 3261 |
| 3180 // At this point scratch is a lis instruction. | 3262 // At this point scratch is a lis instruction. |
| 3181 if (emit_debug_code()) { | 3263 if (emit_debug_code()) { |
| 3182 And(scratch, scratch, Operand(kOpcodeMask | (0x1f * B16))); | 3264 And(scratch, scratch, Operand(kOpcodeMask | (0x1f * B16))); |
| 3183 Cmpi(scratch, Operand(ADDIS), r0); | 3265 Cmpi(scratch, Operand(ADDIS), r0); |
| 3184 Check(eq, kTheInstructionToPatchShouldBeALis); | 3266 Check(eq, kTheInstructionToPatchShouldBeALis); |
| 3185 lwz(scratch, MemOperand(location)); | 3267 lwz(scratch, MemOperand(location)); |
| 3186 } | 3268 } |
| 3187 | 3269 |
| (...skipping 63 matching lines...) |
| 3251 #else | 3333 #else |
| 3252 FlushICache(location, 2 * kInstrSize, scratch); | 3334 FlushICache(location, 2 * kInstrSize, scratch); |
| 3253 #endif | 3335 #endif |
| 3254 } | 3336 } |
| 3255 | 3337 |
| 3256 | 3338 |
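Note: with the embedded pool, the relocated value is no longer split across a lis/ori pair; the instruction at `location` is a D-form load through kConstantPoolRegister, so SetRelocatedValue above decodes the 16-bit displacement and writes the new value into the pool slot, and GetRelocatedValue below reads it the same way. The following standalone sketch decodes a PPC D-form load word using the same fields the hunk masks (RA in bits 16..20 via 0x1f * B16, displacement via kImm16Mask); the pool register number and the bounds of the fake pool are assumptions for the demo.

```cpp
// Standalone sketch of the constant-pool patching path above, assuming:
//   - the patched instruction is a PPC D-form load: RA in bits 16..20 and a
//     16-bit byte displacement in the low 16 bits of the instruction word
//   - kConstantPoolRegister is r28 here purely for illustration
#include <cstdint>
#include <cstdio>

constexpr uint32_t kConstantPoolRegisterCode = 28;  // assumed for the demo
constexpr uint32_t kImm16Mask = 0xffff;

struct DFormLoad {
  uint32_t ra;      // base register field
  uint32_t offset;  // 16-bit byte displacement into the pool
};

DFormLoad DecodeDFormLoad(uint32_t instr) {
  DFormLoad d;
  d.ra = (instr >> 16) & 0x1f;    // like ExtractBitMask(..., 0x1f * B16)
  d.offset = instr & kImm16Mask;  // like andi(..., Operand(kImm16Mask))
  return d;
}

// Mirrors SetRelocatedValue()'s pool path: check the load goes through the
// pool register, then store the new value into the addressed pool slot.
void PatchPoolEntry(uint32_t instr, intptr_t* constant_pool, intptr_t new_value) {
  DFormLoad d = DecodeDFormLoad(instr);
  if (d.ra != kConstantPoolRegisterCode) {
    std::printf("not a load from the constant pool register\n");
    return;
  }
  constant_pool[d.offset / sizeof(intptr_t)] = new_value;
}

int main() {
  intptr_t pool[8] = {};
  // Hand-built "load rt, 16(r28)" word: only RA and the displacement matter.
  uint32_t instr = (kConstantPoolRegisterCode << 16) | 16u;
  PatchPoolEntry(instr, pool, 0x1234);
  std::printf("patched slot = %#lx\n",
              static_cast<unsigned long>(pool[16 / sizeof(intptr_t)]));
}
```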
| 3257 void MacroAssembler::GetRelocatedValue(Register location, Register result, | 3339 void MacroAssembler::GetRelocatedValue(Register location, Register result, |
| 3258 Register scratch) { | 3340 Register scratch) { |
| 3259 lwz(result, MemOperand(location)); | 3341 lwz(result, MemOperand(location)); |
| 3260 | 3342 |
| 3343 if (FLAG_enable_embedded_constant_pool) { |
| 3344 if (emit_debug_code()) { |
| 3345 // Check that the instruction sequence is a load from the constant pool |
| 3346 ExtractBitMask(result, result, 0x1f * B16); |
| 3347 cmpi(result, Operand(kConstantPoolRegister.code())); |
| 3348 Check(eq, kTheInstructionToPatchShouldBeALoadFromConstantPool); |
| 3349 lwz(result, MemOperand(location)); |
| 3350 } |
| 3351 // Get the address of the constant and retrieve it. |
| 3352 andi(result, result, Operand(kImm16Mask)); |
| 3353 LoadPX(result, MemOperand(kConstantPoolRegister, result)); |
| 3354 return; |
| 3355 } |
| 3356 |
| 3261 // This code assumes a FIXED_SEQUENCE for lis/ori | 3357 // This code assumes a FIXED_SEQUENCE for lis/ori |
| 3262 if (emit_debug_code()) { | 3358 if (emit_debug_code()) { |
| 3263 And(result, result, Operand(kOpcodeMask | (0x1f * B16))); | 3359 And(result, result, Operand(kOpcodeMask | (0x1f * B16))); |
| 3264 Cmpi(result, Operand(ADDIS), r0); | 3360 Cmpi(result, Operand(ADDIS), r0); |
| 3265 Check(eq, kTheInstructionShouldBeALis); | 3361 Check(eq, kTheInstructionShouldBeALis); |
| 3266 lwz(result, MemOperand(location)); | 3362 lwz(result, MemOperand(location)); |
| 3267 } | 3363 } |
| 3268 | 3364 |
| 3269 // result now holds a lis instruction. Extract the immediate. | 3365 // result now holds a lis instruction. Extract the immediate. |
| 3270 slwi(result, result, Operand(16)); | 3366 slwi(result, result, Operand(16)); |
| (...skipping 417 matching lines...) |
| 3688 } | 3784 } |
| 3689 | 3785 |
| 3690 | 3786 |
| 3691 void MacroAssembler::LoadSmiLiteral(Register dst, Smi* smi) { | 3787 void MacroAssembler::LoadSmiLiteral(Register dst, Smi* smi) { |
| 3692 mov(dst, Operand(smi)); | 3788 mov(dst, Operand(smi)); |
| 3693 } | 3789 } |
| 3694 | 3790 |
| 3695 | 3791 |
| 3696 void MacroAssembler::LoadDoubleLiteral(DoubleRegister result, double value, | 3792 void MacroAssembler::LoadDoubleLiteral(DoubleRegister result, double value, |
| 3697 Register scratch) { | 3793 Register scratch) { |
| 3794 if (FLAG_enable_embedded_constant_pool && is_constant_pool_available() && |
| 3795 !(scratch.is(r0) && ConstantPoolOverflow())) { |
| 3796 ConstantPoolEntry::Access access = ConstantPoolAddEntry(value); |
| 3797 if (access == ConstantPoolEntry::OVERFLOWED) { |
| 3798 addis(scratch, kConstantPoolRegister, Operand::Zero()); |
| 3799 lfd(result, MemOperand(scratch, 0)); |
| 3800 } else { |
| 3801 lfd(result, MemOperand(kConstantPoolRegister, 0)); |
| 3802 } |
| 3803 return; |
| 3804 } |
| 3805 |
| 3698 // avoid gcc strict aliasing error using union cast | 3806 // avoid gcc strict aliasing error using union cast |
| 3699 union { | 3807 union { |
| 3700 double dval; | 3808 double dval; |
| 3701 #if V8_TARGET_ARCH_PPC64 | 3809 #if V8_TARGET_ARCH_PPC64 |
| 3702 intptr_t ival; | 3810 intptr_t ival; |
| 3703 #else | 3811 #else |
| 3704 intptr_t ival[2]; | 3812 intptr_t ival[2]; |
| 3705 #endif | 3813 #endif |
| 3706 } litVal; | 3814 } litVal; |
| 3707 | 3815 |
| (...skipping 815 matching lines...) |
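Note: the new LoadDoubleLiteral fast path above adds the double to the embedded pool and reaches it with a single lfd through kConstantPoolRegister; when ConstantPoolAddEntry reports the entry as OVERFLOWED, an addis supplies the high part of the offset first, since lfd's displacement field is only 16 bits. The zero displacements emitted in the hunk are presumably placeholders fixed up when the pool is emitted. The sketch below only illustrates why a 16-bit signed displacement forces the two-instruction form; the threshold is the generic PowerPC D-form limit, not a V8 constant.

```cpp
// Illustration of the one- vs two-instruction load above: lfd's D field is a
// 16-bit signed byte displacement, so pool entries within +/-32KB of the pool
// pointer can be reached directly, while farther ("overflowed") entries need
// an addis to add the high half of the offset first.
#include <cstdint>
#include <cstdio>

bool FitsInDForm(int32_t byte_offset) {
  return byte_offset >= -32768 && byte_offset <= 32767;
}

void DescribeLoad(int32_t pool_offset) {
  if (FitsInDForm(pool_offset)) {
    std::printf("offset %6d: lfd fN, %d(kConstantPoolRegister)\n",
                pool_offset, pool_offset);
  } else {
    // Split so the remaining low 16 bits still fit as a signed displacement.
    int32_t hi = (pool_offset + 0x8000) >> 16;
    int32_t lo = pool_offset - (hi << 16);
    std::printf("offset %6d: addis scratch, kConstantPoolRegister, %d\n"
                "              lfd fN, %d(scratch)\n",
                pool_offset, hi, lo);
  }
}

int main() {
  DescribeLoad(48);      // regular region: single lfd
  DescribeLoad(70000);   // overflowed region: addis + lfd
}
```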
| 4523 } | 4631 } |
| 4524 if (mag.shift > 0) srawi(result, result, mag.shift); | 4632 if (mag.shift > 0) srawi(result, result, mag.shift); |
| 4525 ExtractBit(r0, dividend, 31); | 4633 ExtractBit(r0, dividend, 31); |
| 4526 add(result, result, r0); | 4634 add(result, result, r0); |
| 4527 } | 4635 } |
| 4528 | 4636 |
| 4529 } // namespace internal | 4637 } // namespace internal |
| 4530 } // namespace v8 | 4638 } // namespace v8 |
| 4531 | 4639 |
| 4532 #endif // V8_TARGET_ARCH_PPC | 4640 #endif // V8_TARGET_ARCH_PPC |