Chromium Code Reviews

Unified diff: src/ppc/macro-assembler-ppc.cc

Issue 1131783003: Embedded constant pools. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Fix debug-mode Arm issue. Created 5 years, 6 months ago
 // Copyright 2014 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include <assert.h>  // For assert
 #include <limits.h>  // For LONG_MIN, LONG_MAX.

 #include "src/v8.h"

 #if V8_TARGET_ARCH_PPC
(...skipping 85 matching lines; diff resumes at old line 96 / new line 96...)


 void MacroAssembler::CallJSEntry(Register target) {
   DCHECK(target.is(ip));
   Call(target);
 }


 int MacroAssembler::CallSize(Address target, RelocInfo::Mode rmode,
                              Condition cond) {
-  return (2 + kMovInstructions) * kInstrSize;
+  Operand mov_operand = Operand(reinterpret_cast<intptr_t>(target), rmode);
+  return (2 + instructions_required_for_mov(ip, mov_operand)) * kInstrSize;
 }


 int MacroAssembler::CallSizeNotPredictableCodeSize(Address target,
                                                    RelocInfo::Mode rmode,
                                                    Condition cond) {
-  return (2 + kMovInstructions) * kInstrSize;
+  return (2 + kMovInstructionsNoConstantPool) * kInstrSize;
 }

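Aside on the CallSize() change above: with embedded constant pools the mov that materializes the call target no longer has a fixed length, so CallSize() now asks instructions_required_for_mov() while the not-predictable variant pins the count to kMovInstructionsNoConstantPool. A standalone sketch of that accounting; the helper name, the instruction counts, and the reading of the "+ 2" (presumably the mtctr/bctrl pair) are assumptions for illustration, not V8's actual numbers:

    // Illustration only -- names and counts are assumptions, not V8's.
    #include <cstdio>

    constexpr int kInstrSize = 4;  // PPC instructions are 4 bytes wide.

    // Stand-in for instructions_required_for_mov(): a single load when the
    // value can come from the embedded constant pool, otherwise a
    // multi-instruction lis/ori-style immediate sequence.
    int InstructionsForMov(bool from_constant_pool, bool is_64bit) {
      if (from_constant_pool) return 1;
      return is_64bit ? 5 : 2;
    }

    int CallSizeSketch(bool from_constant_pool, bool is_64bit) {
      // "+ 2" mirrors the patch; presumably the mtctr/bctrl pair.
      return (2 + InstructionsForMov(from_constant_pool, is_64bit)) * kInstrSize;
    }

    int main() {
      std::printf("with pool: %d bytes, without: %d bytes\n",
                  CallSizeSketch(true, true), CallSizeSketch(false, true));
      return 0;
    }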
 void MacroAssembler::Call(Address target, RelocInfo::Mode rmode,
                           Condition cond) {
   BlockTrampolinePoolScope block_trampoline_pool(this);
   DCHECK(cond == al);

 #ifdef DEBUG
   // Check the expected size before generating code to ensure we assume the same
(...skipping 382 matching lines; diff resumes at old line 506 / new line 507...)
   mtlr(r0);
   bind(&done);
   if (and_then == kReturnAtEnd) {
     Ret();
   }
 }


 void MacroAssembler::PushFixedFrame(Register marker_reg) {
   mflr(r0);
-  if (marker_reg.is_valid()) {
-    Push(r0, fp, cp, marker_reg);
+  if (FLAG_enable_embedded_constant_pool) {
+    if (marker_reg.is_valid()) {
+      Push(r0, fp, kConstantPoolRegister, cp, marker_reg);
+    } else {
+      Push(r0, fp, kConstantPoolRegister, cp);
+    }
   } else {
-    Push(r0, fp, cp);
+    if (marker_reg.is_valid()) {
+      Push(r0, fp, cp, marker_reg);
+    } else {
+      Push(r0, fp, cp);
+    }
   }
 }


 void MacroAssembler::PopFixedFrame(Register marker_reg) {
-  if (marker_reg.is_valid()) {
-    Pop(r0, fp, cp, marker_reg);
+  if (FLAG_enable_embedded_constant_pool) {
+    if (marker_reg.is_valid()) {
+      Pop(r0, fp, kConstantPoolRegister, cp, marker_reg);
+    } else {
+      Pop(r0, fp, kConstantPoolRegister, cp);
+    }
   } else {
-    Pop(r0, fp, cp);
+    if (marker_reg.is_valid()) {
+      Pop(r0, fp, cp, marker_reg);
+    } else {
+      Pop(r0, fp, cp);
+    }
   }
   mtlr(r0);
 }


 const RegList MacroAssembler::kSafepointSavedRegisters = Register::kAllocatable;
 const int MacroAssembler::kNumSafepointSavedRegisters =
     Register::kMaxNumAllocatableRegisters;

 // Push and pop all registers that can hold pointers.
(...skipping 105 matching lines; diff resumes at old line 644 / new line 661...)
   }

   MovDoubleToInt64(
 #if !V8_TARGET_ARCH_PPC64
       dst_hi,
 #endif
       dst, double_dst);
 }


+void MacroAssembler::LoadConstantPoolPointerRegisterFromCodeTargetAddress(
+    Register code_target_address) {
+  lwz(kConstantPoolRegister,
+      MemOperand(code_target_address,
+                 Code::kConstantPoolOffset - Code::kHeaderSize));
+  add(kConstantPoolRegister, kConstantPoolRegister, code_target_address);
+}
+
+
+void MacroAssembler::LoadConstantPoolPointerRegister(Register base,
+                                                     int code_start_delta) {
+  add_label_offset(kConstantPoolRegister, base, ConstantPoolPosition(),
+                   code_start_delta);
+}
+
+
+void MacroAssembler::LoadConstantPoolPointerRegister() {
+  mov_label_addr(kConstantPoolRegister, ConstantPoolPosition());
+}
+
+
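The first helper above derives the pool pointer from a code entry address: the lwz reads the word at code_target_address + (Code::kConstantPoolOffset - Code::kHeaderSize), and the add suggests that word holds the pool's offset relative to the first instruction, so adding the entry address back yields the absolute pool address. A standalone sketch of that arithmetic; the two offsets below are invented stand-ins, not V8's real Code layout:

    // Sketch of LoadConstantPoolPointerRegisterFromCodeTargetAddress();
    // kAssumedHeaderSize / kAssumedConstantPoolOffset are placeholders for
    // Code::kHeaderSize and Code::kConstantPoolOffset.
    #include <cstdint>

    constexpr intptr_t kAssumedHeaderSize = 32;
    constexpr intptr_t kAssumedConstantPoolOffset = 24;

    intptr_t ConstantPoolAddress(intptr_t code_target_address) {
      // code_target_address points just past the Code header, so the field
      // sits at a small negative displacement from it (this is the lwz).
      int32_t pool_offset = *reinterpret_cast<const int32_t*>(
          code_target_address + (kAssumedConstantPoolOffset - kAssumedHeaderSize));
      // The stored value is relative to the instruction start; the add
      // rebases it to an absolute address.
      return code_target_address + pool_offset;
    }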
 void MacroAssembler::StubPrologue(int prologue_offset) {
   LoadSmiLiteral(r11, Smi::FromInt(StackFrame::STUB));
   PushFixedFrame(r11);
   // Adjust FP to point to saved FP.
   addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
+  if (FLAG_enable_embedded_constant_pool) {
+    // ip contains prologue address
+    LoadConstantPoolPointerRegister(ip, -prologue_offset);
+    set_constant_pool_available(true);
+  }
 }


 void MacroAssembler::Prologue(bool code_pre_aging, int prologue_offset) {
   {
     PredictableCodeSizeScope predictible_code_size_scope(
         this, kNoCodeAgeSequenceLength);
     Assembler::BlockTrampolinePoolScope block_trampoline_pool(this);
     // The following instructions must remain together and unmodified
     // for code aging to work properly.
(...skipping 12 matching lines; diff resumes at old line 681 / new line 724...)
     } else {
       // This matches the code found in GetNoCodeAgeSequence()
       PushFixedFrame(r4);
       // Adjust fp to point to saved fp.
       addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
       for (int i = 0; i < kNoCodeAgeSequenceNops; i++) {
         nop();
       }
     }
   }
+  if (FLAG_enable_embedded_constant_pool) {
+    // ip contains prologue address
+    LoadConstantPoolPointerRegister(ip, -prologue_offset);
+    set_constant_pool_available(true);
+  }
 }
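About LoadConstantPoolPointerRegister(ip, -prologue_offset) in the two prologues above: ip holds the address of the prologue itself, i.e. code start plus prologue_offset, and add_label_offset appears to compute base + label offset + delta, so passing -prologue_offset rebases ip back to the code start before the ConstantPoolPosition() offset is added. A one-function sketch of that reading; the helper semantics are my assumption, not taken from the assembler header:

    // Assumed semantics: add_label_offset(dst, base, label, delta)
    //   => dst = base + label_pc_offset + delta.
    #include <cstdint>

    intptr_t ConstantPoolPointer(intptr_t ip_value,  // code_start + prologue_offset
                                 int prologue_offset,
                                 int constant_pool_pc_offset) {
      // -prologue_offset cancels the offset baked into ip, leaving
      // code_start + constant_pool_pc_offset, the pool's address.
      return ip_value + constant_pool_pc_offset - prologue_offset;
    }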


 void MacroAssembler::EnterFrame(StackFrame::Type type,
                                 bool load_constant_pool_pointer_reg) {
-  LoadSmiLiteral(ip, Smi::FromInt(type));
-  PushFixedFrame(ip);
+  if (FLAG_enable_embedded_constant_pool && load_constant_pool_pointer_reg) {
+    PushFixedFrame();
+    // This path should not rely on ip containing code entry.
+    LoadConstantPoolPointerRegister();
+    LoadSmiLiteral(ip, Smi::FromInt(type));
+    push(ip);
+  } else {
+    LoadSmiLiteral(ip, Smi::FromInt(type));
+    PushFixedFrame(ip);
+  }
   // Adjust FP to point to saved FP.
   addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));

   mov(r0, Operand(CodeObject()));
   push(r0);
 }

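For orientation, here is one reading of the fixed-frame slot order these paths build when FLAG_enable_embedded_constant_pool is on, inferred purely from the Push(r0, fp, kConstantPoolRegister, cp, marker) order above (Push stores its first argument at the highest address); the real constants live in frames-ppc.h, so treat this as a sketch:

    // Assumed slot order, counted in pointer-sized slots up from sp after
    // PushFixedFrame(marker_reg) with embedded constant pools enabled.
    enum AssumedFixedFrameSlot {
      kMarkerSlot = 0,        // marker_reg (or the type Smi pushed separately)
      kContextSlot = 1,       // cp
      kConstantPoolSlot = 2,  // kConstantPoolRegister
      kSavedFpSlot = 3,       // caller fp -- fp is re-pointed at this slot
      kSavedLrSlot = 4,       // return address, staged through r0
    };
    // Without the flag, kConstantPoolSlot is absent and the slots above the
    // context shift down by one, which suggests kFixedFrameSizeFromFp has to
    // account for the extra slot when the flag is on.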
 int MacroAssembler::LeaveFrame(StackFrame::Type type, int stack_adjustment) {
+  ConstantPoolUnavailableScope constant_pool_unavailable(this);
   // r3: preserved
   // r4: preserved
   // r5: preserved

   // Drop the execution stack down to the frame pointer and restore
   // the caller's state.
   int frame_ends;
   LoadP(r0, MemOperand(fp, StandardFrameConstants::kCallerPCOffset));
   LoadP(ip, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
+  if (FLAG_enable_embedded_constant_pool) {
+    const int exitOffset = ExitFrameConstants::kConstantPoolOffset;
+    const int standardOffset = StandardFrameConstants::kConstantPoolOffset;
+    const int offset =
+        ((type == StackFrame::EXIT) ? exitOffset : standardOffset);
+    LoadP(kConstantPoolRegister, MemOperand(fp, offset));
+  }
   mtlr(r0);
   frame_ends = pc_offset();
   Add(sp, fp, StandardFrameConstants::kCallerSPOffset + stack_adjustment, r0);
   mr(fp, ip);
   return frame_ends;
 }
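The ConstantPoolUnavailableScope added at the top of LeaveFrame (and of LeaveExitFrame further down) presumably guards the teardown sequence: once the caller's state is being restored, kConstantPoolRegister can no longer be trusted for the current function, so pool-based literal loads such as the new LoadDoubleLiteral path must fall back to immediate sequences. A sketch of the RAII shape I am assuming, not the actual class from the assembler headers:

    // Assumed shape of ConstantPoolUnavailableScope: flip an assembler's
    // "constant pool available" bit for the duration of a scope.
    class ConstantPoolUnavailableScopeSketch {
     public:
      explicit ConstantPoolUnavailableScopeSketch(bool* available_flag)
          : flag_(available_flag), old_value_(*available_flag) {
        *flag_ = false;  // what set_constant_pool_available(false) would do
      }
      ~ConstantPoolUnavailableScopeSketch() { *flag_ = old_value_; }

     private:
      bool* flag_;
      bool old_value_;
    };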


 // ExitFrame layout (probably wrongish.. needs updating)
 //
(...skipping 26 matching lines; diff resumes at old line 752 / new line 816...)
   mflr(r0);
   Push(r0, fp);
   mr(fp, sp);
   // Reserve room for saved entry sp and code object.
   subi(sp, sp, Operand(ExitFrameConstants::kFrameSize));

   if (emit_debug_code()) {
     li(r8, Operand::Zero());
     StoreP(r8, MemOperand(fp, ExitFrameConstants::kSPOffset));
   }
+  if (FLAG_enable_embedded_constant_pool) {
+    StoreP(kConstantPoolRegister,
+           MemOperand(fp, ExitFrameConstants::kConstantPoolOffset));
+  }
   mov(r8, Operand(CodeObject()));
   StoreP(r8, MemOperand(fp, ExitFrameConstants::kCodeOffset));

   // Save the frame pointer and the context in top.
   mov(r8, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate())));
   StoreP(fp, MemOperand(r8));
   mov(r8, Operand(ExternalReference(Isolate::kContextAddress, isolate())));
   StoreP(cp, MemOperand(r8));

   // Optionally save all volatile double registers.
(...skipping 49 matching lines; diff resumes at old line 821 / new line 889...)
   // if the target platform will need alignment, so this is controlled from a
   // flag.
   return FLAG_sim_stack_alignment;
 #endif
 }


 void MacroAssembler::LeaveExitFrame(bool save_doubles, Register argument_count,
                                     bool restore_context,
                                     bool argument_count_is_length) {
+  ConstantPoolUnavailableScope constant_pool_unavailable(this);
   // Optionally restore all double registers.
   if (save_doubles) {
     // Calculate the stack location of the saved doubles and restore them.
     const int kNumRegs = DoubleRegister::kNumVolatileRegisters;
     const int offset =
         (ExitFrameConstants::kFrameSize + kNumRegs * kDoubleSize);
     addi(r6, fp, Operand(-offset));
     RestoreFPRegs(r6, 0, kNumRegs);
   }

(...skipping 2329 matching lines; diff resumes at old line 3170 / new line 3239...)
   isync();

   bind(&done);
 }


 void MacroAssembler::SetRelocatedValue(Register location, Register scratch,
                                        Register new_value) {
   lwz(scratch, MemOperand(location));

+  if (FLAG_enable_embedded_constant_pool) {
+    if (emit_debug_code()) {
+      // Check that the instruction sequence is a load from the constant pool
+      ExtractBitMask(scratch, scratch, 0x1f * B16);
+      cmpi(scratch, Operand(kConstantPoolRegister.code()));
+      Check(eq, kTheInstructionToPatchShouldBeALoadFromConstantPool);
+      // Scratch was clobbered. Restore it.
+      lwz(scratch, MemOperand(location));
+    }
+    // Get the address of the constant and patch it.
+    andi(scratch, scratch, Operand(kImm16Mask));
+    StorePX(new_value, MemOperand(kConstantPoolRegister, scratch));
+    return;
+  }
+
   // This code assumes a FIXED_SEQUENCE for lis/ori

   // At this point scratch is a lis instruction.
   if (emit_debug_code()) {
     And(scratch, scratch, Operand(kOpcodeMask | (0x1f * B16)));
     Cmpi(scratch, Operand(ADDIS), r0);
     Check(eq, kTheInstructionToPatchShouldBeALis);
     lwz(scratch, MemOperand(location));
   }

(...skipping 63 matching lines; diff resumes at old line 3253 / new line 3337...)
 #else
   FlushICache(location, 2 * kInstrSize, scratch);
 #endif
 }

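In the new constant-pool branches of SetRelocatedValue above and GetRelocatedValue below, the instruction at `location` is expected to be a D-form load whose base register is kConstantPoolRegister: ExtractBitMask with 0x1f * B16 pulls out the RA field for the debug check, and andi with kImm16Mask recovers the 16-bit displacement, which then indexes the pool via StorePX/LoadPX. A standalone sketch of that decoding; the field positions follow the PowerPC D-form encoding, while the struct and helper names are mine, not V8's:

    // Decode the base register and displacement of a PPC D-form load/store
    // (opcode | RT | RA | D). Sketch only; V8 does this with ExtractBitMask
    // and andi on the raw instruction word.
    #include <cstdint>

    struct DFormFields {
      uint32_t ra;  // base register, bits 16..20 from the LSB (0x1f << 16)
      uint32_t d;   // 16-bit displacement, zero-extended like kImm16Mask
    };

    inline DFormFields DecodeDForm(uint32_t instr) {
      DFormFields f;
      f.ra = (instr >> 16) & 0x1f;  // mirrors ExtractBitMask(.., 0x1f * B16)
      f.d = instr & 0xffff;         // mirrors andi(.., kImm16Mask)
      return f;
    }

Note that the hardware sign-extends D, while the patch zero-extends it via kImm16Mask, so this path presumably assumes small non-negative pool offsets.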
 void MacroAssembler::GetRelocatedValue(Register location, Register result,
                                        Register scratch) {
   lwz(result, MemOperand(location));

+  if (FLAG_enable_embedded_constant_pool) {
+    if (emit_debug_code()) {
+      // Check that the instruction sequence is a load from the constant pool
+      ExtractBitMask(result, result, 0x1f * B16);
+      cmpi(result, Operand(kConstantPoolRegister.code()));
+      Check(eq, kTheInstructionToPatchShouldBeALoadFromConstantPool);
+      lwz(result, MemOperand(location));
+    }
+    // Get the address of the constant and retrieve it.
+    andi(result, result, Operand(kImm16Mask));
+    LoadPX(result, MemOperand(kConstantPoolRegister, result));
+    return;
+  }
+
   // This code assumes a FIXED_SEQUENCE for lis/ori
   if (emit_debug_code()) {
     And(result, result, Operand(kOpcodeMask | (0x1f * B16)));
     Cmpi(result, Operand(ADDIS), r0);
     Check(eq, kTheInstructionShouldBeALis);
     lwz(result, MemOperand(location));
   }

   // result now holds a lis instruction. Extract the immediate.
   slwi(result, result, Operand(16));
(...skipping 417 matching lines; diff resumes at old line 3690 / new line 3788...)
 }


 void MacroAssembler::LoadSmiLiteral(Register dst, Smi* smi) {
   mov(dst, Operand(smi));
 }


 void MacroAssembler::LoadDoubleLiteral(DoubleRegister result, double value,
                                        Register scratch) {
+  if (FLAG_enable_embedded_constant_pool && is_constant_pool_available() &&
+      !(scratch.is(r0) && ConstantPoolAccessIsInOverflow())) {
+    ConstantPoolEntry::Access access = ConstantPoolAddEntry(value);
+    if (access == ConstantPoolEntry::OVERFLOWED) {
+      addis(scratch, kConstantPoolRegister, Operand::Zero());
+      lfd(result, MemOperand(scratch, 0));
+    } else {
+      lfd(result, MemOperand(kConstantPoolRegister, 0));
+    }
+    return;
+  }
+
   // avoid gcc strict aliasing error using union cast
   union {
     double dval;
 #if V8_TARGET_ARCH_PPC64
     intptr_t ival;
 #else
     intptr_t ival[2];
 #endif
   } litVal;

(...skipping 815 matching lines; diff resumes at old line 4525 / new line 4635...)
   }
   if (mag.shift > 0) srawi(result, result, mag.shift);
   ExtractBit(r0, dividend, 31);
   add(result, result, r0);
 }

 }  // namespace internal
 }  // namespace v8

 #endif  // V8_TARGET_ARCH_PPC