Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(457)

Side by Side Diff: src/ppc/macro-assembler-ppc.cc

Issue 965823002: Contribution of PowerPC port (continuation of 422063005) - currency (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 5 years, 9 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « src/ppc/macro-assembler-ppc.h ('k') | test/cctest/cctest.status » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2014 the V8 project authors. All rights reserved. 1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include <assert.h> // For assert 5 #include <assert.h> // For assert
6 #include <limits.h> // For LONG_MIN, LONG_MAX. 6 #include <limits.h> // For LONG_MIN, LONG_MAX.
7 7
8 #include "src/v8.h" 8 #include "src/v8.h"
9 9
10 #if V8_TARGET_ARCH_PPC 10 #if V8_TARGET_ARCH_PPC
(...skipping 86 matching lines...) Expand 10 before | Expand all | Expand 10 after
97 97
98 98
99 void MacroAssembler::CallJSEntry(Register target) { 99 void MacroAssembler::CallJSEntry(Register target) {
100 DCHECK(target.is(ip)); 100 DCHECK(target.is(ip));
101 Call(target); 101 Call(target);
102 } 102 }
103 103
104 104
105 int MacroAssembler::CallSize(Address target, RelocInfo::Mode rmode, 105 int MacroAssembler::CallSize(Address target, RelocInfo::Mode rmode,
106 Condition cond) { 106 Condition cond) {
107 Operand mov_operand = Operand(reinterpret_cast<intptr_t>(target), rmode); 107 return (2 + kMovInstructions) * kInstrSize;
108 return (2 + instructions_required_for_mov(mov_operand)) * kInstrSize;
109 } 108 }
110 109
111 110
112 int MacroAssembler::CallSizeNotPredictableCodeSize(Address target, 111 int MacroAssembler::CallSizeNotPredictableCodeSize(Address target,
113 RelocInfo::Mode rmode, 112 RelocInfo::Mode rmode,
114 Condition cond) { 113 Condition cond) {
115 return (2 + kMovInstructionsNoConstantPool) * kInstrSize; 114 return (2 + kMovInstructions) * kInstrSize;
116 } 115 }
117 116
118 117
119 void MacroAssembler::Call(Address target, RelocInfo::Mode rmode, 118 void MacroAssembler::Call(Address target, RelocInfo::Mode rmode,
120 Condition cond) { 119 Condition cond) {
121 BlockTrampolinePoolScope block_trampoline_pool(this); 120 BlockTrampolinePoolScope block_trampoline_pool(this);
122 DCHECK(cond == al); 121 DCHECK(cond == al);
123 122
124 #ifdef DEBUG 123 #ifdef DEBUG
125 // Check the expected size before generating code to ensure we assume the same 124 // Check the expected size before generating code to ensure we assume the same
(...skipping 381 matching lines...) Expand 10 before | Expand all | Expand 10 after
507 mtlr(r0); 506 mtlr(r0);
508 bind(&done); 507 bind(&done);
509 if (and_then == kReturnAtEnd) { 508 if (and_then == kReturnAtEnd) {
510 Ret(); 509 Ret();
511 } 510 }
512 } 511 }
513 512
514 513
515 void MacroAssembler::PushFixedFrame(Register marker_reg) { 514 void MacroAssembler::PushFixedFrame(Register marker_reg) {
516 mflr(r0); 515 mflr(r0);
517 #if V8_OOL_CONSTANT_POOL
518 if (marker_reg.is_valid()) {
519 Push(r0, fp, kConstantPoolRegister, cp, marker_reg);
520 } else {
521 Push(r0, fp, kConstantPoolRegister, cp);
522 }
523 #else
524 if (marker_reg.is_valid()) { 516 if (marker_reg.is_valid()) {
525 Push(r0, fp, cp, marker_reg); 517 Push(r0, fp, cp, marker_reg);
526 } else { 518 } else {
527 Push(r0, fp, cp); 519 Push(r0, fp, cp);
528 } 520 }
529 #endif
530 } 521 }
531 522
532 523
533 void MacroAssembler::PopFixedFrame(Register marker_reg) { 524 void MacroAssembler::PopFixedFrame(Register marker_reg) {
534 #if V8_OOL_CONSTANT_POOL
535 if (marker_reg.is_valid()) {
536 Pop(r0, fp, kConstantPoolRegister, cp, marker_reg);
537 } else {
538 Pop(r0, fp, kConstantPoolRegister, cp);
539 }
540 #else
541 if (marker_reg.is_valid()) { 525 if (marker_reg.is_valid()) {
542 Pop(r0, fp, cp, marker_reg); 526 Pop(r0, fp, cp, marker_reg);
543 } else { 527 } else {
544 Pop(r0, fp, cp); 528 Pop(r0, fp, cp);
545 } 529 }
546 #endif
547 mtlr(r0); 530 mtlr(r0);
548 } 531 }
549 532
550 533
534 const RegList MacroAssembler::kSafepointSavedRegisters = Register::kAllocatable;
535 const int MacroAssembler::kNumSafepointSavedRegisters =
536 Register::kMaxNumAllocatableRegisters;
537
551 // Push and pop all registers that can hold pointers. 538 // Push and pop all registers that can hold pointers.
552 void MacroAssembler::PushSafepointRegisters() { 539 void MacroAssembler::PushSafepointRegisters() {
553 // Safepoints expect a block of kNumSafepointRegisters values on the 540 // Safepoints expect a block of kNumSafepointRegisters values on the
554 // stack, so adjust the stack for unsaved registers. 541 // stack, so adjust the stack for unsaved registers.
555 const int num_unsaved = kNumSafepointRegisters - kNumSafepointSavedRegisters; 542 const int num_unsaved = kNumSafepointRegisters - kNumSafepointSavedRegisters;
556 DCHECK(num_unsaved >= 0); 543 DCHECK(num_unsaved >= 0);
557 if (num_unsaved > 0) { 544 if (num_unsaved > 0) {
558 subi(sp, sp, Operand(num_unsaved * kPointerSize)); 545 subi(sp, sp, Operand(num_unsaved * kPointerSize));
559 } 546 }
560 MultiPush(kSafepointSavedRegisters); 547 MultiPush(kSafepointSavedRegisters);
(...skipping 96 matching lines...) Expand 10 before | Expand all | Expand 10 after
657 } 644 }
658 645
659 MovDoubleToInt64( 646 MovDoubleToInt64(
660 #if !V8_TARGET_ARCH_PPC64 647 #if !V8_TARGET_ARCH_PPC64
661 dst_hi, 648 dst_hi,
662 #endif 649 #endif
663 dst, double_dst); 650 dst, double_dst);
664 } 651 }
665 652
666 653
667 #if V8_OOL_CONSTANT_POOL
668 void MacroAssembler::LoadConstantPoolPointerRegister(
669 CodeObjectAccessMethod access_method, int ip_code_entry_delta) {
670 Register base;
671 int constant_pool_offset = Code::kConstantPoolOffset - Code::kHeaderSize;
672 if (access_method == CAN_USE_IP) {
673 base = ip;
674 constant_pool_offset += ip_code_entry_delta;
675 } else {
676 DCHECK(access_method == CONSTRUCT_INTERNAL_REFERENCE);
677 base = kConstantPoolRegister;
678 RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE_ENCODED);
679
680 // CheckBuffer() is called too frequently. This will pre-grow
 681 // the buffer if needed to avoid splitting the relocation and instructions
682 EnsureSpaceFor(kMovInstructionsNoConstantPool * kInstrSize);
683
684 intptr_t code_start = reinterpret_cast<intptr_t>(pc_) - pc_offset();
685 AddBoundInternalReferenceLoad(pc_offset());
686 bitwise_mov(base, code_start);
687 }
688 LoadP(kConstantPoolRegister, MemOperand(base, constant_pool_offset));
689 }
690 #endif
691
692
693 void MacroAssembler::StubPrologue(int prologue_offset) { 654 void MacroAssembler::StubPrologue(int prologue_offset) {
694 LoadSmiLiteral(r11, Smi::FromInt(StackFrame::STUB)); 655 LoadSmiLiteral(r11, Smi::FromInt(StackFrame::STUB));
695 PushFixedFrame(r11); 656 PushFixedFrame(r11);
696 // Adjust FP to point to saved FP. 657 // Adjust FP to point to saved FP.
697 addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp)); 658 addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
698 #if V8_OOL_CONSTANT_POOL
699 // ip contains prologue address
700 LoadConstantPoolPointerRegister(CAN_USE_IP, -prologue_offset);
701 set_ool_constant_pool_available(true);
702 #endif
703 } 659 }
704 660
705 661
706 void MacroAssembler::Prologue(bool code_pre_aging, int prologue_offset) { 662 void MacroAssembler::Prologue(bool code_pre_aging, int prologue_offset) {
707 { 663 {
708 PredictableCodeSizeScope predictible_code_size_scope( 664 PredictableCodeSizeScope predictible_code_size_scope(
709 this, kNoCodeAgeSequenceLength); 665 this, kNoCodeAgeSequenceLength);
710 Assembler::BlockTrampolinePoolScope block_trampoline_pool(this); 666 Assembler::BlockTrampolinePoolScope block_trampoline_pool(this);
711 // The following instructions must remain together and unmodified 667 // The following instructions must remain together and unmodified
712 // for code aging to work properly. 668 // for code aging to work properly.
(...skipping 12 matching lines...) Expand all
725 } else { 681 } else {
726 // This matches the code found in GetNoCodeAgeSequence() 682 // This matches the code found in GetNoCodeAgeSequence()
727 PushFixedFrame(r4); 683 PushFixedFrame(r4);
728 // Adjust fp to point to saved fp. 684 // Adjust fp to point to saved fp.
729 addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp)); 685 addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
730 for (int i = 0; i < kNoCodeAgeSequenceNops; i++) { 686 for (int i = 0; i < kNoCodeAgeSequenceNops; i++) {
731 nop(); 687 nop();
732 } 688 }
733 } 689 }
734 } 690 }
735 #if V8_OOL_CONSTANT_POOL
736 // ip contains prologue address
737 LoadConstantPoolPointerRegister(CAN_USE_IP, -prologue_offset);
738 set_ool_constant_pool_available(true);
739 #endif
740 } 691 }
741 692
742 693
743 void MacroAssembler::EnterFrame(StackFrame::Type type, 694 void MacroAssembler::EnterFrame(StackFrame::Type type,
744 bool load_constant_pool_pointer_reg) { 695 bool load_constant_pool_pointer_reg) {
745 if (FLAG_enable_ool_constant_pool && load_constant_pool_pointer_reg) { 696 LoadSmiLiteral(ip, Smi::FromInt(type));
746 PushFixedFrame(); 697 PushFixedFrame(ip);
747 #if V8_OOL_CONSTANT_POOL
748 // This path should not rely on ip containing code entry.
749 LoadConstantPoolPointerRegister(CONSTRUCT_INTERNAL_REFERENCE);
750 #endif
751 LoadSmiLiteral(ip, Smi::FromInt(type));
752 push(ip);
753 } else {
754 LoadSmiLiteral(ip, Smi::FromInt(type));
755 PushFixedFrame(ip);
756 }
757 // Adjust FP to point to saved FP. 698 // Adjust FP to point to saved FP.
758 addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp)); 699 addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
759 700
760 mov(r0, Operand(CodeObject())); 701 mov(r0, Operand(CodeObject()));
761 push(r0); 702 push(r0);
762 } 703 }
763 704
764 705
765 int MacroAssembler::LeaveFrame(StackFrame::Type type, int stack_adjustment) { 706 int MacroAssembler::LeaveFrame(StackFrame::Type type, int stack_adjustment) {
766 #if V8_OOL_CONSTANT_POOL
767 ConstantPoolUnavailableScope constant_pool_unavailable(this);
768 #endif
769 // r3: preserved 707 // r3: preserved
770 // r4: preserved 708 // r4: preserved
771 // r5: preserved 709 // r5: preserved
772 710
773 // Drop the execution stack down to the frame pointer and restore 711 // Drop the execution stack down to the frame pointer and restore
774 // the caller frame pointer, return address and constant pool pointer. 712 // the caller's state.
775 int frame_ends; 713 int frame_ends;
776 LoadP(r0, MemOperand(fp, StandardFrameConstants::kCallerPCOffset)); 714 LoadP(r0, MemOperand(fp, StandardFrameConstants::kCallerPCOffset));
777 LoadP(ip, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); 715 LoadP(ip, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
778 #if V8_OOL_CONSTANT_POOL
779 const int exitOffset = ExitFrameConstants::kConstantPoolOffset;
780 const int standardOffset = StandardFrameConstants::kConstantPoolOffset;
781 const int offset = ((type == StackFrame::EXIT) ? exitOffset : standardOffset);
782 LoadP(kConstantPoolRegister, MemOperand(fp, offset));
783 #endif
784 mtlr(r0); 716 mtlr(r0);
785 frame_ends = pc_offset(); 717 frame_ends = pc_offset();
786 Add(sp, fp, StandardFrameConstants::kCallerSPOffset + stack_adjustment, r0); 718 Add(sp, fp, StandardFrameConstants::kCallerSPOffset + stack_adjustment, r0);
787 mr(fp, ip); 719 mr(fp, ip);
788 return frame_ends; 720 return frame_ends;
789 } 721 }
790 722
791 723
792 // ExitFrame layout (probably wrongish.. needs updating) 724 // ExitFrame layout (probably wrongish.. needs updating)
793 // 725 //
(...skipping 26 matching lines...) Expand all
820 mflr(r0); 752 mflr(r0);
821 Push(r0, fp); 753 Push(r0, fp);
822 mr(fp, sp); 754 mr(fp, sp);
823 // Reserve room for saved entry sp and code object. 755 // Reserve room for saved entry sp and code object.
824 subi(sp, sp, Operand(ExitFrameConstants::kFrameSize)); 756 subi(sp, sp, Operand(ExitFrameConstants::kFrameSize));
825 757
826 if (emit_debug_code()) { 758 if (emit_debug_code()) {
827 li(r8, Operand::Zero()); 759 li(r8, Operand::Zero());
828 StoreP(r8, MemOperand(fp, ExitFrameConstants::kSPOffset)); 760 StoreP(r8, MemOperand(fp, ExitFrameConstants::kSPOffset));
829 } 761 }
830 #if V8_OOL_CONSTANT_POOL
831 StoreP(kConstantPoolRegister,
832 MemOperand(fp, ExitFrameConstants::kConstantPoolOffset));
833 #endif
834 mov(r8, Operand(CodeObject())); 762 mov(r8, Operand(CodeObject()));
835 StoreP(r8, MemOperand(fp, ExitFrameConstants::kCodeOffset)); 763 StoreP(r8, MemOperand(fp, ExitFrameConstants::kCodeOffset));
836 764
837 // Save the frame pointer and the context in top. 765 // Save the frame pointer and the context in top.
838 mov(r8, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate()))); 766 mov(r8, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate())));
839 StoreP(fp, MemOperand(r8)); 767 StoreP(fp, MemOperand(r8));
840 mov(r8, Operand(ExternalReference(Isolate::kContextAddress, isolate()))); 768 mov(r8, Operand(ExternalReference(Isolate::kContextAddress, isolate())));
841 StoreP(cp, MemOperand(r8)); 769 StoreP(cp, MemOperand(r8));
842 770
843 // Optionally save all volatile double registers. 771 // Optionally save all volatile double registers.
(...skipping 49 matching lines...) Expand 10 before | Expand all | Expand 10 after
893 // if the target platform will need alignment, so this is controlled from a 821 // if the target platform will need alignment, so this is controlled from a
894 // flag. 822 // flag.
895 return FLAG_sim_stack_alignment; 823 return FLAG_sim_stack_alignment;
896 #endif 824 #endif
897 } 825 }
898 826
899 827
900 void MacroAssembler::LeaveExitFrame(bool save_doubles, Register argument_count, 828 void MacroAssembler::LeaveExitFrame(bool save_doubles, Register argument_count,
901 bool restore_context, 829 bool restore_context,
902 bool argument_count_is_length) { 830 bool argument_count_is_length) {
903 #if V8_OOL_CONSTANT_POOL
904 ConstantPoolUnavailableScope constant_pool_unavailable(this);
905 #endif
906 // Optionally restore all double registers. 831 // Optionally restore all double registers.
907 if (save_doubles) { 832 if (save_doubles) {
908 // Calculate the stack location of the saved doubles and restore them. 833 // Calculate the stack location of the saved doubles and restore them.
909 const int kNumRegs = DoubleRegister::kNumVolatileRegisters; 834 const int kNumRegs = DoubleRegister::kNumVolatileRegisters;
910 const int offset = 835 const int offset =
911 (ExitFrameConstants::kFrameSize + kNumRegs * kDoubleSize); 836 (ExitFrameConstants::kFrameSize + kNumRegs * kDoubleSize);
912 addi(r6, fp, Operand(-offset)); 837 addi(r6, fp, Operand(-offset));
913 RestoreFPRegs(r6, 0, kNumRegs); 838 RestoreFPRegs(r6, 0, kNumRegs);
914 } 839 }
915 840
(...skipping 295 matching lines...) Expand 10 before | Expand all | Expand 10 after
1211 addi(sp, sp, Operand(StackHandlerConstants::kSize - kPointerSize)); 1136 addi(sp, sp, Operand(StackHandlerConstants::kSize - kPointerSize));
1212 StoreP(r4, MemOperand(ip)); 1137 StoreP(r4, MemOperand(ip));
1213 } 1138 }
1214 1139
1215 1140
1216 // PPC - make use of ip as a temporary register 1141 // PPC - make use of ip as a temporary register
1217 void MacroAssembler::JumpToHandlerEntry() { 1142 void MacroAssembler::JumpToHandlerEntry() {
1218 // Compute the handler entry address and jump to it. The handler table is 1143 // Compute the handler entry address and jump to it. The handler table is
1219 // a fixed array of (smi-tagged) code offsets. 1144 // a fixed array of (smi-tagged) code offsets.
1220 // r3 = exception, r4 = code object, r5 = state. 1145 // r3 = exception, r4 = code object, r5 = state.
1221 #if V8_OOL_CONSTANT_POOL
1222 ConstantPoolUnavailableScope constant_pool_unavailable(this);
1223 LoadP(kConstantPoolRegister, FieldMemOperand(r4, Code::kConstantPoolOffset));
1224 #endif
1225 LoadP(r6, FieldMemOperand(r4, Code::kHandlerTableOffset)); // Handler table. 1146 LoadP(r6, FieldMemOperand(r4, Code::kHandlerTableOffset)); // Handler table.
1147 addi(r4, r4, Operand(Code::kHeaderSize - kHeapObjectTag)); // Code start.
1226 addi(r6, r6, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); 1148 addi(r6, r6, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
1227 srwi(r5, r5, Operand(StackHandler::kKindWidth)); // Handler index. 1149 srwi(r5, r5, Operand(StackHandler::kKindWidth)); // Handler index.
1228 slwi(ip, r5, Operand(kPointerSizeLog2)); 1150 slwi(ip, r5, Operand(kPointerSizeLog2));
1229 add(ip, r6, ip); 1151 add(ip, r6, ip);
1230 LoadP(r5, MemOperand(ip)); // Smi-tagged offset. 1152 LoadP(r5, MemOperand(ip)); // Smi-tagged offset.
1231 addi(r4, r4, Operand(Code::kHeaderSize - kHeapObjectTag)); // Code start.
1232 SmiUntag(ip, r5); 1153 SmiUntag(ip, r5);
1233 add(r0, r4, ip); 1154 add(ip, r4, ip);
1234 mtctr(r0); 1155 Jump(ip);
1235 bctr();
1236 } 1156 }
1237 1157
1238 1158
1239 void MacroAssembler::Throw(Register value) { 1159 void MacroAssembler::Throw(Register value) {
1240 // Adjust this code if not the case. 1160 // Adjust this code if not the case.
1241 STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize); 1161 STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
1242 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0); 1162 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
1243 STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize); 1163 STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
1244 STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize); 1164 STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
1245 STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize); 1165 STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
(...skipping 855 matching lines...) Expand 10 before | Expand all | Expand 10 after
2101 } 2021 }
2102 2022
2103 2023
2104 void MacroAssembler::LoadWeakValue(Register value, Handle<WeakCell> cell, 2024 void MacroAssembler::LoadWeakValue(Register value, Handle<WeakCell> cell,
2105 Label* miss) { 2025 Label* miss) {
2106 GetWeakValue(value, cell); 2026 GetWeakValue(value, cell);
2107 JumpIfSmi(value, miss); 2027 JumpIfSmi(value, miss);
2108 } 2028 }
2109 2029
2110 2030
2031 void MacroAssembler::GetMapConstructor(Register result, Register map,
2032 Register temp, Register temp2) {
2033 Label done, loop;
2034 LoadP(result, FieldMemOperand(map, Map::kConstructorOrBackPointerOffset));
2035 bind(&loop);
2036 JumpIfSmi(result, &done);
2037 CompareObjectType(result, temp, temp2, MAP_TYPE);
2038 bne(&done);
2039 LoadP(result, FieldMemOperand(result, Map::kConstructorOrBackPointerOffset));
2040 b(&loop);
2041 bind(&done);
2042 }
2043
2044
2111 void MacroAssembler::TryGetFunctionPrototype(Register function, Register result, 2045 void MacroAssembler::TryGetFunctionPrototype(Register function, Register result,
2112 Register scratch, Label* miss, 2046 Register scratch, Label* miss,
2113 bool miss_on_bound_function) { 2047 bool miss_on_bound_function) {
2114 Label non_instance; 2048 Label non_instance;
2115 if (miss_on_bound_function) { 2049 if (miss_on_bound_function) {
2116 // Check that the receiver isn't a smi. 2050 // Check that the receiver isn't a smi.
2117 JumpIfSmi(function, miss); 2051 JumpIfSmi(function, miss);
2118 2052
2119 // Check that the function really is a function. Load map into result reg. 2053 // Check that the function really is a function. Load map into result reg.
2120 CompareObjectType(function, result, scratch, JS_FUNCTION_TYPE); 2054 CompareObjectType(function, result, scratch, JS_FUNCTION_TYPE);
(...skipping 36 matching lines...) Expand 10 before | Expand all | Expand 10 after
2157 2091
2158 // Get the prototype from the initial map. 2092 // Get the prototype from the initial map.
2159 LoadP(result, FieldMemOperand(result, Map::kPrototypeOffset)); 2093 LoadP(result, FieldMemOperand(result, Map::kPrototypeOffset));
2160 2094
2161 if (miss_on_bound_function) { 2095 if (miss_on_bound_function) {
2162 b(&done); 2096 b(&done);
2163 2097
2164 // Non-instance prototype: Fetch prototype from constructor field 2098 // Non-instance prototype: Fetch prototype from constructor field
2165 // in initial map. 2099 // in initial map.
2166 bind(&non_instance); 2100 bind(&non_instance);
2167 LoadP(result, FieldMemOperand(result, Map::kConstructorOffset)); 2101 GetMapConstructor(result, result, scratch, ip);
2168 } 2102 }
2169 2103
2170 // All done. 2104 // All done.
2171 bind(&done); 2105 bind(&done);
2172 } 2106 }
2173 2107
2174 2108
2175 void MacroAssembler::CallStub(CodeStub* stub, TypeFeedbackId ast_id, 2109 void MacroAssembler::CallStub(CodeStub* stub, TypeFeedbackId ast_id,
2176 Condition cond) { 2110 Condition cond) {
2177 DCHECK(AllowThisStubCall(stub)); // Stub calls are not allowed in some stubs. 2111 DCHECK(AllowThisStubCall(stub)); // Stub calls are not allowed in some stubs.
(...skipping 1186 matching lines...) Expand 10 before | Expand all | Expand 10 after
3364 isync(); 3298 isync();
3365 3299
3366 bind(&done); 3300 bind(&done);
3367 } 3301 }
3368 3302
3369 3303
3370 void MacroAssembler::SetRelocatedValue(Register location, Register scratch, 3304 void MacroAssembler::SetRelocatedValue(Register location, Register scratch,
3371 Register new_value) { 3305 Register new_value) {
3372 lwz(scratch, MemOperand(location)); 3306 lwz(scratch, MemOperand(location));
3373 3307
3374 #if V8_OOL_CONSTANT_POOL
3375 if (emit_debug_code()) {
3376 // Check that the instruction sequence is a load from the constant pool
3377 #if V8_TARGET_ARCH_PPC64
3378 And(scratch, scratch, Operand(kOpcodeMask | (0x1f * B16)));
3379 Cmpi(scratch, Operand(ADDI), r0);
3380 Check(eq, kTheInstructionShouldBeALi);
3381 lwz(scratch, MemOperand(location, kInstrSize));
3382 #endif
3383 ExtractBitMask(scratch, scratch, 0x1f * B16);
3384 cmpi(scratch, Operand(kConstantPoolRegister.code()));
3385 Check(eq, kTheInstructionToPatchShouldBeALoadFromConstantPool);
3386 // Scratch was clobbered. Restore it.
3387 lwz(scratch, MemOperand(location));
3388 }
3389 // Get the address of the constant and patch it.
3390 andi(scratch, scratch, Operand(kImm16Mask));
3391 StorePX(new_value, MemOperand(kConstantPoolRegister, scratch));
3392 #else
3393 // This code assumes a FIXED_SEQUENCE for lis/ori 3308 // This code assumes a FIXED_SEQUENCE for lis/ori
3394 3309
3395 // At this point scratch is a lis instruction. 3310 // At this point scratch is a lis instruction.
3396 if (emit_debug_code()) { 3311 if (emit_debug_code()) {
3397 And(scratch, scratch, Operand(kOpcodeMask | (0x1f * B16))); 3312 And(scratch, scratch, Operand(kOpcodeMask | (0x1f * B16)));
3398 Cmpi(scratch, Operand(ADDIS), r0); 3313 Cmpi(scratch, Operand(ADDIS), r0);
3399 Check(eq, kTheInstructionToPatchShouldBeALis); 3314 Check(eq, kTheInstructionToPatchShouldBeALis);
3400 lwz(scratch, MemOperand(location)); 3315 lwz(scratch, MemOperand(location));
3401 } 3316 }
3402 3317
(...skipping 56 matching lines...) Expand 10 before | Expand all | Expand 10 after
3459 rlwimi(scratch, new_value, 0, 16, 31); 3374 rlwimi(scratch, new_value, 0, 16, 31);
3460 stw(scratch, MemOperand(location, 4 * kInstrSize)); 3375 stw(scratch, MemOperand(location, 4 * kInstrSize));
3461 #endif 3376 #endif
3462 3377
3463 // Update the I-cache so the new lis and addic can be executed. 3378 // Update the I-cache so the new lis and addic can be executed.
3464 #if V8_TARGET_ARCH_PPC64 3379 #if V8_TARGET_ARCH_PPC64
3465 FlushICache(location, 5 * kInstrSize, scratch); 3380 FlushICache(location, 5 * kInstrSize, scratch);
3466 #else 3381 #else
3467 FlushICache(location, 2 * kInstrSize, scratch); 3382 FlushICache(location, 2 * kInstrSize, scratch);
3468 #endif 3383 #endif
3469 #endif
3470 } 3384 }
3471 3385
3472 3386
3473 void MacroAssembler::GetRelocatedValue(Register location, Register result, 3387 void MacroAssembler::GetRelocatedValue(Register location, Register result,
3474 Register scratch) { 3388 Register scratch) {
3475 lwz(result, MemOperand(location)); 3389 lwz(result, MemOperand(location));
3476 3390
3477 #if V8_OOL_CONSTANT_POOL
3478 if (emit_debug_code()) {
3479 // Check that the instruction sequence is a load from the constant pool
3480 #if V8_TARGET_ARCH_PPC64
3481 And(result, result, Operand(kOpcodeMask | (0x1f * B16)));
3482 Cmpi(result, Operand(ADDI), r0);
3483 Check(eq, kTheInstructionShouldBeALi);
3484 lwz(result, MemOperand(location, kInstrSize));
3485 #endif
3486 ExtractBitMask(result, result, 0x1f * B16);
3487 cmpi(result, Operand(kConstantPoolRegister.code()));
3488 Check(eq, kTheInstructionToPatchShouldBeALoadFromConstantPool);
3489 lwz(result, MemOperand(location));
3490 }
3491 // Get the address of the constant and retrieve it.
3492 andi(result, result, Operand(kImm16Mask));
3493 LoadPX(result, MemOperand(kConstantPoolRegister, result));
3494 #else
3495 // This code assumes a FIXED_SEQUENCE for lis/ori 3391 // This code assumes a FIXED_SEQUENCE for lis/ori
3496 if (emit_debug_code()) { 3392 if (emit_debug_code()) {
3497 And(result, result, Operand(kOpcodeMask | (0x1f * B16))); 3393 And(result, result, Operand(kOpcodeMask | (0x1f * B16)));
3498 Cmpi(result, Operand(ADDIS), r0); 3394 Cmpi(result, Operand(ADDIS), r0);
3499 Check(eq, kTheInstructionShouldBeALis); 3395 Check(eq, kTheInstructionShouldBeALis);
3500 lwz(result, MemOperand(location)); 3396 lwz(result, MemOperand(location));
3501 } 3397 }
3502 3398
3503 // result now holds a lis instruction. Extract the immediate. 3399 // result now holds a lis instruction. Extract the immediate.
3504 slwi(result, result, Operand(16)); 3400 slwi(result, result, Operand(16));
(...skipping 32 matching lines...) Expand 10 before | Expand all | Expand 10 after
3537 // scratch is now ori. 3433 // scratch is now ori.
3538 if (emit_debug_code()) { 3434 if (emit_debug_code()) {
3539 And(scratch, scratch, Operand(kOpcodeMask)); 3435 And(scratch, scratch, Operand(kOpcodeMask));
3540 Cmpi(scratch, Operand(ORI), r0); 3436 Cmpi(scratch, Operand(ORI), r0);
3541 Check(eq, kTheInstructionShouldBeAnOri); 3437 Check(eq, kTheInstructionShouldBeAnOri);
3542 lwz(scratch, MemOperand(location, 4 * kInstrSize)); 3438 lwz(scratch, MemOperand(location, 4 * kInstrSize));
3543 } 3439 }
3544 sldi(result, result, Operand(16)); 3440 sldi(result, result, Operand(16));
3545 rldimi(result, scratch, 0, 48); 3441 rldimi(result, scratch, 0, 48);
3546 #endif 3442 #endif
3547 #endif
3548 } 3443 }
3549 3444
3550 3445
3551 void MacroAssembler::CheckPageFlag( 3446 void MacroAssembler::CheckPageFlag(
3552 Register object, 3447 Register object,
3553 Register scratch, // scratch may be same register as object 3448 Register scratch, // scratch may be same register as object
3554 int mask, Condition cc, Label* condition_met) { 3449 int mask, Condition cc, Label* condition_met) {
3555 DCHECK(cc == ne || cc == eq); 3450 DCHECK(cc == ne || cc == eq);
3556 ClearRightImm(scratch, object, Operand(kPageSizeBits)); 3451 ClearRightImm(scratch, object, Operand(kPageSizeBits));
3557 LoadP(scratch, MemOperand(scratch, MemoryChunk::kFlagsOffset)); 3452 LoadP(scratch, MemOperand(scratch, MemoryChunk::kFlagsOffset));
(...skipping 365 matching lines...) Expand 10 before | Expand all | Expand 10 after
3923 } 3818 }
3924 3819
3925 3820
3926 void MacroAssembler::LoadSmiLiteral(Register dst, Smi* smi) { 3821 void MacroAssembler::LoadSmiLiteral(Register dst, Smi* smi) {
3927 mov(dst, Operand(smi)); 3822 mov(dst, Operand(smi));
3928 } 3823 }
3929 3824
3930 3825
3931 void MacroAssembler::LoadDoubleLiteral(DoubleRegister result, double value, 3826 void MacroAssembler::LoadDoubleLiteral(DoubleRegister result, double value,
3932 Register scratch) { 3827 Register scratch) {
3933 #if V8_OOL_CONSTANT_POOL
3934 // TODO(mbrandy): enable extended constant pool usage for doubles.
3935 // See ARM commit e27ab337 for a reference.
3936 if (is_ool_constant_pool_available() && !is_constant_pool_full()) {
3937 RelocInfo rinfo(pc_, value);
3938 ConstantPoolAddEntry(rinfo);
3939 #if V8_TARGET_ARCH_PPC64
3940 // We use 2 instruction sequence here for consistency with mov.
3941 li(scratch, Operand::Zero());
3942 lfdx(result, MemOperand(kConstantPoolRegister, scratch));
3943 #else
3944 lfd(result, MemOperand(kConstantPoolRegister, 0));
3945 #endif
3946 return;
3947 }
3948 #endif
3949
3950 // avoid gcc strict aliasing error using union cast 3828 // avoid gcc strict aliasing error using union cast
3951 union { 3829 union {
3952 double dval; 3830 double dval;
3953 #if V8_TARGET_ARCH_PPC64 3831 #if V8_TARGET_ARCH_PPC64
3954 intptr_t ival; 3832 intptr_t ival;
3955 #else 3833 #else
3956 intptr_t ival[2]; 3834 intptr_t ival[2];
3957 #endif 3835 #endif
3958 } litVal; 3836 } litVal;
3959 3837
(...skipping 775 matching lines...) Expand 10 before | Expand all | Expand 10 after
4735 } 4613 }
4736 if (mag.shift > 0) srawi(result, result, mag.shift); 4614 if (mag.shift > 0) srawi(result, result, mag.shift);
4737 ExtractBit(r0, dividend, 31); 4615 ExtractBit(r0, dividend, 31);
4738 add(result, result, r0); 4616 add(result, result, r0);
4739 } 4617 }
4740 4618
4741 } // namespace internal 4619 } // namespace internal
4742 } // namespace v8 4620 } // namespace v8
4743 4621
4744 #endif // V8_TARGET_ARCH_PPC 4622 #endif // V8_TARGET_ARCH_PPC
OLDNEW
« no previous file with comments | « src/ppc/macro-assembler-ppc.h ('k') | test/cctest/cctest.status » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698