OLD | NEW |
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include <assert.h> // For assert | 5 #include <assert.h> // For assert |
6 #include <limits.h> // For LONG_MIN, LONG_MAX. | 6 #include <limits.h> // For LONG_MIN, LONG_MAX. |
7 | 7 |
8 #include "src/v8.h" | 8 #include "src/v8.h" |
9 | 9 |
10 #if V8_TARGET_ARCH_PPC | 10 #if V8_TARGET_ARCH_PPC |
(...skipping 657 matching lines...)
668 void MacroAssembler::LoadConstantPoolPointerRegister( | 668 void MacroAssembler::LoadConstantPoolPointerRegister( |
669 CodeObjectAccessMethod access_method, int ip_code_entry_delta) { | 669 CodeObjectAccessMethod access_method, int ip_code_entry_delta) { |
670 Register base; | 670 Register base; |
671 int constant_pool_offset = Code::kConstantPoolOffset - Code::kHeaderSize; | 671 int constant_pool_offset = Code::kConstantPoolOffset - Code::kHeaderSize; |
672 if (access_method == CAN_USE_IP) { | 672 if (access_method == CAN_USE_IP) { |
673 base = ip; | 673 base = ip; |
674 constant_pool_offset += ip_code_entry_delta; | 674 constant_pool_offset += ip_code_entry_delta; |
675 } else { | 675 } else { |
676 DCHECK(access_method == CONSTRUCT_INTERNAL_REFERENCE); | 676 DCHECK(access_method == CONSTRUCT_INTERNAL_REFERENCE); |
677 base = kConstantPoolRegister; | 677 base = kConstantPoolRegister; |
678 ConstantPoolUnavailableScope constant_pool_unavailable(this); | 678 RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE_ENCODED); |
679 | 679 |
680 // CheckBuffer() is called too frequently. This will pre-grow | 680 // CheckBuffer() is called too frequently. This will pre-grow |
681 // the buffer if needed to avoid splitting the relocation and instructions | 681 // the buffer if needed to avoid splitting the relocation and instructions |
682 EnsureSpaceFor(kMovInstructionsNoConstantPool * kInstrSize); | 682 EnsureSpaceFor(kMovInstructionsNoConstantPool * kInstrSize); |
683 | 683 |
684 uintptr_t code_start = reinterpret_cast<uintptr_t>(pc_) - pc_offset(); | 684 intptr_t code_start = reinterpret_cast<intptr_t>(pc_) - pc_offset(); |
685 mov(base, Operand(code_start, RelocInfo::INTERNAL_REFERENCE)); | 685 AddBoundInternalReferenceLoad(pc_offset()); |
| 686 bitwise_mov(base, code_start); |
686 } | 687 } |
687 LoadP(kConstantPoolRegister, MemOperand(base, constant_pool_offset)); | 688 LoadP(kConstantPoolRegister, MemOperand(base, constant_pool_offset)); |
688 } | 689 } |
689 #endif | 690 #endif |
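
For orientation, here is a minimal standalone sketch (not V8 code) of the address arithmetic that the CONSTRUCT_INTERNAL_REFERENCE path above relies on: the start of the instruction area is recovered as pc_ - pc_offset(), that value is materialized into `base` with a fixed-length bitwise_mov, and the constant-pool pointer is then loaded from a fixed (negative) delta off that start, mirroring Code::kConstantPoolOffset - Code::kHeaderSize. All names below (FakeCode, its fields, the test values) are hypothetical stand-ins, not V8 API.

#include <cstddef>
#include <cstdint>
#include <cstdio>

// Hypothetical stand-in for a code object: a header field holding the
// constant pool address, followed by the instruction bytes.
struct FakeCode {
  intptr_t constant_pool;         // plays the role of the kConstantPoolOffset slot
  unsigned char instructions[64]; // plays the role of the body after kHeaderSize
};

int main() {
  FakeCode code;
  code.constant_pool = 0x12340000;  // made-up constant pool address

  // Pretend the assembler has emitted 16 bytes so far.
  int pc_offset = 16;
  unsigned char* pc = code.instructions + pc_offset;

  // Same arithmetic as the patch: the start of the instruction area is
  // pc - pc_offset; this is the value bitwise_mov puts into `base`.
  intptr_t code_start = reinterpret_cast<intptr_t>(pc) - pc_offset;

  // The constant-pool slot sits at a fixed negative delta from that start,
  // analogous to Code::kConstantPoolOffset - Code::kHeaderSize.
  ptrdiff_t constant_pool_offset =
      -static_cast<ptrdiff_t>(offsetof(FakeCode, instructions));

  // Equivalent of LoadP(kConstantPoolRegister, MemOperand(base, offset)).
  intptr_t pool = *reinterpret_cast<intptr_t*>(code_start + constant_pool_offset);
  std::printf("constant pool = %#llx\n", static_cast<unsigned long long>(pool));
  return 0;
}

The new hunk records the address with RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE_ENCODED) and AddBoundInternalReferenceLoad(pc_offset()) instead of a relocatable mov, presumably so the full-width load keeps a fixed instruction count and the embedded address can still be fixed up later.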
690 | 691 |
691 | 692 |
692 void MacroAssembler::StubPrologue(int prologue_offset) { | 693 void MacroAssembler::StubPrologue(int prologue_offset) { |
693 LoadSmiLiteral(r11, Smi::FromInt(StackFrame::STUB)); | 694 LoadSmiLiteral(r11, Smi::FromInt(StackFrame::STUB)); |
694 PushFixedFrame(r11); | 695 PushFixedFrame(r11); |
695 // Adjust FP to point to saved FP. | 696 // Adjust FP to point to saved FP. |
(...skipping 4038 matching lines...)
4734 } | 4735 } |
4735 if (mag.shift > 0) srawi(result, result, mag.shift); | 4736 if (mag.shift > 0) srawi(result, result, mag.shift); |
4736 ExtractBit(r0, dividend, 31); | 4737 ExtractBit(r0, dividend, 31); |
4737 add(result, result, r0); | 4738 add(result, result, r0); |
4738 } | 4739 } |
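
The tail of this hunk is the final correction step of a magic-number signed division: after the arithmetic shift (srawi by mag.shift), the dividend's sign bit is extracted and added so the quotient truncates toward zero. A standalone sketch of that scheme follows, assuming the usual Hacker's-Delight-style multiplier; the magic/shift pair for dividing by 7 is only an illustrative example and is not taken from this patch, and arithmetic right shifts of negative values are assumed (as on PPC).

#include <cstdint>
#include <cstdio>

// Signed division by the constant 7 via a magic multiplier.
int32_t div_by_7(int32_t n) {
  const int32_t magic = static_cast<int32_t>(0x92492493u);  // multiplier for 7
  const int shift = 2;                                      // post-shift

  // High 32 bits of the 64-bit signed product (mulhw on PPC).
  int32_t q = static_cast<int32_t>((static_cast<int64_t>(magic) * n) >> 32);
  q += n;       // needed because the magic value is negative while 7 > 0
  q >>= shift;  // srawi(result, result, mag.shift)

  // ExtractBit(r0, dividend, 31); add(result, result, r0):
  // add 1 when the dividend is negative so the result rounds toward zero.
  q += static_cast<uint32_t>(n) >> 31;
  return q;
}

int main() {
  const int32_t tests[] = {-22, -7, -1, 0, 1, 6, 7, 22, 100};
  for (int32_t n : tests) {
    std::printf("%d / 7 = %d (expect %d)\n", n, div_by_7(n), n / 7);
  }
  return 0;
}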
4739 | 4740 |
4740 } // namespace internal | 4741 } // namespace internal |
4741 } // namespace v8 | 4742 } // namespace v8 |
4742 | 4743 |
4743 #endif // V8_TARGET_ARCH_PPC | 4744 #endif // V8_TARGET_ARCH_PPC |