OLD | NEW |
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_PPC | 5 #if V8_TARGET_ARCH_PPC |
6 | 6 |
7 #include "src/code-stubs.h" | 7 #include "src/code-stubs.h" |
8 #include "src/api-arguments.h" | 8 #include "src/api-arguments.h" |
9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" |
10 #include "src/bootstrapper.h" | 10 #include "src/bootstrapper.h" |
(...skipping 5770 matching lines...) |
5781 ExternalReference thunk_ref = | 5781 ExternalReference thunk_ref = |
5782 ExternalReference::invoke_accessor_getter_callback(isolate()); | 5782 ExternalReference::invoke_accessor_getter_callback(isolate()); |
5783 | 5783 |
5784 // +3 is to skip prolog, return address and name handle. | 5784 // +3 is to skip prolog, return address and name handle. |
5785 MemOperand return_value_operand( | 5785 MemOperand return_value_operand( |
5786 fp, (PropertyCallbackArguments::kReturnValueOffset + 3) * kPointerSize); | 5786 fp, (PropertyCallbackArguments::kReturnValueOffset + 3) * kPointerSize); |
5787 CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, | 5787 CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, |
5788 kStackUnwindSpace, NULL, return_value_operand, NULL); | 5788 kStackUnwindSpace, NULL, return_value_operand, NULL); |
5789 } | 5789 } |
5790 | 5790 |
| 5791 namespace { |
| 5792 |
| 5793 void GetTypedArrayBackingStore(MacroAssembler* masm, Register backing_store, |
| 5794 Register object, Register scratch, |
| 5795 DoubleRegister double_scratch) { |
| 5796 Label offset_is_not_smi, done_offset; |
| 5797 __ LoadP(scratch, FieldMemOperand(object, JSTypedArray::kBufferOffset)); |
| 5798 __ LoadP(backing_store, |
| 5799 FieldMemOperand(scratch, JSArrayBuffer::kBackingStoreOffset)); |
| 5800 __ LoadP(scratch, |
| 5801 FieldMemOperand(object, JSArrayBufferView::kByteOffsetOffset)); |
| 5802 __ JumpIfNotSmi(scratch, &offset_is_not_smi); |
| 5803 // offset is smi |
| 5804 __ SmiUntag(scratch); |
| 5805 __ b(&done_offset); |
| 5806 |
| 5807 // offset is a heap number |
| 5808 __ bind(&offset_is_not_smi); |
| 5809 __ lfd(double_scratch, FieldMemOperand(scratch, HeapNumber::kValueOffset)); |
| 5810 __ ConvertDoubleToInt64(double_scratch, |
| 5811 #if !V8_TARGET_ARCH_PPC64 |
| 5812 r0, |
| 5813 #endif |
| 5814 scratch, double_scratch); |
| 5815 __ bind(&done_offset); |
| 5816 __ add(backing_store, backing_store, scratch); |
| 5817 } |
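The helper above boils down to backing_store + byte_offset, where the byte offset is a tagged value (a Smi on the fast path, a HeapNumber otherwise). A minimal C++ model of the computation, with an illustrative layout rather than V8's real object layout:

    #include <cstdint>

    struct ArrayBuffer { uint8_t* backing_store; };
    struct TypedArrayView { ArrayBuffer* buffer; double byte_offset; };

    // Mirrors the final add in GetTypedArrayBackingStore.
    uint8_t* EffectiveBase(const TypedArrayView& view) {
      return view.buffer->backing_store +
             static_cast<intptr_t>(view.byte_offset);
    }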
| 5818 |
| 5819 void TypedArrayJumpTablePrologue(MacroAssembler* masm, Register object, |
| 5820 Register scratch, Register scratch2, |
| 5821 Label* table) { |
| 5822 __ LoadP(scratch, FieldMemOperand(object, JSObject::kElementsOffset)); |
| 5823 __ LoadP(scratch, FieldMemOperand(scratch, HeapObject::kMapOffset)); |
| 5824 __ lbz(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset)); |
| 5825 __ subi(scratch, scratch, |
| 5826 Operand(static_cast<uint8_t>(FIXED_INT8_ARRAY_TYPE))); |
| 5827 if (__ emit_debug_code()) { |
| 5828 __ cmpi(scratch, Operand::Zero()); |
| 5829 __ Check(ge, kOffsetOutOfRange); |
| 5830 } |
| 5831 __ ShiftLeftImm(scratch, scratch, Operand(kPointerSizeLog2)); |
| 5832 __ mov_label_addr(scratch2, table); |
| 5833 __ LoadPX(scratch, MemOperand(scratch2, scratch)); |
| 5834 __ Jump(scratch); |
| 5835 } |
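The prologue implements a computed goto: it maps the elements' instance type to a zero-based table index, scales it by the pointer size, loads the corresponding label address, and jumps. A rough C++ analogue, with function pointers standing in for the emitted label addresses:

    using Handler = void (*)();

    // instance_type - FIXED_INT8_ARRAY_TYPE, then an indirect jump
    // (the LoadPX + Jump pair above).
    void Dispatch(int instance_type, int first_type, const Handler table[]) {
      table[instance_type - first_type]();
    }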
| 5836 |
| 5837 void TypedArrayJumpTableEpilogue(MacroAssembler* masm, Label* table, Label* i8, |
| 5838 Label* u8, Label* i16, Label* u16, Label* i32, |
| 5839 Label* u32, Label* u8c) { |
| 5840 STATIC_ASSERT(FIXED_UINT8_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 1); |
| 5841 STATIC_ASSERT(FIXED_INT16_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 2); |
| 5842 STATIC_ASSERT(FIXED_UINT16_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 3); |
| 5843 STATIC_ASSERT(FIXED_INT32_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 4); |
| 5844 STATIC_ASSERT(FIXED_UINT32_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 5); |
| 5845 STATIC_ASSERT(FIXED_FLOAT32_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 6); |
| 5846 STATIC_ASSERT(FIXED_FLOAT64_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 7); |
| 5847 STATIC_ASSERT(FIXED_UINT8_CLAMPED_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 8); |
| 5848 |
| 5849 Label abort; |
| 5850 __ bind(table); |
| 5851 __ emit_label_addr(i8); // Int8Array |
| 5852 __ emit_label_addr(u8); // Uint8Array |
| 5853 __ emit_label_addr(i16); // Int16Array |
| 5854 __ emit_label_addr(u16); // Uint16Array |
| 5855 __ emit_label_addr(i32); // Int32Array |
| 5856 __ emit_label_addr(u32); // Uint32Array |
| 5857 __ emit_label_addr(&abort); // Float32Array - no atomics on floats |
| 5858 __ emit_label_addr(&abort); // Float64Array - no atomics on floats |
| 5859 __ emit_label_addr(u8c); // Uint8ClampedArray |
| 5860 |
| 5861 __ bind(&abort); |
| 5862 __ Abort(kNoReason); |
| 5863 } |
| 5864 |
| 5865 #if !V8_TARGET_ARCH_PPC64 |
| 5866 void ReturnInteger32(MacroAssembler* masm, DoubleRegister dst, Register value, |
| 5867 Label* use_heap_number) { |
| 5868 Label not_smi; |
| 5869 __ JumpIfNotSmiCandidate(value, r0, &not_smi); |
| 5870 __ SmiTag(r3, value); |
| 5871 __ blr(); |
| 5872 |
| 5873 __ bind(&not_smi); |
| 5874 __ ConvertIntToDouble(value, dst); |
| 5875 __ b(use_heap_number); |
| 5876 } |
| 5877 #endif |
| 5878 |
| 5879 void ReturnUnsignedInteger32(MacroAssembler* masm, DoubleRegister dst, |
| 5880 Register value, Label* use_heap_number) { |
| 5881 Label not_smi; |
| 5882 __ JumpIfNotUnsignedSmiCandidate(value, r0, &not_smi); |
| 5883 __ SmiTag(r3, value); |
| 5884 __ blr(); |
| 5885 |
| 5886 __ bind(&not_smi); |
| 5887 __ ConvertUnsignedIntToDouble(value, dst); |
| 5888 __ b(use_heap_number); |
| 5889 } |
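Both Return* helpers follow the same shape: values that fit in a Smi are tagged and returned directly; anything wider is converted to a double and handed to the heap-number path. A sketch of the 32-bit candidate checks and tagging (constants follow V8's 31-bit Smi payload on 32-bit targets):

    #include <cstdint>

    bool IsSmiCandidate(int32_t v) {
      return v >= -(INT32_C(1) << 30) && v < (INT32_C(1) << 30);
    }
    bool IsUnsignedSmiCandidate(uint32_t v) { return v < (UINT32_C(1) << 30); }

    // Tagging on 32-bit: shift left by one; a low bit of 0 marks a Smi.
    intptr_t SmiTag(int32_t v) { return static_cast<intptr_t>(v) << 1; }

On PPC64 a Smi holds a full 32-bit payload, which is why the 64-bit i32 path in the stub below can SmiTag unconditionally.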
| 5890 |
| 5891 void ReturnAllocatedHeapNumber(MacroAssembler* masm, DoubleRegister value, |
| 5892 Register scratch, Register scratch2, |
| 5893 Register scratch3) { |
| 5894 Label call_runtime; |
| 5895 __ LoadRoot(scratch3, Heap::kHeapNumberMapRootIndex); |
| 5896 __ AllocateHeapNumber(r3, scratch, scratch2, scratch3, &call_runtime); |
| 5897 __ stfd(value, FieldMemOperand(r3, HeapNumber::kValueOffset)); |
| 5898 __ blr(); |
| 5899 |
| 5900 __ bind(&call_runtime); |
| 5901 { |
| 5902 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
| 5903 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber); |
| 5904 __ stfd(value, FieldMemOperand(r3, HeapNumber::kValueOffset)); |
| 5905 } |
| 5906 __ blr(); |
| 5907 } |
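ReturnAllocatedHeapNumber tries an inline new-space allocation and only enters a frame to call the runtime when that fails; both paths then store the double payload. The shape of the control flow, with hypothetical stand-ins for the two allocators:

    static double* TryInlineAllocate() { return nullptr; }  // e.g. new space full
    static double* RuntimeAllocate() { return new double; } // may trigger GC

    double* AllocateBoxedDouble(double value) {
      double* box = TryInlineAllocate();
      if (box == nullptr) box = RuntimeAllocate();
      *box = value;  // the stfd in both paths above
      return box;
    }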
| 5908 |
| 5909 } // anonymous namespace |
| 5910 |
| 5911 #define ASSEMBLE_ATOMIC_LOAD(instr, dst, base, index) \ |
| 5912 do { \ |
| 5913 Label not_taken; \ |
| 5914 __ sync(); /* full barrier: orders all earlier accesses */ \
| 5915 __ instr(dst, MemOperand(base, index)); \
| 5916 __ bind(&not_taken); \
| 5917 __ cmp(dst, dst); /* always EQ: the branch exists only as a dependency */ \
| 5918 __ bne(&not_taken); /* never taken */ \
| 5919 __ isync(); /* branch + isync acts as an acquire barrier on Power */ \
| 5920 } while (0) |
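The macro emits Power's idiom for a sequentially consistent load: the leading sync orders all earlier accesses, and the never-taken compare-and-branch followed by isync keeps later loads from being satisfied before this one. In portable C++ the whole sequence is simply:

    #include <atomic>
    #include <cstdint>

    // Atomics.load is specified as sequentially consistent.
    int32_t AtomicLoad32(const std::atomic<int32_t>* p) {
      return p->load(std::memory_order_seq_cst);
    }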
| 5921 |
| 5922 void AtomicsLoadStub::Generate(MacroAssembler* masm) { |
| 5923 Register object = r4; |
| 5924 Register index = r3; // Index is an untagged word32. |
| 5925 Register backing_store = r5; |
| 5926 Label table, i8, u8, i16, u16, i32, u32; |
| 5927 |
| 5928 GetTypedArrayBackingStore(masm, backing_store, object, r6, d0); |
| 5929 TypedArrayJumpTablePrologue(masm, object, r6, r7, &table); |
| 5930 |
| 5931 __ bind(&i8); |
| 5932 ASSEMBLE_ATOMIC_LOAD(lbzx, r3, backing_store, index); |
| 5933 __ extsb(r3, r3); |
| 5934 __ SmiTag(r3); |
| 5935 __ blr(); |
| 5936 |
| 5937 __ bind(&u8); |
| 5938 ASSEMBLE_ATOMIC_LOAD(lbzx, r3, backing_store, index); |
| 5939 __ SmiTag(r3); |
| 5940 __ blr(); |
| 5941 |
| 5942 __ bind(&i16); |
| 5943 __ ShiftLeftImm(index, index, Operand(1)); |
| 5944 ASSEMBLE_ATOMIC_LOAD(lhax, r3, backing_store, index); |
| 5945 __ SmiTag(r3); |
| 5946 __ blr(); |
| 5947 |
| 5948 __ bind(&u16); |
| 5949 __ ShiftLeftImm(index, index, Operand(1)); |
| 5950 ASSEMBLE_ATOMIC_LOAD(lhzx, r3, backing_store, index); |
| 5951 __ SmiTag(r3); |
| 5952 __ blr(); |
| 5953 |
| 5954 Label use_heap_number; |
| 5955 |
| 5956 __ bind(&i32); |
| 5957 __ ShiftLeftImm(index, index, Operand(2)); |
| 5958 ASSEMBLE_ATOMIC_LOAD(lwax, r3, backing_store, index); |
| 5959 #if V8_TARGET_ARCH_PPC64 |
| 5960 __ SmiTag(r3); |
| 5961 __ blr(); |
| 5962 #else |
| 5963 ReturnInteger32(masm, d0, r3, &use_heap_number); |
| 5964 #endif |
| 5965 |
| 5966 __ bind(&u32); |
| 5967 __ ShiftLeftImm(index, index, Operand(2)); |
| 5968 ASSEMBLE_ATOMIC_LOAD(lwzx, r3, backing_store, index); |
| 5969 ReturnUnsignedInteger32(masm, d0, r3, &use_heap_number); |
| 5970 |
| 5971 __ bind(&use_heap_number); |
| 5972 ReturnAllocatedHeapNumber(masm, d0, r4, r5, r6); |
| 5973 |
| 5974 TypedArrayJumpTableEpilogue(masm, &table, &i8, &u8, &i16, &u16, &i32, &u32, |
| 5975 &u8); // Uint8ClampedArray shares the Uint8 path. |
| 5976 } |
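Each typed path scales the untagged element index by the element size before the load; for the 32-bit case the effect is (illustrative, ignoring the barrier):

    #include <cstdint>
    #include <cstring>

    int32_t LoadInt32Element(const uint8_t* base, uint32_t index) {
      int32_t v;
      std::memcpy(&v, base + (index << 2), sizeof(v));  // lwax base+index*4
      return v;
    }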
| 5977 |
5791 #undef __ | 5978 #undef __ |
5792 } // namespace internal | 5979 } // namespace internal |
5793 } // namespace v8 | 5980 } // namespace v8 |
5794 | 5981 |
5795 #endif // V8_TARGET_ARCH_PPC | 5982 #endif // V8_TARGET_ARCH_PPC |