OLD | NEW |
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_S390 | 5 #if V8_TARGET_ARCH_S390 |
6 | 6 |
7 #include "src/code-stubs.h" | 7 #include "src/code-stubs.h" |
8 #include "src/api-arguments.h" | 8 #include "src/api-arguments.h" |
9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" |
10 #include "src/bootstrapper.h" | 10 #include "src/bootstrapper.h" |
(...skipping 5680 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
5691 ExternalReference thunk_ref = | 5691 ExternalReference thunk_ref = |
5692 ExternalReference::invoke_accessor_getter_callback(isolate()); | 5692 ExternalReference::invoke_accessor_getter_callback(isolate()); |
5693 | 5693 |
5694 // +3 is to skip prolog, return address and name handle. | 5694 // +3 is to skip prolog, return address and name handle. |
5695 MemOperand return_value_operand( | 5695 MemOperand return_value_operand( |
5696 fp, (PropertyCallbackArguments::kReturnValueOffset + 3) * kPointerSize); | 5696 fp, (PropertyCallbackArguments::kReturnValueOffset + 3) * kPointerSize); |
5697 CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, | 5697 CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, |
5698 kStackUnwindSpace, NULL, return_value_operand, NULL); | 5698 kStackUnwindSpace, NULL, return_value_operand, NULL); |
5699 } | 5699 } |
5700 | 5700 |
namespace {

// Computes the address of a JSTypedArray's first element into
// |backing_store|: loads the backing ArrayBuffer's store pointer and adds
// the view's byte offset.  The byte offset is either a Smi or (for offsets
// outside the Smi range) a HeapNumber; both representations are handled.
// Clobbers |scratch| and |double_scratch| (and r0 on 31-bit targets when
// converting a HeapNumber offset).
void GetTypedArrayBackingStore(MacroAssembler* masm, Register backing_store,
                               Register object, Register scratch,
                               DoubleRegister double_scratch) {
  Label offset_is_not_smi, done_offset;
  __ LoadP(scratch, FieldMemOperand(object, JSTypedArray::kBufferOffset));
  __ LoadP(backing_store,
           FieldMemOperand(scratch, JSArrayBuffer::kBackingStoreOffset));
  __ LoadP(scratch,
           FieldMemOperand(object, JSArrayBufferView::kByteOffsetOffset));
  __ JumpIfNotSmi(scratch, &offset_is_not_smi);
  // offset is smi: untag it to get the raw byte offset.
  __ SmiUntag(scratch);
  __ b(&done_offset, Label::kNear);

  // offset is a heap number: load its double payload and truncate to an
  // integer byte offset.
  __ bind(&offset_is_not_smi);
  __ LoadDouble(double_scratch,
                FieldMemOperand(scratch, HeapNumber::kValueOffset));
  __ ConvertDoubleToInt64(double_scratch,
#if !V8_TARGET_ARCH_S390X
                          r0,
#endif
                          scratch, double_scratch);
  __ bind(&done_offset);
  // backing_store = buffer backing store + byte offset.
  __ AddP(backing_store, backing_store, scratch);
}
| 5729 |
// Dispatches on |object|'s elements kind through the jump table emitted at
// |table| (see TypedArrayJumpTableEpilogue).  The table is indexed by
// elements instance type relative to FIXED_INT8_ARRAY_TYPE, so the
// STATIC_ASSERTs in the epilogue pin the required type ordering.
// Clobbers |scratch| and |scratch2|; does not return (ends in a jump).
void TypedArrayJumpTablePrologue(MacroAssembler* masm, Register object,
                                 Register scratch, Register scratch2,
                                 Label* table) {
  __ LoadP(scratch, FieldMemOperand(object, JSObject::kElementsOffset));
  __ LoadP(scratch, FieldMemOperand(scratch, HeapObject::kMapOffset));
  __ LoadlB(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
  // Rebase the instance type so FIXED_INT8_ARRAY_TYPE maps to table slot 0.
  __ SubP(scratch, scratch,
          Operand(static_cast<uint8_t>(FIXED_INT8_ARRAY_TYPE)));
  if (__ emit_debug_code()) {
    // Debug-only sanity check that the rebased index is non-negative.
    // NOTE(review): kOffsetOutOfRange looks borrowed from another check;
    // an instance-type-specific bailout reason would read better — confirm.
    __ CmpP(scratch, Operand::Zero());
    __ Check(ge, kOffsetOutOfRange);
  }
  // Scale to pointer-sized table slots and jump through the table entry.
  __ ShiftLeftP(scratch, scratch, Operand(kPointerSizeLog2));
  __ larl(scratch2, table);
  __ LoadP(scratch, MemOperand(scratch2, scratch));
  __ Jump(scratch);
}
| 5747 |
// Emits the jump table consumed by TypedArrayJumpTablePrologue, one
// pointer-sized entry per fixed typed-array elements kind, in instance-type
// order starting at FIXED_INT8_ARRAY_TYPE (asserted below).  Float32/Float64
// entries point at a local abort stub since callers only handle integer
// arrays.
void TypedArrayJumpTableEpilogue(MacroAssembler* masm, Label* table, Label* i8,
                                 Label* u8, Label* i16, Label* u16, Label* i32,
                                 Label* u32, Label* u8c) {
  // The table below is indexed by (instance_type - FIXED_INT8_ARRAY_TYPE);
  // these asserts pin the enum layout the indexing depends on.
  STATIC_ASSERT(FIXED_UINT8_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 1);
  STATIC_ASSERT(FIXED_INT16_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 2);
  STATIC_ASSERT(FIXED_UINT16_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 3);
  STATIC_ASSERT(FIXED_INT32_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 4);
  STATIC_ASSERT(FIXED_UINT32_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 5);
  STATIC_ASSERT(FIXED_FLOAT32_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 6);
  STATIC_ASSERT(FIXED_FLOAT64_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 7);
  STATIC_ASSERT(FIXED_UINT8_CLAMPED_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 8);

  Label abort;
  __ bind(table);
  __ emit_label_addr(i8);      // Int8Array
  __ emit_label_addr(u8);      // Uint8Array
  __ emit_label_addr(i16);     // Int16Array
  __ emit_label_addr(u16);     // Uint16Array
  __ emit_label_addr(i32);     // Int32Array
  __ emit_label_addr(u32);     // Uint32Array
  __ emit_label_addr(&abort);  // Float32Array
  __ emit_label_addr(&abort);  // Float64Array
  __ emit_label_addr(u8c);     // Uint8ClampedArray

  __ bind(&abort);
  __ Abort(kNoReason);
}
| 5775 |
#if !V8_TARGET_ARCH_S390X
// 31-bit only: returns the signed int32 in |value| to the caller.  If it
// fits in a Smi, tags it into r2 and returns directly; otherwise converts
// it to a double in |dst| and branches to |use_heap_number| (which is
// expected to box |dst| into a HeapNumber).  Clobbers r0 as scratch for
// the Smi-candidate check.
void ReturnInteger32(MacroAssembler* masm, DoubleRegister dst, Register value,
                     Label* use_heap_number) {
  Label not_smi;
  __ JumpIfNotSmiCandidate(value, r0, &not_smi);
  __ SmiTag(r2, value);
  __ Ret();

  __ bind(&not_smi);
  __ ConvertIntToDouble(value, dst);
  __ b(use_heap_number);
}
#endif
| 5789 |
// Returns the unsigned int32 in |value| to the caller.  If it fits in a
// Smi, tags it into r2 and returns directly; otherwise converts it
// (unsigned) to a double in |dst| and branches to |use_heap_number| (which
// is expected to box |dst| into a HeapNumber).  Clobbers r0 as scratch for
// the Smi-candidate check.
void ReturnUnsignedInteger32(MacroAssembler* masm, DoubleRegister dst,
                             Register value, Label* use_heap_number) {
  Label not_smi;
  __ JumpIfNotUnsignedSmiCandidate(value, r0, &not_smi);
  __ SmiTag(r2, value);
  __ Ret();

  __ bind(&not_smi);
  __ ConvertUnsignedIntToDouble(value, dst);
  __ b(use_heap_number);
}
| 5801 |
// Boxes the double in |value| into a new HeapNumber and returns it in r2.
// Tries inline allocation first; on failure falls back to the
// kAllocateHeapNumber runtime call (saving/restoring double registers so
// |value| survives the call).  Clobbers |scratch|, |scratch2|, |scratch3|.
void ReturnAllocatedHeapNumber(MacroAssembler* masm, DoubleRegister value,
                               Register scratch, Register scratch2,
                               Register scratch3) {
  Label call_runtime;
  __ LoadRoot(scratch3, Heap::kHeapNumberMapRootIndex);
  __ AllocateHeapNumber(r2, scratch, scratch2, scratch3, &call_runtime);
  __ StoreDouble(value, FieldMemOperand(r2, HeapNumber::kValueOffset));
  __ Ret();

  // Slow path: allocate via the runtime, then store the payload into the
  // HeapNumber the runtime returned in r2.
  __ bind(&call_runtime);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
    __ StoreDouble(value, FieldMemOperand(r2, HeapNumber::kValueOffset));
  }
  __ Ret();
}

}  // anonymous namespace
| 5821 |
| 5822 void AtomicsLoadStub::Generate(MacroAssembler* masm) { |
| 5823 Register object = r3; |
| 5824 Register index = r2; // Index is an untagged word32. |
| 5825 Register backing_store = r4; |
| 5826 Label table, i8, u8, i16, u16, i32, u32; |
| 5827 |
| 5828 GetTypedArrayBackingStore(masm, backing_store, object, r5, d0); |
| 5829 TypedArrayJumpTablePrologue(masm, object, r5, r6, &table); |
| 5830 |
| 5831 __ bind(&i8); |
| 5832 __ LoadB(r2, MemOperand(index, backing_store)); |
| 5833 __ SmiTag(r2); |
| 5834 __ Ret(); |
| 5835 |
| 5836 __ bind(&u8); |
| 5837 __ LoadlB(r2, MemOperand(index, backing_store)); |
| 5838 __ SmiTag(r2); |
| 5839 __ Ret(); |
| 5840 |
| 5841 __ bind(&i16); |
| 5842 __ ShiftLeftP(index, index, Operand(1)); |
| 5843 __ LoadHalfWordP(r2, MemOperand(index, backing_store)); |
| 5844 __ SmiTag(r2); |
| 5845 __ Ret(); |
| 5846 |
| 5847 __ bind(&u16); |
| 5848 __ ShiftLeftP(index, index, Operand(1)); |
| 5849 __ LoadHalfWordP(r2, MemOperand(index, backing_store)); |
| 5850 __ SmiTag(r2); |
| 5851 __ Ret(); |
| 5852 |
| 5853 Label use_heap_number; |
| 5854 |
| 5855 __ bind(&i32); |
| 5856 __ ShiftLeftP(index, index, Operand(2)); |
| 5857 __ LoadW(r2, MemOperand(index, backing_store)); |
| 5858 #if V8_TARGET_ARCH_S390X |
| 5859 __ SmiTag(r2); |
| 5860 __ Ret(); |
| 5861 #else |
| 5862 ReturnInteger32(masm, d0, r2, &use_heap_number); |
| 5863 #endif |
| 5864 |
| 5865 __ bind(&u32); |
| 5866 __ ShiftLeftP(index, index, Operand(2)); |
| 5867 __ LoadW(r2, MemOperand(index, backing_store)); |
| 5868 ReturnUnsignedInteger32(masm, d0, r2, &use_heap_number); |
| 5869 |
| 5870 __ bind(&use_heap_number); |
| 5871 ReturnAllocatedHeapNumber(masm, d0, r3, r4, r5); |
| 5872 |
| 5873 TypedArrayJumpTableEpilogue(masm, &table, &i8, &u8, &i16, &u16, &i32, &u32, |
| 5874 &u8); |
| 5875 } |
| 5876 |
5701 #undef __ | 5877 #undef __ |
5702 | 5878 |
5703 } // namespace internal | 5879 } // namespace internal |
5704 } // namespace v8 | 5880 } // namespace v8 |
5705 | 5881 |
5706 #endif // V8_TARGET_ARCH_S390 | 5882 #endif // V8_TARGET_ARCH_S390 |
OLD | NEW |