OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_IA32 | 5 #if V8_TARGET_ARCH_IA32 |
6 | 6 |
7 #include "src/code-stubs.h" | 7 #include "src/code-stubs.h" |
8 #include "src/api-arguments.h" | 8 #include "src/api-arguments.h" |
9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" |
10 #include "src/bootstrapper.h" | 10 #include "src/bootstrapper.h" |
(...skipping 5821 matching lines...)
5832 ExternalReference::invoke_accessor_getter_callback(isolate()); | 5832 ExternalReference::invoke_accessor_getter_callback(isolate()); |
5833 | 5833 |
5834 // +3 is to skip prolog, return address and name handle. | 5834 // +3 is to skip prolog, return address and name handle. |
5835 Operand return_value_operand( | 5835 Operand return_value_operand( |
5836 ebp, (PropertyCallbackArguments::kReturnValueOffset + 3) * kPointerSize); | 5836 ebp, (PropertyCallbackArguments::kReturnValueOffset + 3) * kPointerSize); |
5837 CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, | 5837 CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, |
5838 thunk_last_arg, kStackUnwindSpace, nullptr, | 5838 thunk_last_arg, kStackUnwindSpace, nullptr, |
5839 return_value_operand, NULL); | 5839 return_value_operand, NULL); |
5840 } | 5840 } |
5841 | 5841 |
| 5842 namespace { |
| 5843 |
| 5844 void GetTypedArrayBackingStore(MacroAssembler* masm, Register backing_store, |
| 5845 Register object, Register scratch) { |
| 5846 Label offset_is_not_smi, done; |
| 5847 __ mov(scratch, FieldOperand(object, JSTypedArray::kBufferOffset)); |
| 5848 __ mov(backing_store, |
| 5849 FieldOperand(scratch, JSArrayBuffer::kBackingStoreOffset)); |
| 5850 |
| 5851 __ mov(scratch, FieldOperand(object, JSArrayBufferView::kByteOffsetOffset)); |
| 5852 __ JumpIfNotSmi(scratch, &offset_is_not_smi, Label::kNear); |
| 5853 // Offset is smi. |
| 5854 __ SmiUntag(scratch); |
| 5855 __ add(backing_store, scratch); |
| 5856 __ jmp(&done, Label::kNear); |
| 5857 |
| 5858 // Offset is a heap number. |
| 5859 __ bind(&offset_is_not_smi); |
| 5860 __ movsd(xmm0, FieldOperand(scratch, HeapNumber::kValueOffset)); |
| 5861 __ cvttsd2si(scratch, xmm0); |
| 5862 __ add(backing_store, scratch); |
| 5863 __ bind(&done); |
| 5864 } |
| 5865 |
| 5866 void TypedArrayJumpTablePrologue(MacroAssembler* masm, Register object, |
| 5867 Register scratch, Register scratch2, |
| 5868 Label* table) { |
| 5869 __ mov(scratch, FieldOperand(object, JSObject::kElementsOffset)); |
| 5870 __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset)); |
| 5871 __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset)); |
| 5872 __ sub(scratch, Immediate(static_cast<uint8_t>(FIXED_INT8_ARRAY_TYPE))); |
| 5873 __ Assert(above_equal, kOffsetOutOfRange); |
| 5874 __ jmp(Operand::JumpTable(scratch, times_4, table)); |
| 5875 } |
| 5876 |
| 5877 void TypedArrayJumpTableEpilogue(MacroAssembler* masm, Label* table, Label* i8, |
| 5878 Label* u8, Label* i16, Label* u16, Label* i32, |
| 5879 Label* u32, Label* u8c) { |
| 5880 STATIC_ASSERT(FIXED_UINT8_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 1); |
| 5881 STATIC_ASSERT(FIXED_INT16_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 2); |
| 5882 STATIC_ASSERT(FIXED_UINT16_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 3); |
| 5883 STATIC_ASSERT(FIXED_INT32_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 4); |
| 5884 STATIC_ASSERT(FIXED_UINT32_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 5); |
| 5885 STATIC_ASSERT(FIXED_FLOAT32_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 6); |
| 5886 STATIC_ASSERT(FIXED_FLOAT64_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 7); |
| 5887 STATIC_ASSERT(FIXED_UINT8_CLAMPED_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 8); |
| 5888 |
| 5889 Label abort; |
| 5890 __ bind(table); |
| 5891 __ dd(i8); // Int8Array |
| 5892 __ dd(u8); // Uint8Array |
| 5893 __ dd(i16); // Int16Array |
| 5894 __ dd(u16); // Uint16Array |
| 5895 __ dd(i32); // Int32Array |
| 5896 __ dd(u32); // Uint32Array |
| 5897 __ dd(&abort); // Float32Array |
| 5898 __ dd(&abort); // Float64Array |
| 5899 __ dd(u8c); // Uint8ClampedArray |
| 5900 |
| 5901 __ bind(&abort); |
| 5902 __ Abort(kNoReason); |
| 5903 } |
| 5904 |
| 5905 void ReturnInteger32(MacroAssembler* masm, XMMRegister dst, Register value, |
| 5906 Register scratch, Label* use_heap_number) { |
| 5907 Label not_smi; |
| 5908 if (!value.is(eax)) { |
| 5909 __ mov(eax, value); |
| 5910 } |
| 5911 __ JumpIfNotValidSmiValue(eax, scratch, ¬_smi, Label::kNear); |
| 5912 __ SmiTag(eax); |
| 5913 __ Ret(); |
| 5914 |
| 5915 __ bind(¬_smi); |
| 5916 __ Cvtsi2sd(dst, eax); |
| 5917 __ jmp(use_heap_number); |
| 5918 } |
| 5919 |
| 5920 void ReturnUnsignedInteger32(MacroAssembler* masm, XMMRegister dst, |
| 5921 Register value, XMMRegister scratch, |
| 5922 Label* use_heap_number) { |
| 5923 Label not_smi; |
| 5924 if (!value.is(eax)) { |
| 5925 __ mov(eax, value); |
| 5926 } |
| 5927 __ JumpIfUIntNotValidSmiValue(eax, ¬_smi, Label::kNear); |
| 5928 __ SmiTag(eax); |
| 5929 __ Ret(); |
| 5930 |
| 5931 __ bind(¬_smi); |
| 5932 // Convert [0, 2**32-1] -> [-2**31, 2**31-1]. |
| 5933 __ add(eax, Immediate(-0x7fffffff - 1)); // -0x80000000 parses incorrectly. |
| 5934 __ Cvtsi2sd(dst, eax); |
| 5935 __ mov(eax, Immediate(0x4f000000)); // 2**31 as IEEE float |
| 5936 __ movd(scratch, eax); |
| 5937 __ cvtss2sd(scratch, scratch); |
| 5938 __ addsd(dst, scratch); |
| 5939 __ jmp(use_heap_number); |
| 5940 } |
| 5941 |
| 5942 void ReturnAllocatedHeapNumber(MacroAssembler* masm, XMMRegister value, |
| 5943 Register scratch, Register scratch2) { |
| 5944 Label call_runtime; |
| 5945 __ AllocateHeapNumber(eax, scratch, scratch2, &call_runtime); |
| 5946 __ movsd(FieldOperand(eax, HeapNumber::kValueOffset), value); |
| 5947 __ Ret(); |
| 5948 |
| 5949 __ bind(&call_runtime); |
| 5950 { |
| 5951 FrameScope scope(masm, StackFrame::INTERNAL); |
| 5952 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber); |
| 5953 __ movsd(FieldOperand(eax, HeapNumber::kValueOffset), value); |
| 5954 } |
| 5955 __ Ret(); |
| 5956 } |
| 5957 |
| 5958 } // anonymous namespace |
| 5959 |
| 5960 void AtomicsLoadStub::Generate(MacroAssembler* masm) { |
| 5961 Register object = edx; |
| 5962 Register index = eax; // Index is an untagged word32. |
| 5963 Register backing_store = ebx; |
| 5964 Label table; |
| 5965 |
| 5966 GetTypedArrayBackingStore(masm, backing_store, object, ecx); |
| 5967 TypedArrayJumpTablePrologue(masm, object, ecx, esi, &table); |
| 5968 |
| 5969 Label i8, u8, i16, u16, i32, u32; |
| 5970 |
| 5971 __ bind(&i8); |
| 5972 __ mov_b(eax, Operand(backing_store, index, times_1, 0)); |
| 5973 __ movsx_b(eax, eax); |
| 5974 __ SmiTag(eax); |
| 5975 __ Ret(); |
| 5976 |
| 5977 __ bind(&u8); |
| 5978 __ mov_b(eax, Operand(backing_store, index, times_1, 0)); |
| 5979 __ movzx_b(eax, eax); |
| 5980 __ SmiTag(eax); |
| 5981 __ Ret(); |
| 5982 |
| 5983 __ bind(&i16); |
| 5984 __ mov_w(eax, Operand(backing_store, index, times_2, 0)); |
| 5985 __ movsx_w(eax, eax); |
| 5986 __ SmiTag(eax); |
| 5987 __ Ret(); |
| 5988 |
| 5989 __ bind(&u16); |
| 5990 __ mov_w(eax, Operand(backing_store, index, times_2, 0)); |
| 5991 __ movzx_w(eax, eax); |
| 5992 __ SmiTag(eax); |
| 5993 __ Ret(); |
| 5994 |
| 5995 Label use_heap_number; |
| 5996 |
| 5997 __ bind(&i32); |
| 5998 __ mov(eax, Operand(backing_store, index, times_4, 0)); |
| 5999 ReturnInteger32(masm, xmm0, eax, ecx, &use_heap_number); |
| 6000 |
| 6001 __ bind(&u32); |
| 6002 __ mov(eax, Operand(backing_store, index, times_4, 0)); |
| 6003 ReturnUnsignedInteger32(masm, xmm0, eax, xmm1, &use_heap_number); |
| 6004 |
| 6005 __ bind(&use_heap_number); |
| 6006 ReturnAllocatedHeapNumber(masm, xmm0, ecx, edx); |
| 6007 |
| 6008 TypedArrayJumpTableEpilogue(masm, &table, &i8, &u8, &i16, &u16, &i32, &u32, |
| 6009 &u8); |
| 6010 } |
5842 | 6011 |
5843 #undef __ | 6012 #undef __ |
5844 | 6013 |
5845 } // namespace internal | 6014 } // namespace internal |
5846 } // namespace v8 | 6015 } // namespace v8 |
5847 | 6016 |
5848 #endif // V8_TARGET_ARCH_IA32 | 6017 #endif // V8_TARGET_ARCH_IA32 |