OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_IA32 | 5 #if V8_TARGET_ARCH_IA32 |
6 | 6 |
7 #include "src/base/bits.h" | 7 #include "src/base/bits.h" |
8 #include "src/bootstrapper.h" | 8 #include "src/bootstrapper.h" |
9 #include "src/code-stubs.h" | 9 #include "src/code-stubs.h" |
10 #include "src/codegen.h" | 10 #include "src/codegen.h" |
(...skipping 5850 matching lines...) |
5861 ExternalReference::invoke_accessor_getter_callback(isolate()); | 5861 ExternalReference::invoke_accessor_getter_callback(isolate()); |
5862 | 5862 |
5863 // +3 is to skip prolog, return address and name handle. | 5863 // +3 is to skip prolog, return address and name handle. |
5864 Operand return_value_operand( | 5864 Operand return_value_operand( |
5865 ebp, (PropertyCallbackArguments::kReturnValueOffset + 3) * kPointerSize); | 5865 ebp, (PropertyCallbackArguments::kReturnValueOffset + 3) * kPointerSize); |
5866 CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, | 5866 CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, |
5867 thunk_last_arg, kStackUnwindSpace, nullptr, | 5867 thunk_last_arg, kStackUnwindSpace, nullptr, |
5868 return_value_operand, NULL); | 5868 return_value_operand, NULL); |
5869 } | 5869 } |
5870 | 5870 |
| 5871 namespace { |
| 5872 |
| 5873 void GetTypedArrayBackingStore(MacroAssembler* masm, Register backing_store, |
| 5874 Register object, Register scratch) { |
| 5875 Label offset_is_not_smi, done; |
| 5876 __ mov(scratch, FieldOperand(object, JSTypedArray::kBufferOffset)); |
| 5877 __ mov(backing_store, |
| 5878 FieldOperand(scratch, JSArrayBuffer::kBackingStoreOffset)); |
| 5879 |
| 5880 __ mov(scratch, FieldOperand(object, JSArrayBufferView::kByteOffsetOffset)); |
| 5881 __ JumpIfNotSmi(scratch, &offset_is_not_smi, Label::kNear); |
| 5882 // Offset is smi. |
| 5883 __ SmiUntag(scratch); |
| 5884 __ add(backing_store, scratch); |
| 5885 __ jmp(&done, Label::kNear); |
| 5886 |
| 5887 // Offset is a heap number. |
| 5888 __ bind(&offset_is_not_smi); |
| 5889 __ movsd(xmm0, FieldOperand(scratch, HeapNumber::kValueOffset)); |
| 5890 __ cvttsd2si(scratch, xmm0); |
| 5891 __ add(backing_store, scratch); |
| 5892 __ bind(&done); |
| 5893 } |
| 5894 |
| 5895 |
| 5896 void TaggedToInteger32(MacroAssembler* masm, Register value) { |
| 5897 Label not_smi, done; |
| 5898 __ JumpIfNotSmi(value, &not_smi, Label::kNear); |
| 5899 __ SmiUntag(value); |
| 5900 __ jmp(&done, Label::kNear); |
| 5901 |
| 5902 __ bind(&not_smi); |
| 5903 __ movsd(xmm0, FieldOperand(value, HeapNumber::kValueOffset)); |
| 5904 __ cvttsd2si(value, xmm0); |
| 5905 __ bind(&done); |
| 5906 } |
| 5907 |
| 5908 |
| 5909 void TypedArrayJumpTablePrologue(MacroAssembler* masm, Register object, |
| 5910 Register scratch, Register scratch2, |
| 5911 Label* table) { |
| 5912 __ mov(scratch, FieldOperand(object, JSObject::kElementsOffset)); |
| 5913 __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset)); |
| 5914 __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset)); |
| 5915 __ sub(scratch, Immediate(static_cast<uint8_t>(FIXED_INT8_ARRAY_TYPE))); |
| 5916 __ Assert(above_equal, kOffsetOutOfRange); |
| 5917 __ jmp(Operand::JumpTable(scratch, times_4, table)); |
| 5918 } |
| 5919 |
| 5920 |
| 5921 void TypedArrayJumpTableEpilogue(MacroAssembler* masm, Label* table, Label* i8, |
| 5922 Label* u8, Label* i16, Label* u16, Label* i32, |
| 5923 Label* u32, Label* u8c) { |
| 5924 STATIC_ASSERT(FIXED_UINT8_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 1); |
| 5925 STATIC_ASSERT(FIXED_INT16_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 2); |
| 5926 STATIC_ASSERT(FIXED_UINT16_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 3); |
| 5927 STATIC_ASSERT(FIXED_INT32_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 4); |
| 5928 STATIC_ASSERT(FIXED_UINT32_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 5); |
| 5929 STATIC_ASSERT(FIXED_FLOAT32_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 6); |
| 5930 STATIC_ASSERT(FIXED_FLOAT64_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 7); |
| 5931 STATIC_ASSERT(FIXED_UINT8_CLAMPED_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 8); |
| 5932 |
| 5933 Label abort; |
| 5934 __ bind(table); |
| 5935 __ dd(i8); // Int8Array |
| 5936 __ dd(u8); // Uint8Array |
| 5937 __ dd(i16); // Int16Array |
| 5938 __ dd(u16); // Uint16Array |
| 5939 __ dd(i32); // Int32Array |
| 5940 __ dd(u32); // Uint32Array |
| 5941 __ dd(&abort); // Float32Array |
| 5942 __ dd(&abort); // Float64Array |
| 5943 __ dd(u8c); // Uint8ClampedArray |
| 5944 |
| 5945 __ bind(&abort); |
| 5946 __ Abort(kNoReason); |
| 5947 } |
| 5948 |
| 5949 |
| 5950 void ReturnInteger32(MacroAssembler* masm, XMMRegister dst, Register value, |
| 5951 Register scratch, Label* use_heap_number) { |
| 5952 Label not_smi; |
| 5953 if (!value.is(eax)) { |
| 5954 __ mov(eax, value); |
| 5955 } |
| 5956 __ JumpIfNotValidSmiValue(eax, scratch, &not_smi, Label::kNear); |
| 5957 __ SmiTag(eax); |
| 5958 __ Ret(); |
| 5959 |
| 5960 __ bind(&not_smi); |
| 5961 __ Cvtsi2sd(dst, eax); |
| 5962 __ jmp(use_heap_number); |
| 5963 } |
| 5964 |
| 5965 |
| 5966 void ReturnUnsignedInteger32(MacroAssembler* masm, XMMRegister dst, |
| 5967 Register value, XMMRegister scratch, |
| 5968 Label* use_heap_number) { |
| 5969 Label not_smi; |
| 5970 if (!value.is(eax)) { |
| 5971 __ mov(eax, value); |
| 5972 } |
| 5973 __ JumpIfUIntNotValidSmiValue(eax, &not_smi, Label::kNear); |
| 5974 __ SmiTag(eax); |
| 5975 __ Ret(); |
| 5976 |
| 5977 __ bind(&not_smi); |
| 5978 // Convert [0, 2**32-1] -> [-2**31, 2**31-1]. |
| 5979 __ add(eax, Immediate(-0x7fffffff - 1)); // -0x80000000 parses incorrectly. |
| 5980 __ Cvtsi2sd(dst, eax); |
| 5981 __ mov(eax, Immediate(0x4f000000)); // 2**31 as IEEE float |
| 5982 __ movd(scratch, eax); |
| 5983 __ cvtss2sd(scratch, scratch); |
| 5984 __ addsd(dst, scratch); |
| 5985 __ jmp(use_heap_number); |
| 5986 } |
| 5987 |
| 5988 |
| 5989 void ReturnAllocatedHeapNumber(MacroAssembler* masm, XMMRegister value, |
| 5990 Register scratch, Register scratch2) { |
| 5991 Label call_runtime; |
| 5992 __ AllocateHeapNumber(eax, scratch, scratch2, &call_runtime); |
| 5993 __ movsd(FieldOperand(eax, HeapNumber::kValueOffset), value); |
| 5994 __ Ret(); |
| 5995 |
| 5996 __ bind(&call_runtime); |
| 5997 { |
| 5998 FrameScope scope(masm, StackFrame::INTERNAL); |
| 5999 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber); |
| 6000 __ movsd(FieldOperand(eax, HeapNumber::kValueOffset), value); |
| 6001 } |
| 6002 __ Ret(); |
| 6003 } |
| 6004 |
| 6005 } // anonymous namespace |
| 6006 |
| 6007 void AtomicsLoadStub::Generate(MacroAssembler* masm) { |
| 6008 Register object = edx; |
| 6009 Register backing_store = ebx; |
| 6010 Register index = eax; |
| 6011 Label table; |
| 6012 |
| 6013 GetTypedArrayBackingStore(masm, backing_store, object, ecx); |
| 6014 TaggedToInteger32(masm, index); |
| 6015 TypedArrayJumpTablePrologue(masm, object, ecx, esi, &table); |
| 6016 |
| 6017 Label i8, u8, i16, u16, i32, u32; |
| 6018 |
| 6019 __ bind(&i8); |
| 6020 __ mov_b(eax, Operand(backing_store, index, times_1, 0)); |
| 6021 __ movsx_b(eax, eax); |
| 6022 __ SmiTag(eax); |
| 6023 __ Ret(); |
| 6024 |
| 6025 __ bind(&u8); |
| 6026 __ mov_b(eax, Operand(backing_store, index, times_1, 0)); |
| 6027 __ movzx_b(eax, eax); |
| 6028 __ SmiTag(eax); |
| 6029 __ Ret(); |
| 6030 |
| 6031 __ bind(&i16); |
| 6032 __ mov_w(eax, Operand(backing_store, index, times_2, 0)); |
| 6033 __ movsx_w(eax, eax); |
| 6034 __ SmiTag(eax); |
| 6035 __ Ret(); |
| 6036 |
| 6037 __ bind(&u16); |
| 6038 __ mov_w(eax, Operand(backing_store, index, times_2, 0)); |
| 6039 __ movzx_w(eax, eax); |
| 6040 __ SmiTag(eax); |
| 6041 __ Ret(); |
| 6042 |
| 6043 Label use_heap_number; |
| 6044 |
| 6045 __ bind(&i32); |
| 6046 __ mov(eax, Operand(backing_store, index, times_4, 0)); |
| 6047 ReturnInteger32(masm, xmm0, eax, ecx, &use_heap_number); |
| 6048 |
| 6049 __ bind(&u32); |
| 6050 __ mov(eax, Operand(backing_store, index, times_4, 0)); |
| 6051 ReturnUnsignedInteger32(masm, xmm0, eax, xmm1, &use_heap_number); |
| 6052 |
| 6053 __ bind(&use_heap_number); |
| 6054 ReturnAllocatedHeapNumber(masm, xmm0, ecx, edx); |
| 6055 |
| 6056 TypedArrayJumpTableEpilogue(masm, &table, &i8, &u8, &i16, &u16, &i32, &u32, |
| 6057 &u8); |
| 6058 } |
| 6059 |
5871 | 6060 |
5872 #undef __ | 6061 #undef __ |
5873 | 6062 |
5874 } // namespace internal | 6063 } // namespace internal |
5875 } // namespace v8 | 6064 } // namespace v8 |
5876 | 6065 |
5877 #endif // V8_TARGET_ARCH_IA32 | 6066 #endif // V8_TARGET_ARCH_IA32 |
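
A note on ReturnUnsignedInteger32 above: cvtsi2sd only converts signed 32-bit integers, so the stub biases the unsigned value by -2^31 (spelled -0x7fffffff - 1 because the literal -0x80000000 parses as unsigned), converts the now-signed value, and then adds 2^31 back in double precision (the stub materializes 2^31 as the single-precision constant 0x4f000000 and widens it with cvtss2sd). A minimal standalone sketch of the same arithmetic in plain C++, not V8 code:

#include <cstdint>
#include <cstdio>

// Sketch: convert uint32 -> double using only a signed int32 -> double
// conversion, mirroring the bias trick the stub emits.
double Uint32ToDouble(uint32_t value) {
  // value - 2^31 wraps into [-2^31, 2^31) when viewed as a signed int32.
  int32_t biased = static_cast<int32_t>(value - 0x80000000u);
  // Convert the signed value, then undo the bias in double precision.
  return static_cast<double>(biased) + 2147483648.0;  // 2147483648.0 == 2^31
}

int main() {
  std::printf("%.0f\n", Uint32ToDouble(0u));           // 0
  std::printf("%.0f\n", Uint32ToDouble(4294967295u));  // 4294967295
  return 0;
}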
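
For orientation, AtomicsLoadStub::Generate behaves roughly like the following portable sketch, once the backing store and untagged index are in hand. The ElementKind enum and function names are illustrative only, not V8 API; the stub additionally returns small results as Smis, boxes larger ones as HeapNumbers, and aborts on the Float32/Float64 jump-table entries:

#include <cstdint>

// Illustrative kind order, matching the jump table: Int8, Uint8, Int16,
// Uint16, Int32, Uint32, Uint8Clamped (the float kinds abort in the stub).
enum class ElementKind { kInt8, kUint8, kInt16, kUint16, kInt32, kUint32, kUint8Clamped };

// backing_store already includes the view's byte offset (see
// GetTypedArrayBackingStore); index has been untagged (see TaggedToInteger32).
double AtomicsLoadSketch(const uint8_t* backing_store, int32_t index, ElementKind kind) {
  switch (kind) {
    case ElementKind::kInt8:
      return reinterpret_cast<const int8_t*>(backing_store)[index];
    case ElementKind::kUint8:
    case ElementKind::kUint8Clamped:  // loads exactly like Uint8
      return backing_store[index];
    case ElementKind::kInt16:
      return reinterpret_cast<const int16_t*>(backing_store)[index];
    case ElementKind::kUint16:
      return reinterpret_cast<const uint16_t*>(backing_store)[index];
    case ElementKind::kInt32:
      return reinterpret_cast<const int32_t*>(backing_store)[index];
    case ElementKind::kUint32:
      return reinterpret_cast<const uint32_t*>(backing_store)[index];
  }
  return 0;  // unreachable for the kinds above
}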