OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_ARM64 | 5 #if V8_TARGET_ARCH_ARM64 |
6 | 6 |
7 #include "src/code-stubs.h" | 7 #include "src/code-stubs.h" |
8 #include "src/api-arguments.h" | 8 #include "src/api-arguments.h" |
9 #include "src/bootstrapper.h" | 9 #include "src/bootstrapper.h" |
10 #include "src/codegen.h" | 10 #include "src/codegen.h" |
(...skipping 5921 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
5932 | 5932 |
5933 const int spill_offset = 1 + kApiStackSpace; | 5933 const int spill_offset = 1 + kApiStackSpace; |
5934 // +3 is to skip prolog, return address and name handle. | 5934 // +3 is to skip prolog, return address and name handle. |
5935 MemOperand return_value_operand( | 5935 MemOperand return_value_operand( |
5936 fp, (PropertyCallbackArguments::kReturnValueOffset + 3) * kPointerSize); | 5936 fp, (PropertyCallbackArguments::kReturnValueOffset + 3) * kPointerSize); |
5937 CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, | 5937 CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, |
5938 kStackUnwindSpace, NULL, spill_offset, | 5938 kStackUnwindSpace, NULL, spill_offset, |
5939 return_value_operand, NULL); | 5939 return_value_operand, NULL); |
5940 } | 5940 } |
5941 | 5941 |
| 5942 namespace { |
| 5943 |
| 5944 void GetTypedArrayBackingStore(MacroAssembler* masm, Register backing_store, |
| 5945 Register object, Register scratch, |
| 5946 FPRegister double_scratch) { // backing_store = buffer->backing_store + byte_offset. |
| 5947 Label offset_is_not_smi, done; |
| 5948 __ Ldr(scratch, FieldMemOperand(object, JSTypedArray::kBufferOffset)); // scratch = typed_array->buffer. |
| 5949 __ Ldr(backing_store, |
| 5950 FieldMemOperand(scratch, JSArrayBuffer::kBackingStoreOffset)); // backing_store = buffer->backing_store. |
| 5951 __ Ldr(scratch, |
| 5952 FieldMemOperand(object, JSArrayBufferView::kByteOffsetOffset)); // scratch = view->byte_offset (smi or heap number). |
| 5953 __ JumpIfNotSmi(scratch, &offset_is_not_smi); |
| 5954 // offset is a smi: untag and add directly. |
| 5955 __ Add(backing_store, backing_store, Operand::UntagSmi(scratch)); |
| 5956 __ B(&done); |
| 5957 |
| 5958 // offset is a heap number: load the double and truncate to unsigned int. |
| 5959 __ Bind(&offset_is_not_smi); |
| 5960 __ Ldr(double_scratch, FieldMemOperand(scratch, HeapNumber::kValueOffset)); |
| 5961 __ Fcvtzu(scratch, double_scratch); // Convert toward zero, unsigned. |
| 5962 __ Add(backing_store, backing_store, scratch); |
| 5963 __ Bind(&done); |
| 5964 } |
| 5965 |
| 5966 void TypedArrayJumpTable(MacroAssembler* masm, Register object, |
| 5967 Register scratch, Register scratch2, Label* i8, |
| 5968 Label* u8, Label* i16, Label* u16, Label* i32, |
| 5969 Label* u32, Label* u8c) { // Dispatch on object's elements kind to one of the labels. |
| 5970 STATIC_ASSERT(FIXED_UINT8_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 1); |
| 5971 STATIC_ASSERT(FIXED_INT16_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 2); |
| 5972 STATIC_ASSERT(FIXED_UINT16_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 3); |
| 5973 STATIC_ASSERT(FIXED_INT32_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 4); |
| 5974 STATIC_ASSERT(FIXED_UINT32_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 5); |
| 5975 STATIC_ASSERT(FIXED_FLOAT32_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 6); |
| 5976 STATIC_ASSERT(FIXED_FLOAT64_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 7); |
| 5977 STATIC_ASSERT(FIXED_UINT8_CLAMPED_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 8); |
| 5978 |
| 5979 __ Ldr(scratch, FieldMemOperand(object, JSObject::kElementsOffset)); |
| 5980 __ Ldr(scratch, FieldMemOperand(scratch, HeapObject::kMapOffset)); |
| 5981 __ Ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset)); |
| 5982 __ Subs(scratch, scratch, |
| 5983 Operand(static_cast<uint8_t>(FIXED_INT8_ARRAY_TYPE))); // scratch = instance_type - FIXED_INT8_ARRAY_TYPE. |
| 5984 __ Assert(ge, kOffsetOutOfRange); // NOTE(review): abort reason mentions "offset" for a type-range check -- confirm it is the intended one. |
| 5985 |
| 5986 Label abort; |
| 5987 Label table; |
| 5988 |
| 5989 __ Adr(scratch2, &table); // scratch2 = start address of the jump table. |
| 5990 __ Add(scratch, scratch2, Operand(scratch, UXTW, 2)); // Each entry is one 4-byte B instruction, hence shift by 2. |
| 5991 __ Br(scratch); |
| 5992 |
| 5993 __ StartBlockPools(); // Keep the table contiguous: no constant/veneer pools between entries. |
| 5994 __ Bind(&table); |
| 5995 __ B(i8); // Int8Array |
| 5996 __ B(u8); // Uint8Array |
| 5997 __ B(i16); // Int16Array |
| 5998 __ B(u16); // Uint16Array |
| 5999 __ B(i32); // Int32Array |
| 6000 __ B(u32); // Uint32Array |
| 6001 __ B(&abort); // Float32Array |
| 6002 __ B(&abort); // Float64Array |
| 6003 __ B(u8c); // Uint8ClampedArray |
| 6004 __ EndBlockPools(); |
| 6005 |
| 6006 __ Bind(&abort); |
| 6007 __ Abort(kNoReason); // Float typed arrays are not handled by this dispatch. |
| 6008 } |
| 6009 |
| 6010 void ReturnUnsignedInteger32(MacroAssembler* masm, FPRegister dst, // Returns the uint32 in |value| as a smi or HeapNumber in x0. |
| 6011 Register value, Register scratch, |
| 6012 Register scratch2) { |
| 6013 Label not_smi, call_runtime; |
| 6014 __ Tbnz(value, 31, &not_smi); // Values >= 2^31 do not fit in a smi. |
| 6015 __ SmiTag(x0, value); |
| 6016 __ Ret(); |
| 6017 |
| 6018 __ Bind(&not_smi); |
| 6019 __ Ucvtf(dst, value); // dst = (double)value, unsigned conversion. |
| 6020 __ AllocateHeapNumber(x0, &call_runtime, scratch, scratch2, dst); |
| 6021 __ Ret(); |
| 6022 |
| 6023 __ Bind(&call_runtime); |
| 6024 { |
| 6025 FrameScope scope(masm, StackFrame::INTERNAL); |
| 6026 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber); |
| 6027 __ Str(dst, FieldMemOperand(x0, HeapNumber::kValueOffset)); // Store the converted double; |value| holds raw integer bits. dst survives CallRuntimeSaveDoubles. |
| 6028 } |
| 6029 __ Ret(); |
| 6030 } |
| 6031 |
| 6032 } // anonymous namespace |
| 6033 |
| 6034 void AtomicsLoadStub::Generate(MacroAssembler* masm) { // Atomics.load: load element |index| of typed array |object|, with a full barrier. |
| 6035 Register object = x1; |
| 6036 Register index = x0; // Index is an untagged word32. |
| 6037 Register backing_store = x2; |
| 6038 Label i8, u8, i16, u16, i32, u32; |
| 6039 |
| 6040 GetTypedArrayBackingStore(masm, backing_store, object, x3, d0); |
| 6041 TypedArrayJumpTable(masm, object, x3, x4, &i8, &u8, &i16, &u16, &i32, &u32, |
| 6042 &u8); // Uint8Clamped loads exactly like Uint8. |
| 6043 |
| 6044 __ Bind(&i8); |
| 6045 __ Ldrsb(x0, MemOperand(backing_store, index, UXTW)); // Use only the word32 index, consistent with the wider cases below. |
| 6046 __ Dmb(InnerShareable, BarrierAll); // Barrier ordering the atomic load. |
| 6047 __ SmiTag(x0); |
| 6048 __ Ret(); |
| 6049 |
| 6050 __ Bind(&u8); |
| 6051 __ Ldrb(x0, MemOperand(backing_store, index, UXTW)); |
| 6052 __ Dmb(InnerShareable, BarrierAll); |
| 6053 __ SmiTag(x0); |
| 6054 __ Ret(); |
| 6055 |
| 6056 __ Bind(&i16); |
| 6057 __ Ldrsh(x0, MemOperand(backing_store, index, UXTW, 1)); |
| 6058 __ Dmb(InnerShareable, BarrierAll); |
| 6059 __ SmiTag(x0); |
| 6060 __ Ret(); |
| 6061 |
| 6062 __ Bind(&u16); |
| 6063 __ Ldrh(x0, MemOperand(backing_store, index, UXTW, 1)); |
| 6064 __ Dmb(InnerShareable, BarrierAll); |
| 6065 __ SmiTag(x0); |
| 6066 __ Ret(); |
| 6067 |
| 6068 __ Bind(&i32); |
| 6069 __ Ldrsw(x0, MemOperand(backing_store, index, UXTW, 2)); |
| 6070 __ Dmb(InnerShareable, BarrierAll); |
| 6071 DCHECK(SmiValuesAre32Bits()); // Any int32 fits in a smi only with 32-bit smis. |
| 6072 __ SmiTag(x0); |
| 6073 __ Ret(); |
| 6074 |
| 6075 __ Bind(&u32); |
| 6076 __ Ldr(w0, MemOperand(backing_store, index, UXTW, 2)); |
| 6077 __ Dmb(InnerShareable, BarrierAll); |
| 6078 ReturnUnsignedInteger32(masm, d0, x0, x1, x2); // Values >= 2^31 need a HeapNumber result. |
| 6079 } |
5942 | 6080 |
5943 #undef __ | 6081 #undef __ |
5944 | 6082 |
5945 } // namespace internal | 6083 } // namespace internal |
5946 } // namespace v8 | 6084 } // namespace v8 |
5947 | 6085 |
5948 #endif // V8_TARGET_ARCH_ARM64 | 6086 #endif // V8_TARGET_ARCH_ARM64 |
OLD | NEW |