| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #if V8_TARGET_ARCH_ARM64 | 5 #if V8_TARGET_ARCH_ARM64 |
| 6 | 6 |
| 7 #include "src/code-stubs.h" | 7 #include "src/code-stubs.h" |
| 8 #include "src/api-arguments.h" | 8 #include "src/api-arguments.h" |
| 9 #include "src/bootstrapper.h" | 9 #include "src/bootstrapper.h" |
| 10 #include "src/codegen.h" | 10 #include "src/codegen.h" |
| (...skipping 5916 matching lines...) | |
| 5927 | 5927 |
| 5928 const int spill_offset = 1 + kApiStackSpace; | 5928 const int spill_offset = 1 + kApiStackSpace; |
| 5929 // +3 is to skip prolog, return address and name handle. | 5929 // +3 is to skip prolog, return address and name handle. |
| 5930 MemOperand return_value_operand( | 5930 MemOperand return_value_operand( |
| 5931 fp, (PropertyCallbackArguments::kReturnValueOffset + 3) * kPointerSize); | 5931 fp, (PropertyCallbackArguments::kReturnValueOffset + 3) * kPointerSize); |
| 5932 CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, | 5932 CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, |
| 5933 kStackUnwindSpace, NULL, spill_offset, | 5933 kStackUnwindSpace, NULL, spill_offset, |
| 5934 return_value_operand, NULL); | 5934 return_value_operand, NULL); |
| 5935 } | 5935 } |
| 5936 | 5936 |
| 5937 namespace { | |
| 5938 | |
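| // Computes the address of the typed array's first element: | |
| // backing_store = buffer->backing_store() + byte_offset. The byte offset | |
| // is read from the view and may be either a Smi or a heap number. | |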
| 5939 void GetTypedArrayBackingStore(MacroAssembler* masm, Register backing_store, | |
| 5940 Register object, Register scratch, | |
| 5941 FPRegister double_scratch) { | |
| 5942 Label offset_is_not_smi, done; | |
| 5943 __ Ldr(scratch, FieldMemOperand(object, JSTypedArray::kBufferOffset)); | |
| 5944 __ Ldr(backing_store, | |
| 5945 FieldMemOperand(scratch, JSArrayBuffer::kBackingStoreOffset)); | |
| 5946 __ Ldr(scratch, | |
| 5947 FieldMemOperand(object, JSArrayBufferView::kByteOffsetOffset)); | |
| 5948 __ JumpIfNotSmi(scratch, &offset_is_not_smi); | |
| 5949 // Offset is a Smi. | |
| 5950 __ Add(backing_store, backing_store, Operand::UntagSmi(scratch)); | |
| 5951 __ B(&done); | |
| 5952 | |
| 5953 // Offset is a heap number. | |
| 5954 __ Bind(&offset_is_not_smi); | |
| 5955 __ Ldr(double_scratch, FieldMemOperand(scratch, HeapNumber::kValueOffset)); | |
| 5956 __ Fcvtzu(scratch, double_scratch); | |
| 5957 __ Add(backing_store, backing_store, scratch); | |
| 5958 __ Bind(&done); | |
| 5959 } | |
| 5960 | |
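| // Branches to the label matching the elements kind of the typed array's | |
| // backing store. Float32 and Float64 arrays are not supported and abort. | |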
| 5961 void TypedArrayJumpTable(MacroAssembler* masm, Register object, | |
| 5962 Register scratch, Register scratch2, Label* i8, | |
| 5963 Label* u8, Label* i16, Label* u16, Label* i32, | |
| 5964 Label* u32, Label* u8c) { | |
| 5965 STATIC_ASSERT(FIXED_UINT8_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 1); | |
| 5966 STATIC_ASSERT(FIXED_INT16_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 2); | |
| 5967 STATIC_ASSERT(FIXED_UINT16_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 3); | |
| 5968 STATIC_ASSERT(FIXED_INT32_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 4); | |
| 5969 STATIC_ASSERT(FIXED_UINT32_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 5); | |
| 5970 STATIC_ASSERT(FIXED_FLOAT32_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 6); | |
| 5971 STATIC_ASSERT(FIXED_FLOAT64_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 7); | |
| 5972 STATIC_ASSERT(FIXED_UINT8_CLAMPED_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 8); | |
| 5973 | |
| 5974 __ Ldr(scratch, FieldMemOperand(object, JSObject::kElementsOffset)); | |
| 5975 __ Ldr(scratch, FieldMemOperand(scratch, HeapObject::kMapOffset)); | |
| 5976 __ Ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset)); | |
| 5977 __ Subs(scratch, scratch, | |
| 5978 Operand(static_cast<uint8_t>(FIXED_INT8_ARRAY_TYPE))); | |
| 5979 __ Assert(ge, kOffsetOutOfRange); | |
| 5980 | |
| 5981 Label abort; | |
| 5982 Label table; | |
| 5983 | |
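| // Each table entry below is a single 4-byte B instruction, so the type | |
| // index is scaled by 4 (UXTW #2) to compute the branch target. | |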
| 5984 __ Adr(scratch2, &table); | |
| 5985 __ Add(scratch, scratch2, Operand(scratch, UXTW, 2)); | |
| 5986 __ Br(scratch); | |
| 5987 | |
| 5988 __ StartBlockPools(); | |
| 5989 __ Bind(&table); | |
| 5990 __ B(i8); // Int8Array | |
| 5991 __ B(u8); // Uint8Array | |
| 5992 __ B(i16); // Int16Array | |
| 5993 __ B(u16); // Uint16Array | |
| 5994 __ B(i32); // Int32Array | |
| 5995 __ B(u32); // Uint32Array | |
| 5996 __ B(&abort); // Float32Array | |
| 5997 __ B(&abort); // Float64Array | |
| 5998 __ B(u8c); // Uint8ClampedArray | |
| 5999 __ EndBlockPools(); | |
| 6000 | |
| 6001 __ Bind(&abort); | |
| 6002 __ Abort(kNoReason); | |
| 6003 } | |
| 6004 | |
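| // Boxes an untagged uint32 return value. Values below 2^31 are returned | |
| // as a Smi in x0; anything with bit 31 set does not fit in an int32 Smi | |
| // and is converted to a double in a freshly allocated heap number. | |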
| 6005 void ReturnUnsignedInteger32(MacroAssembler* masm, FPRegister dst, | |
| 6006 Register value, Register scratch, | |
| 6007 Register scratch2) { | |
| 6008 Label not_smi, call_runtime; | |
| 6009 __ Tbnz(value, 31, &not_smi); | |
| 6010 __ SmiTag(x0, value); | |
| 6011 __ Ret(); | |
| 6012 | |
| 6013 __ Bind(&not_smi); | |
| 6014 __ Ucvtf(dst, value); | |
| 6015 __ AllocateHeapNumber(x0, &call_runtime, scratch, scratch2, dst); | |
| 6016 __ Ret(); | |
| 6017 | |
| 6018 __ Bind(&call_runtime); | |
| 6019 { | |
| 6020 FrameScope scope(masm, StackFrame::INTERNAL); | |
| 6021 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber); | |
| 6022 __ Str(dst, FieldMemOperand(x0, HeapNumber::kValueOffset)); | |
| 6023 } | |
| 6024 __ Ret(); | |
| 6025 } | |
| 6026 | |
| 6027 } // anonymous namespace | |
| 6028 | |
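| // Atomics.load requires sequential consistency; each case below emits a | |
| // plain load followed by a full data memory barrier (dmb ish). | |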
| 6029 void AtomicsLoadStub::Generate(MacroAssembler* masm) { | |
| 6030 Register object = x1; | |
| 6031 Register index = x0; // Index is an untagged word32. | |
| 6032 Register backing_store = x2; | |
| 6033 Label i8, u8, i16, u16, i32, u32; | |
| 6034 | |
| 6035 GetTypedArrayBackingStore(masm, backing_store, object, x3, d0); | |
| 6036 TypedArrayJumpTable(masm, object, x3, x4, &i8, &u8, &i16, &u16, &i32, &u32, | |
| 6037 &u8); | |
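| // Uint8ClampedArray shares the Uint8Array path: clamping only affects | |
| // stores, so loads are identical. | |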
| 6038 | |
| 6039 __ Bind(&i8); | |
| 6040 __ Ldrsb(x0, MemOperand(backing_store, index)); | |
| 6041 __ Dmb(InnerShareable, BarrierAll); | |
| 6042 __ SmiTag(x0); | |
| 6043 __ Ret(); | |
| 6044 | |
| 6045 __ Bind(&u8); | |
| 6046 __ Ldrb(x0, MemOperand(backing_store, index)); | |
| 6047 __ Dmb(InnerShareable, BarrierAll); | |
| 6048 __ SmiTag(x0); | |
| 6049 __ Ret(); | |
| 6050 | |
| 6051 __ Bind(&i16); | |
| 6052 __ Ldrsh(x0, MemOperand(backing_store, index, UXTW, 1)); | |
| 6053 __ Dmb(InnerShareable, BarrierAll); | |
| 6054 __ SmiTag(x0); | |
| 6055 __ Ret(); | |
| 6056 | |
| 6057 __ Bind(&u16); | |
| 6058 __ Ldrh(x0, MemOperand(backing_store, index, UXTW, 1)); | |
| 6059 __ Dmb(InnerShareable, BarrierAll); | |
| 6060 __ SmiTag(x0); | |
| 6061 __ Ret(); | |
| 6062 | |
| 6063 __ Bind(&i32); | |
| 6064 __ Ldrsw(x0, MemOperand(backing_store, index, UXTW, 2)); | |
| 6065 __ Dmb(InnerShareable, BarrierAll); | |
| 6066 DCHECK(SmiValuesAre32Bits()); | |
| 6067 __ SmiTag(x0); | |
| 6068 __ Ret(); | |
| 6069 | |
| 6070 __ Bind(&u32); | |
| 6071 __ Ldr(w0, MemOperand(backing_store, index, UXTW, 2)); | |
| 6072 __ Dmb(InnerShareable, BarrierAll); | |
| 6073 ReturnUnsignedInteger32(masm, d0, x0, x1, x2); | |
| 6074 } | |
| 6075 | |
| 6076 #undef __ | 5937 #undef __ |
| 6077 | 5938 |
| 6078 } // namespace internal | 5939 } // namespace internal |
| 6079 } // namespace v8 | 5940 } // namespace v8 |
| 6080 | 5941 |
| 6081 #endif // V8_TARGET_ARCH_ARM64 | 5942 #endif // V8_TARGET_ARCH_ARM64 |