Index: src/ia32/code-stubs-ia32.cc
diff --git a/src/ia32/code-stubs-ia32.cc b/src/ia32/code-stubs-ia32.cc
index 657c6efb3585714a37d0d089727ed2995cb9c06e..52f1d94e966752dbcd677be3eac7163f9e5257c8 100644
--- a/src/ia32/code-stubs-ia32.cc
+++ b/src/ia32/code-stubs-ia32.cc
@@ -5839,6 +5839,176 @@
 return_value_operand, NULL);
 }
+namespace {
+
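+// Computes the address of the first element of the typed array |object|:
+// the backing store of the underlying ArrayBuffer plus the view's byte
+// offset, which is stored either as a Smi or as a heap number.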
+void GetTypedArrayBackingStore(MacroAssembler* masm, Register backing_store,
+                               Register object, Register scratch) {
+  Label offset_is_not_smi, done;
+  __ mov(scratch, FieldOperand(object, JSTypedArray::kBufferOffset));
+  __ mov(backing_store,
+         FieldOperand(scratch, JSArrayBuffer::kBackingStoreOffset));
+
+  __ mov(scratch, FieldOperand(object, JSArrayBufferView::kByteOffsetOffset));
+  __ JumpIfNotSmi(scratch, &offset_is_not_smi, Label::kNear);
+  // Offset is smi.
+  __ SmiUntag(scratch);
+  __ add(backing_store, scratch);
+  __ jmp(&done, Label::kNear);
+
+  // Offset is a heap number.
+  __ bind(&offset_is_not_smi);
+  __ movsd(xmm0, FieldOperand(scratch, HeapNumber::kValueOffset));
+  __ cvttsd2si(scratch, xmm0);
+  __ add(backing_store, scratch);
+  __ bind(&done);
+}
+
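+// Dispatches on the elements kind of |object|: the instance type of the
+// elements fixed array is rebased against FIXED_INT8_ARRAY_TYPE and used as
+// an index into the jump table emitted by TypedArrayJumpTableEpilogue.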
+void TypedArrayJumpTablePrologue(MacroAssembler* masm, Register object,
+                                 Register scratch, Register scratch2,
+                                 Label* table) {
+  __ mov(scratch, FieldOperand(object, JSObject::kElementsOffset));
+  __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
+  __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
+  __ sub(scratch, Immediate(static_cast<uint8_t>(FIXED_INT8_ARRAY_TYPE)));
+  __ Assert(above_equal, kOffsetOutOfRange);
+  __ jmp(Operand::JumpTable(scratch, times_4, table));
+}
+
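+// Emits the jump table referenced by the prologue above. The STATIC_ASSERTs
+// guarantee that the entries line up with the FIXED_*_ARRAY_TYPE instance
+// types; the Float32Array and Float64Array entries abort.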
+void TypedArrayJumpTableEpilogue(MacroAssembler* masm, Label* table, Label* i8,
+                                 Label* u8, Label* i16, Label* u16, Label* i32,
+                                 Label* u32, Label* u8c) {
+  STATIC_ASSERT(FIXED_UINT8_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 1);
+  STATIC_ASSERT(FIXED_INT16_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 2);
+  STATIC_ASSERT(FIXED_UINT16_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 3);
+  STATIC_ASSERT(FIXED_INT32_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 4);
+  STATIC_ASSERT(FIXED_UINT32_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 5);
+  STATIC_ASSERT(FIXED_FLOAT32_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 6);
+  STATIC_ASSERT(FIXED_FLOAT64_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 7);
+  STATIC_ASSERT(FIXED_UINT8_CLAMPED_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 8);
+
+  Label abort;
+  __ bind(table);
+  __ dd(i8);      // Int8Array
+  __ dd(u8);      // Uint8Array
+  __ dd(i16);     // Int16Array
+  __ dd(u16);     // Uint16Array
+  __ dd(i32);     // Int32Array
+  __ dd(u32);     // Uint32Array
+  __ dd(&abort);  // Float32Array
+  __ dd(&abort);  // Float64Array
+  __ dd(u8c);     // Uint8ClampedArray
+
+  __ bind(&abort);
+  __ Abort(kNoReason);
+}
+
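+// Returns the signed 32-bit value in |value| as a Smi in eax when it fits;
+// otherwise converts it to a double in |dst| and jumps to |use_heap_number|.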
+void ReturnInteger32(MacroAssembler* masm, XMMRegister dst, Register value,
+                     Register scratch, Label* use_heap_number) {
+  Label not_smi;
+  if (!value.is(eax)) {
+    __ mov(eax, value);
+  }
+  __ JumpIfNotValidSmiValue(eax, scratch, &not_smi, Label::kNear);
+  __ SmiTag(eax);
+  __ Ret();
+
+  __ bind(&not_smi);
+  __ Cvtsi2sd(dst, eax);
+  __ jmp(use_heap_number);
+}
+
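+// As above, but for an unsigned 32-bit value. SSE2 has no unsigned
+// integer-to-double conversion, so the value is biased into the signed range,
+// converted, and 2**31 is added back as a double.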
+void ReturnUnsignedInteger32(MacroAssembler* masm, XMMRegister dst,
+                             Register value, XMMRegister scratch,
+                             Label* use_heap_number) {
+  Label not_smi;
+  if (!value.is(eax)) {
+    __ mov(eax, value);
+  }
+  __ JumpIfUIntNotValidSmiValue(eax, &not_smi, Label::kNear);
+  __ SmiTag(eax);
+  __ Ret();
+
+  __ bind(&not_smi);
+  // Convert [0, 2**32-1] -> [-2**31, 2**31-1].
+  __ add(eax, Immediate(-0x7fffffff - 1));  // -0x80000000 parses incorrectly.
+  __ Cvtsi2sd(dst, eax);
+  __ mov(eax, Immediate(0x4f000000));  // 2**31 as IEEE float
+  __ movd(scratch, eax);
+  __ cvtss2sd(scratch, scratch);
+  __ addsd(dst, scratch);
+  __ jmp(use_heap_number);
+}
+
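+// Boxes the double in |value| in a newly allocated HeapNumber returned in
+// eax, calling into the runtime if the inline allocation fails.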
+void ReturnAllocatedHeapNumber(MacroAssembler* masm, XMMRegister value,
+                               Register scratch, Register scratch2) {
+  Label call_runtime;
+  __ AllocateHeapNumber(eax, scratch, scratch2, &call_runtime);
+  __ movsd(FieldOperand(eax, HeapNumber::kValueOffset), value);
+  __ Ret();
+
+  __ bind(&call_runtime);
+  {
+    FrameScope scope(masm, StackFrame::INTERNAL);
+    __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
+    __ movsd(FieldOperand(eax, HeapNumber::kValueOffset), value);
+  }
+  __ Ret();
+}
+
+}  // anonymous namespace
+
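+// Loads the element at the untagged word32 index in eax from the typed array
+// in edx and returns it as a Smi, or as a heap number when an int32/uint32
+// element does not fit in a Smi.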
+void AtomicsLoadStub::Generate(MacroAssembler* masm) {
+  Register object = edx;
+  Register index = eax;  // Index is an untagged word32.
+  Register backing_store = ebx;
+  Label table;
+
+  GetTypedArrayBackingStore(masm, backing_store, object, ecx);
+  TypedArrayJumpTablePrologue(masm, object, ecx, esi, &table);
+
+  Label i8, u8, i16, u16, i32, u32;
+
+  __ bind(&i8);
+  __ mov_b(eax, Operand(backing_store, index, times_1, 0));
+  __ movsx_b(eax, eax);
+  __ SmiTag(eax);
+  __ Ret();
+
+  __ bind(&u8);
+  __ mov_b(eax, Operand(backing_store, index, times_1, 0));
+  __ movzx_b(eax, eax);
+  __ SmiTag(eax);
+  __ Ret();
+
+  __ bind(&i16);
+  __ mov_w(eax, Operand(backing_store, index, times_2, 0));
+  __ movsx_w(eax, eax);
+  __ SmiTag(eax);
+  __ Ret();
+
+  __ bind(&u16);
+  __ mov_w(eax, Operand(backing_store, index, times_2, 0));
+  __ movzx_w(eax, eax);
+  __ SmiTag(eax);
+  __ Ret();
+
+  Label use_heap_number;
+
+  __ bind(&i32);
+  __ mov(eax, Operand(backing_store, index, times_4, 0));
+  ReturnInteger32(masm, xmm0, eax, ecx, &use_heap_number);
+
+  __ bind(&u32);
+  __ mov(eax, Operand(backing_store, index, times_4, 0));
+  ReturnUnsignedInteger32(masm, xmm0, eax, xmm1, &use_heap_number);
+
+  __ bind(&use_heap_number);
+  ReturnAllocatedHeapNumber(masm, xmm0, ecx, edx);
+
+  TypedArrayJumpTableEpilogue(masm, &table, &i8, &u8, &i16, &u16, &i32, &u32,
+                              &u8);
+}
+
#undef __
}  // namespace internal