OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_X64 | 5 #if V8_TARGET_ARCH_X64 |
6 | 6 |
7 #include "src/code-stubs.h" | 7 #include "src/code-stubs.h" |
8 #include "src/api-arguments.h" | 8 #include "src/api-arguments.h" |
9 #include "src/bootstrapper.h" | 9 #include "src/bootstrapper.h" |
10 #include "src/codegen.h" | 10 #include "src/codegen.h" |
(...skipping 5546 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
5557 !api_function_address.is(name_arg)); | 5557 !api_function_address.is(name_arg)); |
5558 | 5558 |
5559 // +3 is to skip prolog, return address and name handle. | 5559 // +3 is to skip prolog, return address and name handle. |
5560 Operand return_value_operand( | 5560 Operand return_value_operand( |
5561 rbp, (PropertyCallbackArguments::kReturnValueOffset + 3) * kPointerSize); | 5561 rbp, (PropertyCallbackArguments::kReturnValueOffset + 3) * kPointerSize); |
5562 CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, getter_arg, | 5562 CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, getter_arg, |
5563 kStackUnwindSpace, nullptr, return_value_operand, | 5563 kStackUnwindSpace, nullptr, return_value_operand, |
5564 NULL); | 5564 NULL); |
5565 } | 5565 } |
5566 | 5566 |
namespace {

// Computes the address of the first element of the JSTypedArray |object|'s
// backing store into |backing_store|:
//   backing_store = buffer->backing_store + view->byte_offset.
// Clobbers |scratch|, and xmm0 when the byte offset is stored as a heap
// number rather than a Smi.
void GetTypedArrayBackingStore(MacroAssembler* masm, Register backing_store,
                               Register object, Register scratch) {
  Label offset_is_not_smi, done;
  // Load the JSArrayBuffer backing the view, then its raw data pointer.
  __ movp(scratch, FieldOperand(object, JSTypedArray::kBufferOffset));
  __ movp(backing_store,
          FieldOperand(scratch, JSArrayBuffer::kBackingStoreOffset));

  // The view's byte offset is either a Smi or a heap number.
  __ movp(scratch, FieldOperand(object, JSArrayBufferView::kByteOffsetOffset));
  __ JumpIfNotSmi(scratch, &offset_is_not_smi, Label::kNear);
  // offset is smi
  __ SmiToInteger32(scratch, scratch);
  __ addp(backing_store, scratch);
  __ jmp(&done, Label::kNear);

  // offset is a heap number
  __ bind(&offset_is_not_smi);
  // Truncating double->int64 conversion of the offset before adding it.
  // NOTE(review): assumes the stored offset is an exact non-negative integer
  // representable in an int64 -- confirm callers guarantee this.
  __ Movsd(xmm0, FieldOperand(scratch, HeapNumber::kValueOffset));
  __ Cvttsd2siq(scratch, xmm0);
  __ addp(backing_store, scratch);
  __ bind(&done);
}
5590 | |
// Emits a dispatch on |object|'s fixed typed array elements kind: loads the
// elements' instance type, rebases it so that FIXED_INT8_ARRAY_TYPE maps to
// index 0, and performs an indirect jump through the 8-byte-entry jump table
// bound at |table| (emitted by TypedArrayJumpTableEpilogue).
// Clobbers |scratch| and |scratch2|.
void TypedArrayJumpTablePrologue(MacroAssembler* masm, Register object,
                                 Register scratch, Register scratch2,
                                 Label* table) {
  __ movp(scratch, FieldOperand(object, JSObject::kElementsOffset));
  __ movp(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
  __ movzxbl(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
  __ subl(scratch, Immediate(static_cast<uint8_t>(FIXED_INT8_ARRAY_TYPE)));
  // Debug-only check that the subtraction did not borrow, i.e. the instance
  // type was at least FIXED_INT8_ARRAY_TYPE.
  // NOTE(review): kOffsetOutOfRange reads oddly for an elements-kind check;
  // confirm this is the intended abort reason.
  __ Assert(above_equal, kOffsetOutOfRange);
  __ leaq(scratch2, Operand(table));
  __ jmp(Operand(scratch2, scratch, times_8, 0));
}
5602 | |
// Emits the jump table consumed by TypedArrayJumpTablePrologue. Table entry i
// is the handler for instance type FIXED_INT8_ARRAY_TYPE + i; the
// STATIC_ASSERTs below pin that ordering. The Float32Array and Float64Array
// slots jump to a local abort stub (no float handlers are supplied).
void TypedArrayJumpTableEpilogue(MacroAssembler* masm, Label* table, Label* i8,
                                 Label* u8, Label* i16, Label* u16, Label* i32,
                                 Label* u32, Label* u8c) {
  // Lock in the instance-type layout the table indexing relies on.
  STATIC_ASSERT(FIXED_UINT8_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 1);
  STATIC_ASSERT(FIXED_INT16_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 2);
  STATIC_ASSERT(FIXED_UINT16_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 3);
  STATIC_ASSERT(FIXED_INT32_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 4);
  STATIC_ASSERT(FIXED_UINT32_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 5);
  STATIC_ASSERT(FIXED_FLOAT32_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 6);
  STATIC_ASSERT(FIXED_FLOAT64_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 7);
  STATIC_ASSERT(FIXED_UINT8_CLAMPED_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 8);

  Label abort;
  __ bind(table);
  // One 8-byte code address per fixed typed array instance type, in order.
  __ dq(i8);      // Int8Array
  __ dq(u8);      // Uint8Array
  __ dq(i16);     // Int16Array
  __ dq(u16);     // Uint16Array
  __ dq(i32);     // Int32Array
  __ dq(u32);     // Uint32Array
  __ dq(&abort);  // Float32Array
  __ dq(&abort);  // Float64Array
  __ dq(u8c);     // Uint8ClampedArray

  __ bind(&abort);
  __ Abort(kNoReason);
}
5630 | |
// Returns the signed 32-bit integer in |value| to the stub's caller: as a
// Smi in rax when the value fits, otherwise converts it to a double in |dst|
// and jumps to |use_heap_number|, which is expected to box |dst|.
void ReturnInteger32(MacroAssembler* masm, XMMRegister dst, Register value,
                     Label* use_heap_number) {
  Label not_smi;
  if (!value.is(rax)) {
    __ movp(rax, value);
  }
  __ JumpIfNotValidSmiValue(rax, &not_smi, Label::kNear);
  __ Integer32ToSmi(rax, rax);
  __ Ret();

  __ bind(&not_smi);
  // Signed 32-bit integer to double conversion.
  __ Cvtlsi2sd(dst, rax);
  __ jmp(use_heap_number);
}
5645 | |
// Returns the unsigned 32-bit integer in |value| to the stub's caller: as a
// Smi in rax when it fits in the unsigned Smi range, otherwise converts it to
// a double in |dst| and jumps to |use_heap_number|, which is expected to box
// |dst|.
void ReturnUnsignedInteger32(MacroAssembler* masm, XMMRegister dst,
                             Register value, Label* use_heap_number) {
  Label not_smi;
  if (!value.is(rax)) {
    __ movp(rax, value);
  }
  __ JumpIfUIntNotValidSmiValue(rax, &not_smi, Label::kNear);
  __ Integer32ToSmi(rax, rax);
  __ Ret();

  __ bind(&not_smi);
  // Convert as a signed 64-bit integer: when the upper half of rax is zero,
  // the unsigned 32-bit value reads as a non-negative int64 and converts
  // exactly. NOTE(review): assumes |value| was zero-extended to 64 bits --
  // this holds for the movl loads in AtomicsLoadStub::Generate; confirm for
  // any other caller.
  __ Cvtqsi2sd(dst, rax);
  __ jmp(use_heap_number);
}
5660 | |
// Boxes the double in |value| into a freshly allocated HeapNumber and returns
// it in rax. Tries an inline allocation first (clobbering |scratch|); on
// allocation failure falls back to Runtime::kAllocateHeapNumber, saving the
// double registers across the runtime call so |value| survives it.
void ReturnAllocatedHeapNumber(MacroAssembler* masm, XMMRegister value,
                               Register scratch) {
  Label call_runtime;
  __ AllocateHeapNumber(rax, scratch, &call_runtime);
  __ Movsd(FieldOperand(rax, HeapNumber::kValueOffset), value);
  __ Ret();

  __ bind(&call_runtime);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
    // The runtime call returns the new HeapNumber in rax; fill in its value.
    __ Movsd(FieldOperand(rax, HeapNumber::kValueOffset), value);
  }
  __ Ret();
}

}  // anonymous namespace
5678 | |
// Generates the Atomics.load stub: loads one element from an integer typed
// array and returns it in rax as a Smi, or as a HeapNumber when the value
// does not fit in a Smi.
// Register contract: rdx holds the JSTypedArray, rax the untagged word32
// element index. Dispatch over the elements kind goes through the jump table
// emitted by TypedArrayJumpTableEpilogue; Float32/Float64 arrays abort there,
// and Uint8ClampedArray reuses the Uint8Array handler (|&u8| passed twice
// below).
void AtomicsLoadStub::Generate(MacroAssembler* masm) {
  Register object = rdx;
  Register index = rax;  // Index is an untagged word32.
  Register backing_store = rbx;
  Label table;

  GetTypedArrayBackingStore(masm, backing_store, object, kScratchRegister);
  TypedArrayJumpTablePrologue(masm, object, rcx, kScratchRegister, &table);

  Label i8, u8, i16, u16, i32, u32;

  // Int8Array: load byte, sign-extend, return as Smi.
  __ bind(&i8);
  __ movb(rax, Operand(backing_store, index, times_1, 0));
  __ movsxbl(rax, rax);
  __ Integer32ToSmi(rax, rax);
  __ Ret();

  // Uint8Array (and Uint8ClampedArray): load byte, zero-extend, return as Smi.
  __ bind(&u8);
  __ movb(rax, Operand(backing_store, index, times_1, 0));
  __ movzxbl(rax, rax);
  __ Integer32ToSmi(rax, rax);
  __ Ret();

  // Int16Array: load word, sign-extend, return as Smi.
  __ bind(&i16);
  __ movw(rax, Operand(backing_store, index, times_2, 0));
  __ movsxwl(rax, rax);
  __ Integer32ToSmi(rax, rax);
  __ Ret();

  // Uint16Array: load word, zero-extend, return as Smi.
  __ bind(&u16);
  __ movw(rax, Operand(backing_store, index, times_2, 0));
  __ movzxwl(rax, rax);
  __ Integer32ToSmi(rax, rax);
  __ Ret();

  Label use_heap_number;

  // Int32Array: may exceed the Smi range, so fall back to a HeapNumber.
  __ bind(&i32);
  __ movl(rax, Operand(backing_store, index, times_4, 0));
  ReturnInteger32(masm, xmm0, rax, &use_heap_number);

  // Uint32Array: movl zero-extends rax, satisfying ReturnUnsignedInteger32's
  // precondition for its 64-bit double conversion.
  __ bind(&u32);
  __ movl(rax, Operand(backing_store, index, times_4, 0));
  ReturnUnsignedInteger32(masm, xmm0, rax, &use_heap_number);

  // Shared boxing path for out-of-Smi-range 32-bit values (double in xmm0).
  __ bind(&use_heap_number);
  ReturnAllocatedHeapNumber(masm, xmm0, rcx);

  TypedArrayJumpTableEpilogue(masm, &table, &i8, &u8, &i16, &u16, &i32, &u32,
                              &u8);
}
5730 | |
5731 #undef __ | 5567 #undef __ |
5732 | 5568 |
5733 } // namespace internal | 5569 } // namespace internal |
5734 } // namespace v8 | 5570 } // namespace v8 |
5735 | 5571 |
5736 #endif // V8_TARGET_ARCH_X64 | 5572 #endif // V8_TARGET_ARCH_X64 |
OLD | NEW |