OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_X64 | 5 #if V8_TARGET_ARCH_X64 |
6 | 6 |
7 #include "src/bootstrapper.h" | 7 #include "src/bootstrapper.h" |
8 #include "src/code-stubs.h" | 8 #include "src/code-stubs.h" |
9 #include "src/codegen.h" | 9 #include "src/codegen.h" |
10 #include "src/ic/handler-compiler.h" | 10 #include "src/ic/handler-compiler.h" |
(...skipping 5570 matching lines...)
5581 | 5581 |
5582 // +3 is to skip prolog, return address and name handle. | 5582 // +3 is to skip prolog, return address and name handle. |
5583 Operand return_value_operand( | 5583 Operand return_value_operand( |
5584 rbp, (PropertyCallbackArguments::kReturnValueOffset + 3) * kPointerSize); | 5584 rbp, (PropertyCallbackArguments::kReturnValueOffset + 3) * kPointerSize); |
5585 CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, getter_arg, | 5585 CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, getter_arg, |
5586 kStackUnwindSpace, nullptr, return_value_operand, | 5586 kStackUnwindSpace, nullptr, return_value_operand, |
5587 NULL); | 5587 NULL); |
5588 } | 5588 } |
5589 | 5589 |
5590 | 5590 |
| 5591 namespace { |
| 5592 |
| 5593 void GetTypedArrayBackingStore(MacroAssembler* masm, Register backing_store, |
| 5594 Register object, Register scratch) { |
| 5595 Label offset_is_not_smi, done; |
| 5596 __ movp(scratch, FieldOperand(object, JSTypedArray::kBufferOffset)); |
| 5597 __ movp(backing_store, |
| 5598 FieldOperand(scratch, JSArrayBuffer::kBackingStoreOffset)); |
| 5599 |
| 5600 __ movp(scratch, FieldOperand(object, JSArrayBufferView::kByteOffsetOffset)); |
| 5601 __ JumpIfNotSmi(scratch, &offset_is_not_smi, Label::kNear); |
| 5602 // offset is smi |
| 5603 __ SmiToInteger32(scratch, scratch); |
| 5604 __ addp(backing_store, scratch); |
| 5605 __ jmp(&done, Label::kNear); |
| 5606 |
| 5607 // offset is a heap number |
| 5608 __ bind(&offset_is_not_smi); |
| 5609 __ Movsd(xmm0, FieldOperand(scratch, HeapNumber::kValueOffset)); |
| 5610 __ Cvttsd2siq(scratch, xmm0); |
| 5611 __ addp(backing_store, scratch); |
| 5612 __ bind(&done); |
| 5613 } |
| 5614 |
| 5615 |
| 5616 void TaggedToInteger32(MacroAssembler* masm, Register value) { |
| 5617 Label not_smi, done; |
| 5618 __ JumpIfNotSmi(value, &not_smi, Label::kNear); |
| 5619 __ SmiToInteger32(value, value); |
| 5620 __ jmp(&done, Label::kNear); |
| 5621 |
| 5622 __ bind(&not_smi); |
| 5623 __ Movsd(xmm0, FieldOperand(value, HeapNumber::kValueOffset)); |
| 5624 __ Cvttsd2siq(value, xmm0); |
| 5625 __ bind(&done); |
| 5626 } |
| 5627 |
| 5628 |
| 5629 void TypedArrayJumpTablePrologue(MacroAssembler* masm, Register object, |
| 5630 Register scratch, Register scratch2, |
| 5631 Label* table) { |
| 5632 __ movp(scratch, FieldOperand(object, JSObject::kElementsOffset)); |
| 5633 __ movp(scratch, FieldOperand(scratch, HeapObject::kMapOffset)); |
| 5634 __ movzxbl(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset)); |
| 5635 __ subl(scratch, Immediate(static_cast<uint8_t>(FIXED_INT8_ARRAY_TYPE))); |
| 5636 __ Assert(above_equal, kOffsetOutOfRange); |
| 5637 __ leaq(scratch2, Operand(table)); |
| 5638 __ jmp(Operand(scratch2, scratch, times_8, 0)); |
| 5639 } |
| 5640 |
| 5641 |
| 5642 void TypedArrayJumpTableEpilogue(MacroAssembler* masm, Label* table, Label* i8, |
| 5643 Label* u8, Label* i16, Label* u16, Label* i32, |
| 5644 Label* u32, Label* u8c) { |
| 5645 STATIC_ASSERT(FIXED_UINT8_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 1); |
| 5646 STATIC_ASSERT(FIXED_INT16_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 2); |
| 5647 STATIC_ASSERT(FIXED_UINT16_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 3); |
| 5648 STATIC_ASSERT(FIXED_INT32_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 4); |
| 5649 STATIC_ASSERT(FIXED_UINT32_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 5); |
| 5650 STATIC_ASSERT(FIXED_FLOAT32_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 6); |
| 5651 STATIC_ASSERT(FIXED_FLOAT64_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 7); |
| 5652 STATIC_ASSERT(FIXED_UINT8_CLAMPED_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 8); |
| 5653 |
| 5654 Label abort; |
| 5655 __ bind(table); |
| 5656 __ dq(i8); // Int8Array |
| 5657 __ dq(u8); // Uint8Array |
| 5658 __ dq(i16); // Int16Array |
| 5659 __ dq(u16); // Uint16Array |
| 5660 __ dq(i32); // Int32Array |
| 5661 __ dq(u32); // Uint32Array |
| 5662 __ dq(&abort); // Float32Array |
| 5663 __ dq(&abort); // Float64Array |
| 5664 __ dq(u8c); // Uint8ClampedArray |
| 5665 |
| 5666 __ bind(&abort); |
| 5667 __ Abort(kNoReason); |
| 5668 } |
| 5669 |
| 5670 |
| 5671 void ReturnInteger32(MacroAssembler* masm, XMMRegister dst, Register value, |
| 5672 Label* use_heap_number) { |
| 5673 Label not_smi; |
| 5674 if (!value.is(rax)) { |
| 5675 __ movp(rax, value); |
| 5676 } |
| 5677 __ JumpIfNotValidSmiValue(rax, &not_smi, Label::kNear); |
| 5678 __ Integer32ToSmi(rax, rax); |
| 5679 __ Ret(); |
| 5680 |
| 5681 __ bind(&not_smi); |
| 5682 __ Cvtlsi2sd(dst, rax); |
| 5683 __ jmp(use_heap_number); |
| 5684 } |
| 5685 |
| 5686 |
| 5687 void ReturnUnsignedInteger32(MacroAssembler* masm, XMMRegister dst, |
| 5688 Register value, Label* use_heap_number) { |
| 5689 Label not_smi; |
| 5690 if (!value.is(rax)) { |
| 5691 __ movp(rax, value); |
| 5692 } |
| 5693 __ JumpIfUIntNotValidSmiValue(rax, &not_smi, Label::kNear); |
| 5694 __ Integer32ToSmi(rax, rax); |
| 5695 __ Ret(); |
| 5696 |
| 5697 __ bind(&not_smi); |
| 5698 __ Cvtqsi2sd(dst, rax); |
| 5699 __ jmp(use_heap_number); |
| 5700 } |
| 5701 |
| 5702 |
| 5703 void ReturnAllocatedHeapNumber(MacroAssembler* masm, XMMRegister value, |
| 5704 Register scratch) { |
| 5705 Label call_runtime; |
| 5706 __ AllocateHeapNumber(rax, scratch, &call_runtime); |
| 5707 __ Movsd(FieldOperand(rax, HeapNumber::kValueOffset), value); |
| 5708 __ Ret(); |
| 5709 |
| 5710 __ bind(&call_runtime); |
| 5711 { |
| 5712 FrameScope scope(masm, StackFrame::INTERNAL); |
| 5713 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber); |
| 5714 __ Movsd(FieldOperand(rax, HeapNumber::kValueOffset), value); |
| 5715 } |
| 5716 __ Ret(); |
| 5717 } |
| 5718 |
| 5719 } // anonymous namespace |
| 5720 |
| 5721 |
| 5722 void AtomicsLoadStub::Generate(MacroAssembler* masm) { |
| 5723 Register object = rdx; |
| 5724 Register backing_store = rbx; |
| 5725 Register index = rax; |
| 5726 Label table; |
| 5727 |
| 5728 GetTypedArrayBackingStore(masm, backing_store, object, kScratchRegister); |
| 5729 TaggedToInteger32(masm, index); |
| 5730 TypedArrayJumpTablePrologue(masm, object, rcx, kScratchRegister, &table); |
| 5731 |
| 5732 Label i8, u8, i16, u16, i32, u32; |
| 5733 |
| 5734 __ bind(&i8); |
| 5735 __ movb(rax, Operand(backing_store, index, times_1, 0)); |
| 5736 __ movsxbl(rax, rax); |
| 5737 __ Integer32ToSmi(rax, rax); |
| 5738 __ Ret(); |
| 5739 |
| 5740 __ bind(&u8); |
| 5741 __ movb(rax, Operand(backing_store, index, times_1, 0)); |
| 5742 __ movzxbl(rax, rax); |
| 5743 __ Integer32ToSmi(rax, rax); |
| 5744 __ Ret(); |
| 5745 |
| 5746 __ bind(&i16); |
| 5747 __ movw(rax, Operand(backing_store, index, times_2, 0)); |
| 5748 __ movsxwl(rax, rax); |
| 5749 __ Integer32ToSmi(rax, rax); |
| 5750 __ Ret(); |
| 5751 |
| 5752 __ bind(&u16); |
| 5753 __ movw(rax, Operand(backing_store, index, times_2, 0)); |
| 5754 __ movzxwl(rax, rax); |
| 5755 __ Integer32ToSmi(rax, rax); |
| 5756 __ Ret(); |
| 5757 |
| 5758 Label use_heap_number; |
| 5759 |
| 5760 __ bind(&i32); |
| 5761 __ movl(rax, Operand(backing_store, index, times_4, 0)); |
| 5762 ReturnInteger32(masm, xmm0, rax, &use_heap_number); |
| 5763 |
| 5764 __ bind(&u32); |
| 5765 __ movl(rax, Operand(backing_store, index, times_4, 0)); |
| 5766 ReturnUnsignedInteger32(masm, xmm0, rax, &use_heap_number); |
| 5767 |
| 5768 __ bind(&use_heap_number); |
| 5769 ReturnAllocatedHeapNumber(masm, xmm0, rcx); |
| 5770 |
| 5771 TypedArrayJumpTableEpilogue(masm, &table, &i8, &u8, &i16, &u16, &i32, &u32, |
| 5772 &u8); |
| 5773 } |
| 5774 |
| 5775 |
5591 #undef __ | 5776 #undef __ |
5592 | 5777 |
5593 } // namespace internal | 5778 } // namespace internal |
5594 } // namespace v8 | 5779 } // namespace v8 |
5595 | 5780 |
5596 #endif // V8_TARGET_ARCH_X64 | 5781 #endif // V8_TARGET_ARCH_X64 |