| OLD | NEW |
| 1 // Copyright 2016 the V8 project authors. All rights reserved. | 1 // Copyright 2016 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 #include "src/code-stub-assembler.h" | 4 #include "src/code-stub-assembler.h" |
| 5 #include "src/code-factory.h" | 5 #include "src/code-factory.h" |
| 6 #include "src/frames-inl.h" | 6 #include "src/frames-inl.h" |
| 7 #include "src/frames.h" | 7 #include "src/frames.h" |
| 8 | 8 |
| 9 namespace v8 { | 9 namespace v8 { |
| 10 namespace internal { | 10 namespace internal { |
| (...skipping 662 matching lines...) |
| 673 // Bail out if receiver has slow elements. | 673 // Bail out if receiver has slow elements. |
| 674 GotoIfNot(IsFastElementsKind(elements_kind), if_false); | 674 GotoIfNot(IsFastElementsKind(elements_kind), if_false); |
| 675 | 675 |
| 676 // Check prototype chain if receiver does not have packed elements. | 676 // Check prototype chain if receiver does not have packed elements. |
| 677 if (mode == FastJSArrayAccessMode::INBOUNDS_READ) { | 677 if (mode == FastJSArrayAccessMode::INBOUNDS_READ) { |
| 678 GotoIfNot(IsHoleyFastElementsKind(elements_kind), if_true); | 678 GotoIfNot(IsHoleyFastElementsKind(elements_kind), if_true); |
| 679 } | 679 } |
| 680 BranchIfPrototypesHaveNoElements(map, if_true, if_false); | 680 BranchIfPrototypesHaveNoElements(map, if_true, if_false); |
| 681 } | 681 } |
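
For context on the two checks above: the fast ElementsKind values are laid out so that all fast kinds come first and each packed kind is paired with a holey kind one step above it. A minimal sketch of the predicates, with numeric values inlined for illustration (the authoritative definitions live in src/elements-kind.h):

    // Illustrative mirror of the historical fast ElementsKind layout; the
    // values are assumptions for this sketch, not authoritative definitions.
    enum ElementsKind {
      FAST_SMI_ELEMENTS = 0,
      FAST_HOLEY_SMI_ELEMENTS = 1,
      FAST_ELEMENTS = 2,
      FAST_HOLEY_ELEMENTS = 3,
      FAST_DOUBLE_ELEMENTS = 4,
      FAST_HOLEY_DOUBLE_ELEMENTS = 5,
      DICTIONARY_ELEMENTS = 6  // first slow kind
    };

    inline bool IsFastElementsKind(ElementsKind kind) {
      return kind <= FAST_HOLEY_DOUBLE_ELEMENTS;
    }

    // Holey kinds are the odd values in each packed/holey pair. A hole found
    // by an in-bounds read falls through to the prototype chain, which is why
    // the holey case still requires BranchIfPrototypesHaveNoElements.
    inline bool IsHoleyFastElementsKind(ElementsKind kind) {
      return (kind & 1) != 0;
    }
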
| 682 | 682 |
| 683 Node* CodeStubAssembler::AllocateRawUnaligned(Node* size_in_bytes, | 683 Node* CodeStubAssembler::AllocateRaw(Node* size_in_bytes, AllocationFlags flags, |
| 684 AllocationFlags flags, | 684 Node* top_address, Node* limit_address) { |
| 685 Node* top_address, | |
| 686 Node* limit_address) { | |
| 687 Node* top = Load(MachineType::Pointer(), top_address); | 685 Node* top = Load(MachineType::Pointer(), top_address); |
| 688 Node* limit = Load(MachineType::Pointer(), limit_address); | 686 Node* limit = Load(MachineType::Pointer(), limit_address); |
| 689 | 687 |
| 690 // If there's not enough space, call the runtime. | 688 // If there's not enough space, call the runtime. |
| 691 Variable result(this, MachineRepresentation::kTagged); | 689 Variable result(this, MachineRepresentation::kTagged); |
| 692 Label runtime_call(this, Label::kDeferred), no_runtime_call(this); | 690 Label runtime_call(this, Label::kDeferred), no_runtime_call(this); |
| 693 Label merge_runtime(this, &result); | 691 Label merge_runtime(this, &result); |
| 694 | 692 |
| 693 bool const needs_double_alignment = (flags & kDoubleAlignment) != 0; |
| 694 |
| 695 if (flags & kAllowLargeObjectAllocation) { | 695 if (flags & kAllowLargeObjectAllocation) { |
| 696 Label next(this); | 696 Label next(this); |
| 697 GotoIf(IsRegularHeapObjectSize(size_in_bytes), &next); | 697 GotoIf(IsRegularHeapObjectSize(size_in_bytes), &next); |
| 698 | 698 |
| 699 Node* runtime_flags = SmiConstant( | 699 Node* runtime_flags = SmiConstant( |
| 700 Smi::FromInt(AllocateDoubleAlignFlag::encode(false) | | 700 Smi::FromInt(AllocateDoubleAlignFlag::encode(needs_double_alignment) | |
| 701 AllocateTargetSpace::encode(AllocationSpace::LO_SPACE))); | 701 AllocateTargetSpace::encode(AllocationSpace::LO_SPACE))); |
| 702 Node* const runtime_result = | 702 Node* const runtime_result = |
| 703 CallRuntime(Runtime::kAllocateInTargetSpace, NoContextConstant(), | 703 CallRuntime(Runtime::kAllocateInTargetSpace, NoContextConstant(), |
| 704 SmiTag(size_in_bytes), runtime_flags); | 704 SmiTag(size_in_bytes), runtime_flags); |
| 705 result.Bind(runtime_result); | 705 result.Bind(runtime_result); |
| 706 Goto(&merge_runtime); | 706 Goto(&merge_runtime); |
| 707 | 707 |
| 708 Bind(&next); | 708 Bind(&next); |
| 709 } | 709 } |
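
The runtime_flags Smi above packs an alignment bit and a target-space field into one small integer before boxing. A sketch of that encoding under assumed bit positions (the real encoders are BitField instantiations; shifts and widths here are illustrative):

    #include <cstdint>

    // Assumed stand-ins for the BitField-generated encoders used above.
    struct AllocateDoubleAlignFlag {
      static uint32_t encode(bool align) { return align ? 1u : 0u; }  // bit 0
    };
    struct AllocateTargetSpace {
      // The AllocationSpace value is assumed to occupy the bits above the
      // alignment flag.
      static uint32_t encode(uint32_t space) { return space << 1; }
    };

    // Combined exactly as in the stub code, then boxed via Smi::FromInt:
    //   AllocateDoubleAlignFlag::encode(needs_double_alignment) |
    //   AllocateTargetSpace::encode(LO_SPACE)
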
| 710 | 710 |
| 711 Node* new_top = IntPtrAdd(top, size_in_bytes); | 711 Variable adjusted_size(this, MachineType::PointerRepresentation(), |
| 712 size_in_bytes); |
| 713 |
| 714 if (needs_double_alignment) { |
| 715 Label not_aligned(this), done_alignment(this, &adjusted_size); |
| 716 |
| 717 Branch(WordAnd(top, IntPtrConstant(kDoubleAlignmentMask)), &not_aligned, |
| 718 &done_alignment); |
| 719 |
| 720 Bind(&not_aligned); |
| 721 Node* not_aligned_size = IntPtrAdd(size_in_bytes, IntPtrConstant(kPointerSize)); |
| 722 adjusted_size.Bind(not_aligned_size); |
| 723 Goto(&done_alignment); |
| 724 |
| 725 Bind(&done_alignment); |
| 726 } |
| 727 |
| 728 Node* new_top = IntPtrAdd(top, adjusted_size.value()); |
| 729 |
| 712 Branch(UintPtrGreaterThanOrEqual(new_top, limit), &runtime_call, | 730 Branch(UintPtrGreaterThanOrEqual(new_top, limit), &runtime_call, |
| 713 &no_runtime_call); | 731 &no_runtime_call); |
| 714 | 732 |
| 715 Bind(&runtime_call); | 733 Bind(&runtime_call); |
| 716 Node* runtime_result; | 734 Node* runtime_result; |
| 717 if (flags & kPretenured) { | 735 if (flags & kPretenured) { |
| 718 Node* runtime_flags = SmiConstant( | 736 Node* runtime_flags = SmiConstant( |
| 719 Smi::FromInt(AllocateDoubleAlignFlag::encode(false) | | 737 Smi::FromInt(AllocateDoubleAlignFlag::encode(needs_double_alignment) | |
| 720 AllocateTargetSpace::encode(AllocationSpace::OLD_SPACE))); | 738 AllocateTargetSpace::encode(AllocationSpace::OLD_SPACE))); |
| 721 runtime_result = | 739 runtime_result = |
| 722 CallRuntime(Runtime::kAllocateInTargetSpace, NoContextConstant(), | 740 CallRuntime(Runtime::kAllocateInTargetSpace, NoContextConstant(), |
| 723 SmiTag(size_in_bytes), runtime_flags); | 741 SmiTag(size_in_bytes), runtime_flags); |
| 724 } else { | 742 } else { |
| 725 runtime_result = CallRuntime(Runtime::kAllocateInNewSpace, | 743 runtime_result = CallRuntime(Runtime::kAllocateInNewSpace, |
| 726 NoContextConstant(), SmiTag(size_in_bytes)); | 744 NoContextConstant(), SmiTag(size_in_bytes)); |
| 727 } | 745 } |
| 728 result.Bind(runtime_result); | 746 result.Bind(runtime_result); |
| 729 Goto(&merge_runtime); | 747 Goto(&merge_runtime); |
| 730 | 748 |
| 731 // When there is enough space, return `top' and bump it up. | 749 // When there is enough space, return `top' and bump it up. |
| 732 Bind(&no_runtime_call); | 750 Bind(&no_runtime_call); |
| 733 Node* no_runtime_result = top; | 751 Node* no_runtime_result = top; |
| 734 StoreNoWriteBarrier(MachineType::PointerRepresentation(), top_address, | 752 StoreNoWriteBarrier(MachineType::PointerRepresentation(), top_address, |
| 735 new_top); | 753 new_top); |
| 754 |
| 755 Variable address(this, MachineType::PointerRepresentation(), |
| 756 no_runtime_result); |
| 757 |
| 758 if (needs_double_alignment) { |
| 759 Label needs_filler(this), done_filling(this, &address); |
| 760 Branch(IntPtrEqual(adjusted_size.value(), size_in_bytes), &done_filling, |
| 761 &needs_filler); |
| 762 |
| 763 Bind(&needs_filler); |
| 764 // Store a filler and increase the address by kPointerSize. |
| 765 StoreNoWriteBarrier(MachineRepresentation::kTagged, top, |
| 766 LoadRoot(Heap::kOnePointerFillerMapRootIndex)); |
| 767 address.Bind(IntPtrAdd(no_runtime_result, IntPtrConstant(kPointerSize))); |
| 768 |
| 769 Goto(&done_filling); |
| 770 |
| 771 Bind(&done_filling); |
| 772 } |
| 773 |
| 736 no_runtime_result = BitcastWordToTagged( | 774 no_runtime_result = BitcastWordToTagged( |
| 737 IntPtrAdd(no_runtime_result, IntPtrConstant(kHeapObjectTag))); | 775 IntPtrAdd(address.value(), IntPtrConstant(kHeapObjectTag))); |
| 776 |
| 738 result.Bind(no_runtime_result); | 777 result.Bind(no_runtime_result); |
| 739 Goto(&merge_runtime); | 778 Goto(&merge_runtime); |
| 740 | 779 |
| 741 Bind(&merge_runtime); | 780 Bind(&merge_runtime); |
| 742 return result.value(); | 781 return result.value(); |
| 743 } | 782 } |
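
The merged AllocateRaw now carries both the plain bump-pointer path and the double-alignment fix-up that used to live in AllocateRawAligned. A compact C++ sketch of the fast path it emits, with plain pointers standing in for CSA nodes (constants and the filler store are illustrative, and returning 0 stands in for the branch to the runtime):

    #include <cstddef>
    #include <cstdint>

    // Illustrative constants; the real ones come from V8's globals.h.
    constexpr uintptr_t kDoubleAlignmentMask = 7;  // 8-byte alignment
    constexpr size_t kPointerSize = 4;             // 32-bit host

    // Bump the top pointer; when double alignment is requested but top is
    // misaligned, reserve one extra word up front and plug it with a
    // one-word filler afterwards so the heap stays iterable.
    uintptr_t BumpPointerAllocate(uintptr_t* top, uintptr_t limit,
                                  size_t size_in_bytes, bool double_align) {
      size_t adjusted_size = size_in_bytes;
      if (double_align && (*top & kDoubleAlignmentMask) != 0) {
        adjusted_size += kPointerSize;  // room for the filler word
      }
      uintptr_t new_top = *top + adjusted_size;
      if (new_top >= limit) return 0;  // slow path: allocate in the runtime
      uintptr_t address = *top;
      *top = new_top;
      if (adjusted_size != size_in_bytes) {
        // Stand-in for storing the one-pointer filler map at `address`.
        *reinterpret_cast<uint32_t*>(address) = 0u;
        address += kPointerSize;
      }
      return address;  // untagged; the CSA code then adds kHeapObjectTag
    }
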
| 744 | 783 |
| 745 Node* CodeStubAssembler::AllocateRawAligned(Node* size_in_bytes, | 784 Node* CodeStubAssembler::AllocateRawUnaligned(Node* size_in_bytes, |
| 746 AllocationFlags flags, | 785 AllocationFlags flags, |
| 747 Node* top_address, | 786 Node* top_address, |
| 748 Node* limit_address) { | 787 Node* limit_address) { |
| 749 Node* top = Load(MachineType::Pointer(), top_address); | 788 DCHECK((flags & kDoubleAlignment) == 0); |
| 750 Variable adjusted_size(this, MachineType::PointerRepresentation(), | 789 return AllocateRaw(size_in_bytes, flags, top_address, limit_address); |
| 751 size_in_bytes); | 790 } |
| 752 if (flags & kDoubleAlignment) { | |
| 753 Label not_aligned(this), done_alignment(this, &adjusted_size); | |
| 754 Branch(WordAnd(top, IntPtrConstant(kDoubleAlignmentMask)), &not_aligned, | |
| 755 &done_alignment); | |
| 756 | 791 |
| 757 Bind(&not_aligned); | 792 Node* CodeStubAssembler::AllocateRawDoubleAligned(Node* size_in_bytes, |
| 758 Node* not_aligned_size = | 793 AllocationFlags flags, |
| 759 IntPtrAdd(size_in_bytes, IntPtrConstant(kPointerSize)); | 794 Node* top_address, |
| 760 adjusted_size.Bind(not_aligned_size); | 795 Node* limit_address) { |
| 761 Goto(&done_alignment); | 796 #if defined(V8_HOST_ARCH_32_BIT) |
| 762 | 797 return AllocateRaw(size_in_bytes, flags | kDoubleAlignment, top_address, |
| 763 Bind(&done_alignment); | 798 limit_address); |
| 764 } | 799 #elif defined(V8_HOST_ARCH_64_BIT) |
| 765 | 800 // Allocation on 64 bit machine is naturally double aligned |
| 766 Variable address(this, MachineRepresentation::kTagged, | 801 return AllocateRaw(size_in_bytes, flags & ~kDoubleAlignment, top_address, |
| 767 AllocateRawUnaligned(adjusted_size.value(), kNone, | 802 limit_address); |
| 768 top_address, limit_address)); | 803 #else |
| 769 | 804 #error Architecture not supported |
| 770 Label needs_filler(this), done_filling(this, &address); | 805 #endif |
| 771 Branch(IntPtrEqual(adjusted_size.value(), size_in_bytes), &done_filling, | |
| 772 &needs_filler); | |
| 773 | |
| 774 Bind(&needs_filler); | |
| 775 // Store a filler and increase the address by kPointerSize. | |
| 776 StoreNoWriteBarrier(MachineType::PointerRepresentation(), top, | |
| 777 LoadRoot(Heap::kOnePointerFillerMapRootIndex)); | |
| 778 address.Bind(BitcastWordToTagged( | |
| 779 IntPtrAdd(address.value(), IntPtrConstant(kPointerSize)))); | |
| 780 Goto(&done_filling); | |
| 781 | |
| 782 Bind(&done_filling); | |
| 783 // Update the top. | |
| 784 StoreNoWriteBarrier(MachineType::PointerRepresentation(), top_address, | |
| 785 IntPtrAdd(top, adjusted_size.value())); | |
| 786 return address.value(); | |
| 787 } | 806 } |
| 788 | 807 |
| 789 Node* CodeStubAssembler::AllocateInNewSpace(Node* size_in_bytes, | 808 Node* CodeStubAssembler::AllocateInNewSpace(Node* size_in_bytes, |
| 790 AllocationFlags flags) { | 809 AllocationFlags flags) { |
| 791 DCHECK(flags == kNone || flags == kDoubleAlignment); | 810 DCHECK(flags == kNone || flags == kDoubleAlignment); |
| 792 CSA_ASSERT(this, IsRegularHeapObjectSize(size_in_bytes)); | 811 CSA_ASSERT(this, IsRegularHeapObjectSize(size_in_bytes)); |
| 793 return Allocate(size_in_bytes, flags); | 812 return Allocate(size_in_bytes, flags); |
| 794 } | 813 } |
| 795 | 814 |
| 796 Node* CodeStubAssembler::Allocate(Node* size_in_bytes, AllocationFlags flags) { | 815 Node* CodeStubAssembler::Allocate(Node* size_in_bytes, AllocationFlags flags) { |
| 797 Comment("Allocate"); | 816 Comment("Allocate"); |
| 798 bool const new_space = !(flags & kPretenured); | 817 bool const new_space = !(flags & kPretenured); |
| 799 Node* top_address = ExternalConstant( | 818 Node* top_address = ExternalConstant( |
| 800 new_space | 819 new_space |
| 801 ? ExternalReference::new_space_allocation_top_address(isolate()) | 820 ? ExternalReference::new_space_allocation_top_address(isolate()) |
| 802 : ExternalReference::old_space_allocation_top_address(isolate())); | 821 : ExternalReference::old_space_allocation_top_address(isolate())); |
| 803 DCHECK_EQ(kPointerSize, | 822 DCHECK_EQ(kPointerSize, |
| 804 ExternalReference::new_space_allocation_limit_address(isolate()) | 823 ExternalReference::new_space_allocation_limit_address(isolate()) |
| 805 .address() - | 824 .address() - |
| 806 ExternalReference::new_space_allocation_top_address(isolate()) | 825 ExternalReference::new_space_allocation_top_address(isolate()) |
| 807 .address()); | 826 .address()); |
| 808 DCHECK_EQ(kPointerSize, | 827 DCHECK_EQ(kPointerSize, |
| 809 ExternalReference::old_space_allocation_limit_address(isolate()) | 828 ExternalReference::old_space_allocation_limit_address(isolate()) |
| 810 .address() - | 829 .address() - |
| 811 ExternalReference::old_space_allocation_top_address(isolate()) | 830 ExternalReference::old_space_allocation_top_address(isolate()) |
| 812 .address()); | 831 .address()); |
| 813 Node* limit_address = IntPtrAdd(top_address, IntPtrConstant(kPointerSize)); | 832 Node* limit_address = IntPtrAdd(top_address, IntPtrConstant(kPointerSize)); |
| 814 | 833 |
| 815 #ifdef V8_HOST_ARCH_32_BIT | |
| 816 if (flags & kDoubleAlignment) { | 834 if (flags & kDoubleAlignment) { |
| 817 return AllocateRawAligned(size_in_bytes, flags, top_address, limit_address); | 835 return AllocateRawDoubleAligned(size_in_bytes, flags, top_address, |
| 836 limit_address); |
| 837 } else { |
| 838 return AllocateRawUnaligned(size_in_bytes, flags, top_address, |
| 839 limit_address); |
| 818 } | 840 } |
| 819 #endif | |
| 820 | |
| 821 return AllocateRawUnaligned(size_in_bytes, flags, top_address, limit_address); | |
| 822 } | 841 } |
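
The DCHECK_EQs above pin down the layout trick this function relies on: the limit word sits exactly one pointer after the top word, so limit_address can be derived from top_address rather than loaded as a second external reference. A sketch of that layout contract (the struct is illustrative, not V8's actual type):

    #include <cstddef>
    #include <cstdint>

    // Illustrative mirror of the allocation-info layout the DCHECK_EQs verify.
    struct AllocationInfo {
      uintptr_t top;    // next free address in the linear allocation area
      uintptr_t limit;  // end of the linear allocation area
    };
    static_assert(offsetof(AllocationInfo, limit) ==
                      offsetof(AllocationInfo, top) + sizeof(uintptr_t),
                  "limit_address == top_address + kPointerSize");
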
| 823 | 842 |
| 824 Node* CodeStubAssembler::AllocateInNewSpace(int size_in_bytes, | 843 Node* CodeStubAssembler::AllocateInNewSpace(int size_in_bytes, |
| 825 AllocationFlags flags) { | 844 AllocationFlags flags) { |
| 826 CHECK(flags == kNone || flags == kDoubleAlignment); | 845 CHECK(flags == kNone || flags == kDoubleAlignment); |
| 827 DCHECK_LE(size_in_bytes, kMaxRegularHeapObjectSize); | 846 DCHECK_LE(size_in_bytes, kMaxRegularHeapObjectSize); |
| 828 return CodeStubAssembler::Allocate(IntPtrConstant(size_in_bytes), flags); | 847 return CodeStubAssembler::Allocate(IntPtrConstant(size_in_bytes), flags); |
| 829 } | 848 } |
| 830 | 849 |
| 831 Node* CodeStubAssembler::Allocate(int size_in_bytes, AllocationFlags flags) { | 850 Node* CodeStubAssembler::Allocate(int size_in_bytes, AllocationFlags flags) { |
| (...skipping 7560 matching lines...) |
| 8392 formatted.c_str(), TENURED); | 8411 formatted.c_str(), TENURED); |
| 8393 CallRuntime(Runtime::kGlobalPrint, NoContextConstant(), | 8412 CallRuntime(Runtime::kGlobalPrint, NoContextConstant(), |
| 8394 HeapConstant(string)); | 8413 HeapConstant(string)); |
| 8395 } | 8414 } |
| 8396 CallRuntime(Runtime::kDebugPrint, NoContextConstant(), tagged_value); | 8415 CallRuntime(Runtime::kDebugPrint, NoContextConstant(), tagged_value); |
| 8397 #endif | 8416 #endif |
| 8398 } | 8417 } |
| 8399 | 8418 |
| 8400 } // namespace internal | 8419 } // namespace internal |
| 8401 } // namespace v8 | 8420 } // namespace v8 |