Chromium Code Reviews

Unified Diff: src/code-stub-assembler.cc

Issue 2784253002: MIPS: Fix `[builtins] Reland of Port TypedArrayInitialize to CodeStubAssembler.` (Closed)
Patch Set: Created 3 years, 8 months ago
 // Copyright 2016 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 #include "src/code-stub-assembler.h"
 #include "src/code-factory.h"
 #include "src/frames-inl.h"
 #include "src/frames.h"

 namespace v8 {
 namespace internal {
(...skipping 662 matching lines...)
   // Bailout if receiver has slow elements.
   GotoIfNot(IsFastElementsKind(elements_kind), if_false);

   // Check prototype chain if receiver does not have packed elements.
   if (mode == FastJSArrayAccessMode::INBOUNDS_READ) {
     GotoIfNot(IsHoleyFastElementsKind(elements_kind), if_true);
   }
   BranchIfPrototypesHaveNoElements(map, if_true, if_false);
 }

-Node* CodeStubAssembler::AllocateRawUnaligned(Node* size_in_bytes,
-                                              AllocationFlags flags,
-                                              Node* top_address,
-                                              Node* limit_address) {
+Node* CodeStubAssembler::AllocateRaw(Node* size_in_bytes, AllocationFlags flags,
+                                     Node* top_address, Node* limit_address) {
   Node* top = Load(MachineType::Pointer(), top_address);
   Node* limit = Load(MachineType::Pointer(), limit_address);

   // If there's not enough space, call the runtime.
   Variable result(this, MachineRepresentation::kTagged);
   Label runtime_call(this, Label::kDeferred), no_runtime_call(this);
   Label merge_runtime(this, &result);

   if (flags & kAllowLargeObjectAllocation) {
     Label next(this);
     GotoIf(IsRegularHeapObjectSize(size_in_bytes), &next);

     Node* runtime_flags = SmiConstant(
-        Smi::FromInt(AllocateDoubleAlignFlag::encode(false) |
+        Smi::FromInt(AllocateDoubleAlignFlag::encode(flags & kDoubleAlignment) |
Camillo Bruni 2017/03/30 12:08:23 please add bool needs_double_alignment = flags
ivica.bogosavljevic 2017/03/30 13:47:57 Acknowledged.
                      AllocateTargetSpace::encode(AllocationSpace::LO_SPACE)));
     Node* const runtime_result =
         CallRuntime(Runtime::kAllocateInTargetSpace, NoContextConstant(),
                     SmiTag(size_in_bytes), runtime_flags);
     result.Bind(runtime_result);
     Goto(&merge_runtime);

     Bind(&next);
   }

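[Editorial note] The review thread above asks for the repeated `flags & kDoubleAlignment` test to be hoisted into a named boolean. A minimal sketch of that cleanup, using the reviewer's proposed name (hypothetical; not part of this patch set):

  // Hypothetical hoisting of the flag test, per the review comment above;
  // AllocateDoubleAlignFlag::encode() takes a bool, so name the condition once.
  bool needs_double_alignment = (flags & kDoubleAlignment) != 0;
  Node* runtime_flags = SmiConstant(
      Smi::FromInt(AllocateDoubleAlignFlag::encode(needs_double_alignment) |
                   AllocateTargetSpace::encode(AllocationSpace::LO_SPACE)));

The same local would then feed the second AllocateDoubleAlignFlag::encode call in the pretenured runtime path further down, keeping both call sites in sync.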
-  Node* new_top = IntPtrAdd(top, size_in_bytes);
+  Variable adjusted_size(this, MachineType::PointerRepresentation(),
+                         size_in_bytes);
+
+  if (flags & kDoubleAlignment) {
+    Label not_aligned(this), done_alignment(this, &adjusted_size);
+
+    Branch(WordAnd(top, IntPtrConstant(kDoubleAlignmentMask)), &not_aligned,
+           &done_alignment);
+
+    Bind(&not_aligned);
+    Node* not_aligned_size = IntPtrAdd(size_in_bytes, IntPtrConstant(4));
+    adjusted_size.Bind(not_aligned_size);
+    Goto(&done_alignment);
+
+    Bind(&done_alignment);
+  }
+
+  Node* new_top = IntPtrAdd(top, adjusted_size.value());
+
   Branch(UintPtrGreaterThanOrEqual(new_top, limit), &runtime_call,
          &no_runtime_call);

   Bind(&runtime_call);
   Node* runtime_result;
   if (flags & kPretenured) {
     Node* runtime_flags = SmiConstant(
-        Smi::FromInt(AllocateDoubleAlignFlag::encode(false) |
+        Smi::FromInt(AllocateDoubleAlignFlag::encode(flags & kDoubleAlignment) |
                      AllocateTargetSpace::encode(AllocationSpace::OLD_SPACE)));
     runtime_result =
         CallRuntime(Runtime::kAllocateInTargetSpace, NoContextConstant(),
                     SmiTag(size_in_bytes), runtime_flags);
   } else {
     runtime_result = CallRuntime(Runtime::kAllocateInNewSpace,
                                  NoContextConstant(), SmiTag(size_in_bytes));
   }
   result.Bind(runtime_result);
   Goto(&merge_runtime);

   // When there is enough space, return `top' and bump it up.
   Bind(&no_runtime_call);
   Node* no_runtime_result = top;
   StoreNoWriteBarrier(MachineType::PointerRepresentation(), top_address,
                       new_top);
-  no_runtime_result = BitcastWordToTagged(
-      IntPtrAdd(no_runtime_result, IntPtrConstant(kHeapObjectTag)));
+
+  Variable address(this, MachineType::PointerRepresentation(),
+                   no_runtime_result);
+
+  if (flags & kDoubleAlignment) {
+    Label needs_filler(this), done_filling(this, &address);
+    Branch(IntPtrEqual(adjusted_size.value(), size_in_bytes), &done_filling,
+           &needs_filler);
+
+    Bind(&needs_filler);
+    // Store a filler and increase the address by kPointerSize.
+    StoreNoWriteBarrier(MachineType::PointerRepresentation(), top,
+                        LoadRoot(Heap::kOnePointerFillerMapRootIndex));
+    address.Bind(IntPtrAdd(no_runtime_result, IntPtrConstant(4)));
+
+    Goto(&done_filling);
+
+    Bind(&done_filling);
+  }
+
+  no_runtime_result = BitcastWordToTagged(
+      IntPtrAdd(address.value(), IntPtrConstant(kHeapObjectTag)));
+
   result.Bind(no_runtime_result);
   Goto(&merge_runtime);

   Bind(&merge_runtime);
   return result.value();
 }

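[Editorial note] The refactored AllocateRaw above folds the old AllocateRawAligned logic into one function: when kDoubleAlignment is requested and the current allocation top is not double-aligned, the request grows by one word, and after the bump the spare word at the old top is plugged with a one-pointer filler map so the heap stays iterable. A self-contained sketch of the same bump-pointer technique (plain C++; all names and constants are hypothetical stand-ins, and kWordSize = 4 assumes a 32-bit target, as the hard-coded IntPtrConstant(4) above does):

#include <cstdint>

// Hypothetical constants mirroring a 32-bit heap; not V8's real declarations.
constexpr uintptr_t kWordSize = 4;
constexpr uintptr_t kDoubleAlignmentMask = 7;  // doubles want 8-byte alignment
constexpr uintptr_t kFillerWord = 0xdeadbeef;  // stands in for the filler map

// Bump-pointer allocation with optional double alignment. Returns the usable
// object address, or 0 when the space between top and limit is exhausted
// (where the real code falls back to a runtime call).
uintptr_t AllocateRawSketch(uintptr_t* top_address, uintptr_t limit,
                            uintptr_t size_in_bytes, bool double_align) {
  uintptr_t top = *top_address;
  uintptr_t adjusted_size = size_in_bytes;
  // If top is misaligned, grow the request by one word for a filler.
  if (double_align && (top & kDoubleAlignmentMask) != 0) {
    adjusted_size += kWordSize;
  }
  uintptr_t new_top = top + adjusted_size;
  if (new_top >= limit) return 0;  // runtime path in the real code
  *top_address = new_top;          // bump the top pointer
  uintptr_t address = top;
  if (adjusted_size != size_in_bytes) {
    // Plug the padding word so the heap stays iterable, then skip past it.
    *reinterpret_cast<uintptr_t*>(address) = kFillerWord;
    address += kWordSize;
  }
  return address;  // the real code additionally tags this with kHeapObjectTag
}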
+Node* CodeStubAssembler::AllocateRawUnaligned(Node* size_in_bytes,
+                                              AllocationFlags flags,
+                                              Node* top_address,
+                                              Node* limit_address) {
Camillo Bruni 2017/03/30 12:08:23 CHECK((flags & kDoubleAlignment) == 0); For consi
ivica.bogosavljevic 2017/03/30 13:47:57 This is true but for 32bit architectures only. On
+  return AllocateRaw(size_in_bytes, flags & ~kDoubleAlignment, top_address,
Camillo Bruni 2017/03/30 12:08:23 ... and then just use flags here directly.
+                     limit_address);
+}
+
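[Editorial note] The thread on AllocateRawUnaligned is about whether silently stripping the bit (`flags & ~kDoubleAlignment`) should instead be a hard assertion that callers never pass kDoubleAlignment here. A sketch of the asserting variant the reviewer appears to suggest (hypothetical; per the reply, the assumption only holds on 32-bit architectures, where pointer-aligned is not automatically double-aligned):

Node* CodeStubAssembler::AllocateRawUnaligned(Node* size_in_bytes,
                                              AllocationFlags flags,
                                              Node* top_address,
                                              Node* limit_address) {
  // Hypothetical variant per the review thread: assert instead of masking,
  // then pass flags through directly. Per the reply above, this would only
  // be valid on 32-bit architectures.
  CHECK((flags & kDoubleAlignment) == 0);
  return AllocateRaw(size_in_bytes, flags, top_address, limit_address);
}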
 Node* CodeStubAssembler::AllocateRawAligned(Node* size_in_bytes,
Camillo Bruni 2017/03/30 12:08:23 Let's make this fully obvious: can you rename this
ivica.bogosavljevic 2017/03/30 13:47:57 Acknowledged.
                                             AllocationFlags flags,
                                             Node* top_address,
                                             Node* limit_address) {
-  Node* top = Load(MachineType::Pointer(), top_address);
-  Variable adjusted_size(this, MachineType::PointerRepresentation(),
-                         size_in_bytes);
-  if (flags & kDoubleAlignment) {
-    Label not_aligned(this), done_alignment(this, &adjusted_size);
-    Branch(WordAnd(top, IntPtrConstant(kDoubleAlignmentMask)), &not_aligned,
-           &done_alignment);
-
-    Bind(&not_aligned);
-    Node* not_aligned_size =
-        IntPtrAdd(size_in_bytes, IntPtrConstant(kPointerSize));
-    adjusted_size.Bind(not_aligned_size);
-    Goto(&done_alignment);
-
-    Bind(&done_alignment);
-  }
-
-  Variable address(this, MachineRepresentation::kTagged,
-                   AllocateRawUnaligned(adjusted_size.value(), kNone,
-                                        top_address, limit_address));
-
-  Label needs_filler(this), done_filling(this, &address);
-  Branch(IntPtrEqual(adjusted_size.value(), size_in_bytes), &done_filling,
-         &needs_filler);
-
-  Bind(&needs_filler);
-  // Store a filler and increase the address by kPointerSize.
-  StoreNoWriteBarrier(MachineType::PointerRepresentation(), top,
-                      LoadRoot(Heap::kOnePointerFillerMapRootIndex));
-  address.Bind(BitcastWordToTagged(
-      IntPtrAdd(address.value(), IntPtrConstant(kPointerSize))));
-  Goto(&done_filling);
-
-  Bind(&done_filling);
-  // Update the top.
-  StoreNoWriteBarrier(MachineType::PointerRepresentation(), top_address,
-                      IntPtrAdd(top, adjusted_size.value()));
-  return address.value();
+  return AllocateRaw(size_in_bytes, flags | kDoubleAlignment, top_address,
+                     limit_address);
 }

 Node* CodeStubAssembler::AllocateInNewSpace(Node* size_in_bytes,
                                             AllocationFlags flags) {
   DCHECK(flags == kNone || flags == kDoubleAlignment);
   CSA_ASSERT(this, IsRegularHeapObjectSize(size_in_bytes));
   return Allocate(size_in_bytes, flags);
 }

 Node* CodeStubAssembler::Allocate(Node* size_in_bytes, AllocationFlags flags) {
(...skipping 7595 matching lines...)
                 formatted.c_str(), TENURED);
     CallRuntime(Runtime::kGlobalPrint, NoContextConstant(),
                 HeapConstant(string));
   }
   CallRuntime(Runtime::kDebugPrint, NoContextConstant(), tagged_value);
 #endif
 }

 }  // namespace internal
 }  // namespace v8
