Chromium Code Reviews

Unified Diff: src/code-stub-assembler.cc

Issue 2552883012: [interpreter][stubs] Fixing issues found by machine graph verifier. (Closed)
Patch Set: Addressing nits (created 4 years ago)
 // Copyright 2016 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 #include "src/code-stub-assembler.h"
 #include "src/code-factory.h"
 #include "src/frames-inl.h"
 #include "src/frames.h"
 
 namespace v8 {
 namespace internal {
(...skipping 553 matching lines...)
   // Check if the multiplication overflowed.
   Label if_overflow(this, Label::kDeferred), if_notoverflow(this);
   Branch(overflow, &if_overflow, &if_notoverflow);
   Bind(&if_notoverflow);
   {
     // If the answer is zero, we may need to return -0.0, depending on the
     // input.
     Label answer_zero(this), answer_not_zero(this);
     Node* answer = Projection(0, pair);
     Node* zero = Int32Constant(0);
-    Branch(WordEqual(answer, zero), &answer_zero, &answer_not_zero);
+    Branch(Word32Equal(answer, zero), &answer_zero, &answer_not_zero);
     Bind(&answer_not_zero);
     {
       var_result.Bind(ChangeInt32ToTagged(answer));
       Goto(&return_result);
     }
     Bind(&answer_zero);
     {
       Node* or_result = Word32Or(lhs32, rhs32);
       Label if_should_be_negative_zero(this), if_should_be_zero(this);
       Branch(Int32LessThan(or_result, zero), &if_should_be_negative_zero,
              &if_should_be_zero);
       Bind(&if_should_be_negative_zero);
       {
         var_result.Bind(MinusZeroConstant());
         Goto(&return_result);
       }
       Bind(&if_should_be_zero);
       {
-        var_result.Bind(zero);
+        var_result.Bind(SmiConstant(0));
         Goto(&return_result);
       }
     }
   }
   Bind(&if_overflow);
   {
     var_lhs_float64.Bind(SmiToFloat64(a));
     var_rhs_float64.Bind(SmiToFloat64(b));
     Node* value = Float64Mul(var_lhs_float64.value(), var_rhs_float64.value());
     Node* result = AllocateHeapNumberWithValue(value);
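
Note: both fixes in this hunk are representation errors the machine graph verifier now catches: the low word of the multiplication is a 32-bit value and must be compared with a 32-bit operator, and var_result is a tagged variable and must be bound to a tagged constant. A minimal sketch of the rule (assuming, as in SmiMul, that 'pair' comes from Int32MulWithOverflow):

    Node* answer = Projection(0, pair);  // MachineRepresentation::kWord32
    // 32-bit compare; WordEqual would mix a kWord32 value into a word-sized
    // operation, which the verifier rejects.
    Branch(Word32Equal(answer, Int32Constant(0)), &answer_zero, &answer_not_zero);
    // ...
    var_result.Bind(SmiConstant(0));  // kTagged variable: bind a Smi zero,
                                      // not the raw Int32Constant(0)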
(...skipping 16 matching lines...)
   return WordEqual(WordAnd(BitcastTaggedToWord(a), IntPtrConstant(kSmiTagMask)),
                    IntPtrConstant(0));
 }
 
 Node* CodeStubAssembler::TaggedIsNotSmi(Node* a) {
   return WordNotEqual(
       WordAnd(BitcastTaggedToWord(a), IntPtrConstant(kSmiTagMask)),
       IntPtrConstant(0));
 }
 
-Node* CodeStubAssembler::WordIsPositiveSmi(Node* a) {
-  return WordEqual(WordAnd(a, IntPtrConstant(kSmiTagMask | kSmiSignMask)),
-                   IntPtrConstant(0));
-}
+Node* CodeStubAssembler::TaggedIsPositiveSmi(Node* a) {
+  return WordEqual(WordAnd(BitcastTaggedToWord(a),
+                           IntPtrConstant(kSmiTagMask | kSmiSignMask)),
+                   IntPtrConstant(0));
+}
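
Note: the rename from WordIsPositiveSmi to TaggedIsPositiveSmi comes with an explicit representation change: word-level bit operations are not defined on kTagged values, so the tagged pointer is reinterpreted as a machine word first. The mask spells out the condition:

    // kSmiTagMask | kSmiSignMask: the value is a Smi (tag bit clear) and
    // non-negative (sign bit clear); the bitcast makes WordAnd
    // representation-correct on a tagged input.
    WordAnd(BitcastTaggedToWord(a), IntPtrConstant(kSmiTagMask | kSmiSignMask));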
 
 Node* CodeStubAssembler::WordIsWordAligned(Node* word) {
   return WordEqual(IntPtrConstant(0),
                    WordAnd(word, IntPtrConstant((1 << kPointerSizeLog2) - 1)));
 }
 
 void CodeStubAssembler::BranchIfSimd128Equal(Node* lhs, Node* lhs_map,
                                              Node* rhs, Node* rhs_map,
(...skipping 272 matching lines...)
 #endif
 
   return AllocateRawUnaligned(size_in_bytes, flags, top_address, limit_address);
 }
 
 Node* CodeStubAssembler::Allocate(int size_in_bytes, AllocationFlags flags) {
   return CodeStubAssembler::Allocate(IntPtrConstant(size_in_bytes), flags);
 }
 
 Node* CodeStubAssembler::InnerAllocate(Node* previous, Node* offset) {
-  return BitcastWordToTagged(IntPtrAdd(previous, offset));
+  return BitcastWordToTagged(IntPtrAdd(BitcastTaggedToWord(previous), offset));
 }
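
Note: InnerAllocate does pointer arithmetic on the tagged result of a previous allocation, so the tagged-to-word bitcast now happens inside the helper and call sites can pass the tagged pointer directly. AllocateUninitializedJSArrayWithElements later in this file drops its caller-side bitcast accordingly:

    // Before: Node* elements = InnerAllocate(BitcastTaggedToWord(array), elements_offset);
    // After:
    Node* elements = InnerAllocate(array, elements_offset);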
 
 Node* CodeStubAssembler::InnerAllocate(Node* previous, int offset) {
   return InnerAllocate(previous, IntPtrConstant(offset));
 }
 
 Node* CodeStubAssembler::IsRegularHeapObjectSize(Node* size) {
   return UintPtrLessThanOrEqual(size,
                                 IntPtrConstant(kMaxRegularHeapObjectSize));
 }
(...skipping 644 matching lines...)
 }
 
 Node* CodeStubAssembler::AllocateSeqOneByteString(int length,
                                                   AllocationFlags flags) {
   Comment("AllocateSeqOneByteString");
   Node* result = Allocate(SeqOneByteString::SizeFor(length), flags);
   DCHECK(Heap::RootIsImmortalImmovable(Heap::kOneByteStringMapRootIndex));
   StoreMapNoWriteBarrier(result, Heap::kOneByteStringMapRootIndex);
   StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kLengthOffset,
                                  SmiConstant(Smi::FromInt(length)));
-  StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kHashFieldOffset,
-                                 IntPtrConstant(String::kEmptyHashField),
-                                 MachineRepresentation::kWord32);
+  // Initialize both used and unused parts of hash field slot at once.
+  StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kHashFieldSlot,
+                                 IntPtrConstant(String::kEmptyHashField),
+                                 MachineType::PointerRepresentation());
   return result;
 }
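
Note: on 64-bit targets the 32-bit hash field occupies only part of a pointer-size slot. Writing kHashFieldSlot at pointer width initializes the hash field and the adjacent unused bits in a single store (on 32-bit targets the slot and the field coincide). The same pattern is applied to SeqTwoByteString, SlicedString, and ConsString below:

    // One pointer-width store covers both used and unused parts of the slot.
    StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kHashFieldSlot,
                                   IntPtrConstant(String::kEmptyHashField),
                                   MachineType::PointerRepresentation());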
 
 Node* CodeStubAssembler::AllocateSeqOneByteString(Node* context, Node* length,
                                                   ParameterMode mode,
                                                   AllocationFlags flags) {
   Comment("AllocateSeqOneByteString");
   Variable var_result(this, MachineRepresentation::kTagged);
 
   // Compute the SeqOneByteString size and check if it fits into new space.
   Label if_sizeissmall(this), if_notsizeissmall(this, Label::kDeferred),
       if_join(this);
   Node* raw_size = GetArrayAllocationSize(
       length, UINT8_ELEMENTS, mode,
       SeqOneByteString::kHeaderSize + kObjectAlignmentMask);
   Node* size = WordAnd(raw_size, IntPtrConstant(~kObjectAlignmentMask));
   Branch(IntPtrLessThanOrEqual(size, IntPtrConstant(kMaxRegularHeapObjectSize)),
          &if_sizeissmall, &if_notsizeissmall);
 
   Bind(&if_sizeissmall);
   {
     // Just allocate the SeqOneByteString in new space.
     Node* result = Allocate(size, flags);
     DCHECK(Heap::RootIsImmortalImmovable(Heap::kOneByteStringMapRootIndex));
     StoreMapNoWriteBarrier(result, Heap::kOneByteStringMapRootIndex);
-    StoreObjectFieldNoWriteBarrier(
-        result, SeqOneByteString::kLengthOffset,
-        mode == SMI_PARAMETERS ? length : SmiFromWord(length));
-    StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kHashFieldOffset,
-                                   IntPtrConstant(String::kEmptyHashField),
-                                   MachineRepresentation::kWord32);
+    StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kLengthOffset,
+                                   TagParameter(length, mode));
+    // Initialize both used and unused parts of hash field slot at once.
+    StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kHashFieldSlot,
+                                   IntPtrConstant(String::kEmptyHashField),
+                                   MachineType::PointerRepresentation());
     var_result.Bind(result);
     Goto(&if_join);
   }
 
   Bind(&if_notsizeissmall);
   {
     // We might need to allocate in large object space, go to the runtime.
-    Node* result =
-        CallRuntime(Runtime::kAllocateSeqOneByteString, context,
-                    mode == SMI_PARAMETERS ? length : SmiFromWord(length));
+    Node* result = CallRuntime(Runtime::kAllocateSeqOneByteString, context,
+                               TagParameter(length, mode));
     var_result.Bind(result);
     Goto(&if_join);
   }
 
   Bind(&if_join);
   return var_result.value();
 }
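
Note: TagParameter replaces the inline "mode == SMI_PARAMETERS ? length : SmiFromWord(length)" selection. Its definition is not part of this diff; a plausible sketch of what it is assumed to do:

    // Assumed helper: produce a Smi from a value held in the given
    // ParameterMode, tagging only when the value is not already a Smi.
    Node* TagParameter(Node* value, ParameterMode mode) {
      if (mode != SMI_PARAMETERS) value = SmiTag(value);
      return value;
    }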
 
 Node* CodeStubAssembler::AllocateSeqTwoByteString(int length,
                                                   AllocationFlags flags) {
   Comment("AllocateSeqTwoByteString");
   Node* result = Allocate(SeqTwoByteString::SizeFor(length), flags);
   DCHECK(Heap::RootIsImmortalImmovable(Heap::kStringMapRootIndex));
   StoreMapNoWriteBarrier(result, Heap::kStringMapRootIndex);
   StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kLengthOffset,
                                  SmiConstant(Smi::FromInt(length)));
-  StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kHashFieldOffset,
-                                 IntPtrConstant(String::kEmptyHashField),
-                                 MachineRepresentation::kWord32);
+  // Initialize both used and unused parts of hash field slot at once.
+  StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kHashFieldSlot,
+                                 IntPtrConstant(String::kEmptyHashField),
+                                 MachineType::PointerRepresentation());
   return result;
 }
 
 Node* CodeStubAssembler::AllocateSeqTwoByteString(Node* context, Node* length,
                                                   ParameterMode mode,
                                                   AllocationFlags flags) {
   Comment("AllocateSeqTwoByteString");
   Variable var_result(this, MachineRepresentation::kTagged);
 
   // Compute the SeqTwoByteString size and check if it fits into new space.
   Label if_sizeissmall(this), if_notsizeissmall(this, Label::kDeferred),
       if_join(this);
   Node* raw_size = GetArrayAllocationSize(
       length, UINT16_ELEMENTS, mode,
       SeqOneByteString::kHeaderSize + kObjectAlignmentMask);
   Node* size = WordAnd(raw_size, IntPtrConstant(~kObjectAlignmentMask));
   Branch(IntPtrLessThanOrEqual(size, IntPtrConstant(kMaxRegularHeapObjectSize)),
          &if_sizeissmall, &if_notsizeissmall);
 
   Bind(&if_sizeissmall);
   {
     // Just allocate the SeqTwoByteString in new space.
     Node* result = Allocate(size, flags);
     DCHECK(Heap::RootIsImmortalImmovable(Heap::kStringMapRootIndex));
     StoreMapNoWriteBarrier(result, Heap::kStringMapRootIndex);
     StoreObjectFieldNoWriteBarrier(
         result, SeqTwoByteString::kLengthOffset,
         mode == SMI_PARAMETERS ? length : SmiFromWord(length));
-    StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kHashFieldOffset,
-                                   IntPtrConstant(String::kEmptyHashField),
-                                   MachineRepresentation::kWord32);
+    // Initialize both used and unused parts of hash field slot at once.
+    StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kHashFieldSlot,
+                                   IntPtrConstant(String::kEmptyHashField),
+                                   MachineType::PointerRepresentation());
     var_result.Bind(result);
     Goto(&if_join);
   }
 
   Bind(&if_notsizeissmall);
   {
     // We might need to allocate in large object space, go to the runtime.
     Node* result =
         CallRuntime(Runtime::kAllocateSeqTwoByteString, context,
                     mode == SMI_PARAMETERS ? length : SmiFromWord(length));
     var_result.Bind(result);
     Goto(&if_join);
   }
 
   Bind(&if_join);
   return var_result.value();
 }
 
 Node* CodeStubAssembler::AllocateSlicedString(
     Heap::RootListIndex map_root_index, Node* length, Node* parent,
     Node* offset) {
   CSA_ASSERT(this, TaggedIsSmi(length));
   Node* result = Allocate(SlicedString::kSize);
   DCHECK(Heap::RootIsImmortalImmovable(map_root_index));
   StoreMapNoWriteBarrier(result, map_root_index);
   StoreObjectFieldNoWriteBarrier(result, SlicedString::kLengthOffset, length,
                                  MachineRepresentation::kTagged);
-  StoreObjectFieldNoWriteBarrier(result, SlicedString::kHashFieldOffset,
-                                 Int32Constant(String::kEmptyHashField),
-                                 MachineRepresentation::kWord32);
+  // Initialize both used and unused parts of hash field slot at once.
+  StoreObjectFieldNoWriteBarrier(result, SlicedString::kHashFieldSlot,
+                                 IntPtrConstant(String::kEmptyHashField),
+                                 MachineType::PointerRepresentation());
   StoreObjectFieldNoWriteBarrier(result, SlicedString::kParentOffset, parent,
                                  MachineRepresentation::kTagged);
   StoreObjectFieldNoWriteBarrier(result, SlicedString::kOffsetOffset, offset,
                                  MachineRepresentation::kTagged);
   return result;
 }
 
 Node* CodeStubAssembler::AllocateSlicedOneByteString(Node* length, Node* parent,
                                                      Node* offset) {
   return AllocateSlicedString(Heap::kSlicedOneByteStringMapRootIndex, length,
                               parent, offset);
 }
 
 Node* CodeStubAssembler::AllocateSlicedTwoByteString(Node* length, Node* parent,
                                                      Node* offset) {
   return AllocateSlicedString(Heap::kSlicedStringMapRootIndex, length, parent,
                               offset);
 }
 
 Node* CodeStubAssembler::AllocateConsString(Heap::RootListIndex map_root_index,
                                             Node* length, Node* first,
                                             Node* second,
                                             AllocationFlags flags) {
   CSA_ASSERT(this, TaggedIsSmi(length));
   Node* result = Allocate(ConsString::kSize, flags);
   DCHECK(Heap::RootIsImmortalImmovable(map_root_index));
   StoreMapNoWriteBarrier(result, map_root_index);
   StoreObjectFieldNoWriteBarrier(result, ConsString::kLengthOffset, length,
                                  MachineRepresentation::kTagged);
-  StoreObjectFieldNoWriteBarrier(result, ConsString::kHashFieldOffset,
-                                 Int32Constant(String::kEmptyHashField),
-                                 MachineRepresentation::kWord32);
+  // Initialize both used and unused parts of hash field slot at once.
+  StoreObjectFieldNoWriteBarrier(result, ConsString::kHashFieldSlot,
+                                 IntPtrConstant(String::kEmptyHashField),
+                                 MachineType::PointerRepresentation());
   bool const new_space = !(flags & kPretenured);
   if (new_space) {
     StoreObjectFieldNoWriteBarrier(result, ConsString::kFirstOffset, first,
                                    MachineRepresentation::kTagged);
     StoreObjectFieldNoWriteBarrier(result, ConsString::kSecondOffset, second,
                                    MachineRepresentation::kTagged);
   } else {
     StoreObjectField(result, ConsString::kFirstOffset, first);
     StoreObjectField(result, ConsString::kSecondOffset, second);
   }
(...skipping 253 matching lines...)
 
   int elements_offset = base_size;
 
   // Compute space for elements
   base_size += FixedArray::kHeaderSize;
   Node* size = ElementOffsetFromIndex(capacity, kind, capacity_mode, base_size);
 
   Node* array = AllocateUninitializedJSArray(kind, array_map, length,
                                              allocation_site, size);
 
-  // The bitcast here is safe because InnerAllocate doesn't actually allocate.
-  Node* elements = InnerAllocate(BitcastTaggedToWord(array), elements_offset);
+  Node* elements = InnerAllocate(array, elements_offset);
   StoreObjectField(array, JSObject::kElementsOffset, elements);
 
   return {array, elements};
 }
 
 Node* CodeStubAssembler::AllocateUninitializedJSArray(ElementsKind kind,
                                                       Node* array_map,
                                                       Node* length,
                                                       Node* allocation_site,
                                                       Node* size_in_bytes) {
   Node* array = Allocate(size_in_bytes);
 
   Comment("write JSArray headers");
   StoreMapNoWriteBarrier(array, array_map);
 
+  CSA_ASSERT(this, TaggedIsSmi(length));
   StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
 
   StoreObjectFieldRoot(array, JSArray::kPropertiesOffset,
                        Heap::kEmptyFixedArrayRootIndex);
 
   if (allocation_site != nullptr) {
     InitializeAllocationMemento(array, JSArray::kSize, allocation_site);
   }
   return array;
 }
(...skipping 1208 matching lines...)
   return var_result.value();
 }
 
 }  // namespace
 
 Node* CodeStubAssembler::SubString(Node* context, Node* string, Node* from,
                                    Node* to) {
   Label end(this);
   Label runtime(this);
 
-  Variable var_instance_type(this, MachineRepresentation::kWord8);   // Int32.
+  Variable var_instance_type(this, MachineRepresentation::kWord32);  // Int32.
   Variable var_result(this, MachineRepresentation::kTagged);         // String.
   Variable var_from(this, MachineRepresentation::kTagged);           // Smi.
   Variable var_string(this, MachineRepresentation::kTagged);         // String.
 
   var_instance_type.Bind(Int32Constant(0));
   var_string.Bind(string);
   var_from.Bind(from);
 
   // Make sure first argument is a string.
 
   // Bailout if receiver is a Smi.
   GotoIf(TaggedIsSmi(string), &runtime);
 
   // Load the instance type of the {string}.
   Node* const instance_type = LoadInstanceType(string);
   var_instance_type.Bind(instance_type);
 
   // Check if {string} is a String.
   GotoUnless(IsStringInstanceType(instance_type), &runtime);
 
   // Make sure that both from and to are non-negative smis.
 
-  GotoUnless(WordIsPositiveSmi(from), &runtime);
-  GotoUnless(WordIsPositiveSmi(to), &runtime);
+  GotoUnless(TaggedIsPositiveSmi(from), &runtime);
+  GotoUnless(TaggedIsPositiveSmi(to), &runtime);
 
   Node* const substr_length = SmiSub(to, from);
   Node* const string_length = LoadStringLength(string);
 
   // Begin dispatching based on substring length.
 
   Label original_string_or_invalid_length(this);
   GotoIf(SmiAboveOrEqual(substr_length, string_length),
          &original_string_or_invalid_length);
 
(...skipping 119 matching lines...)
     STATIC_ASSERT(kShortExternalStringTag != 0);
     GotoIf(Word32NotEqual(Word32And(var_instance_type.value(),
                                     Int32Constant(kShortExternalStringMask)),
                           Int32Constant(0)),
            &runtime);
 
     // Move the pointer so that offset-wise, it looks like a sequential string.
     STATIC_ASSERT(SeqTwoByteString::kHeaderSize ==
                   SeqOneByteString::kHeaderSize);
 
-    Node* resource_data = LoadObjectField(var_string.value(),
-                                          ExternalString::kResourceDataOffset);
+    Node* resource_data =
+        LoadObjectField(var_string.value(), ExternalString::kResourceDataOffset,
+                        MachineType::Pointer());
     Node* const fake_sequential_string = IntPtrSub(
         resource_data,
         IntPtrConstant(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
 
     var_result.Bind(AllocAndCopyStringCharacters(
         this, context, fake_sequential_string, var_instance_type.value(),
         var_from.value(), substr_length));
 
     Counters* counters = isolate()->counters();
     IncrementCounter(counters->sub_string_native(), 1);
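
Note: two more representation fixes in SubString. Instance types are 32-bit values, so var_instance_type is declared kWord32 rather than kWord8, and an external string's resource data is an off-heap pointer, so it must be loaded with an explicit machine type:

    // Without MachineType::Pointer(), the default tagged load would make the
    // verifier treat the raw off-heap pointer as a tagged value.
    Node* resource_data =
        LoadObjectField(var_string.value(), ExternalString::kResourceDataOffset,
                        MachineType::Pointer());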
(...skipping 537 matching lines...)
   Label out(this);
 
   Variable var_result(this, MachineRepresentation::kTagged);
   var_result.Bind(input);
 
   // Early exit for positive smis.
   {
     // TODO(jgruber): This branch and the recheck below can be removed once we
     // have a ToNumber with multiple exits.
     Label next(this, Label::kDeferred);
-    Branch(WordIsPositiveSmi(input), &out, &next);
+    Branch(TaggedIsPositiveSmi(input), &out, &next);
     Bind(&next);
   }
 
   Node* const number = ToNumber(context, input);
   var_result.Bind(number);
 
   // Perhaps we have a positive smi now.
   {
     Label next(this, Label::kDeferred);
-    Branch(WordIsPositiveSmi(number), &out, &next);
+    Branch(TaggedIsPositiveSmi(number), &out, &next);
     Bind(&next);
   }
 
   Label if_isnegativesmi(this), if_isheapnumber(this);
   Branch(TaggedIsSmi(number), &if_isnegativesmi, &if_isheapnumber);
 
   Bind(&if_isnegativesmi);
   {
     // floor({input}) mod 2^32 === {input} + 2^32.
     Node* const float_number = SmiToFloat64(number);
(...skipping 843 matching lines...)
         var_double_value.Bind(LoadHeapNumberValue(mutable_heap_number));
       }
       Goto(&rebox_double);
     }
   }
   Bind(&if_backing_store);
   {
     Comment("if_backing_store");
     Node* properties = LoadProperties(object);
     field_index = IntPtrSub(field_index, inobject_properties);
-    Node* value = LoadFixedArrayElement(properties, field_index);
+    Node* value =
+        LoadFixedArrayElement(properties, field_index, 0, INTPTR_PARAMETERS);
 
     Label if_double(this), if_tagged(this);
     Branch(Word32NotEqual(representation,
                           Int32Constant(Representation::kDouble)),
            &if_tagged, &if_double);
     Bind(&if_tagged);
     {
       var_value->Bind(value);
       Goto(&done);
     }
(...skipping 33 matching lines...)
       (NameDictionary::kEntryDetailsIndex - NameDictionary::kEntryKeyIndex) *
       kPointerSize;
   const int name_to_value_offset =
       (NameDictionary::kEntryValueIndex - NameDictionary::kEntryKeyIndex) *
       kPointerSize;
 
   Node* details = LoadAndUntagToWord32FixedArrayElement(dictionary, name_index,
                                                         name_to_details_offset);
 
   var_details->Bind(details);
-  var_value->Bind(
-      LoadFixedArrayElement(dictionary, name_index, name_to_value_offset));
+  var_value->Bind(LoadFixedArrayElement(
+      dictionary, name_index, name_to_value_offset, INTPTR_PARAMETERS));
 
   Comment("] LoadPropertyFromNameDictionary");
 }
 
 void CodeStubAssembler::LoadPropertyFromGlobalDictionary(Node* dictionary,
                                                          Node* name_index,
                                                          Variable* var_details,
                                                          Variable* var_value,
                                                          Label* if_deleted) {
   Comment("[ LoadPropertyFromGlobalDictionary");
   CSA_ASSERT(this, IsDictionary(dictionary));
 
   const int name_to_value_offset =
       (GlobalDictionary::kEntryValueIndex - GlobalDictionary::kEntryKeyIndex) *
       kPointerSize;
 
-  Node* property_cell =
-      LoadFixedArrayElement(dictionary, name_index, name_to_value_offset);
+  Node* property_cell = LoadFixedArrayElement(
+      dictionary, name_index, name_to_value_offset, INTPTR_PARAMETERS);
 
   Node* value = LoadObjectField(property_cell, PropertyCell::kValueOffset);
   GotoIf(WordEqual(value, TheHoleConstant()), if_deleted);
 
   var_value->Bind(value);
 
   Node* details = LoadAndUntagToWord32ObjectField(property_cell,
                                                   PropertyCell::kDetailsOffset);
   var_details->Bind(details);
 
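
Note: these dictionary lookups index the backing FixedArray with raw intptr indices, so the ParameterMode is spelled out as INTPTR_PARAMETERS; under the verifier the declared mode has to match the representation of the index actually passed. UpdateFeedback further down gets the same treatment for both its load and its store:

    // name_index is an intptr, not a Smi, so say so explicitly.
    Node* value = LoadFixedArrayElement(dictionary, name_index,
                                        name_to_value_offset, INTPTR_PARAMETERS);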
(...skipping 228 matching lines...)
 
   Variable var_index(this, MachineType::PointerRepresentation());
 
   Label if_keyisindex(this), if_iskeyunique(this);
   TryToName(key, &if_keyisindex, &var_index, &if_iskeyunique, if_bailout);
 
   Bind(&if_iskeyunique);
   {
     Variable var_holder(this, MachineRepresentation::kTagged);
     Variable var_holder_map(this, MachineRepresentation::kTagged);
-    Variable var_holder_instance_type(this, MachineRepresentation::kWord8);
+    Variable var_holder_instance_type(this, MachineRepresentation::kWord32);
 
     Variable* merged_variables[] = {&var_holder, &var_holder_map,
                                     &var_holder_instance_type};
     Label loop(this, arraysize(merged_variables), merged_variables);
     var_holder.Bind(receiver);
     var_holder_map.Bind(map);
     var_holder_instance_type.Bind(instance_type);
     Goto(&loop);
     Bind(&loop);
     {
(...skipping 23 matching lines...)
       var_holder.Bind(proto);
       var_holder_map.Bind(map);
       var_holder_instance_type.Bind(instance_type);
       Goto(&loop);
     }
   }
   Bind(&if_keyisindex);
   {
     Variable var_holder(this, MachineRepresentation::kTagged);
     Variable var_holder_map(this, MachineRepresentation::kTagged);
-    Variable var_holder_instance_type(this, MachineRepresentation::kWord8);
+    Variable var_holder_instance_type(this, MachineRepresentation::kWord32);
 
     Variable* merged_variables[] = {&var_holder, &var_holder_map,
                                     &var_holder_instance_type};
     Label loop(this, arraysize(merged_variables), merged_variables);
     var_holder.Bind(receiver);
     var_holder_map.Bind(map);
     var_holder_instance_type.Bind(instance_type);
     Goto(&loop);
     Bind(&loop);
     {
(...skipping 204 matching lines...)
 }
 
 void CodeStubAssembler::UpdateFeedback(Node* feedback,
                                        Node* type_feedback_vector,
                                        Node* slot_id) {
   // This method is used for binary op and compare feedback. These
   // vector nodes are initialized with a smi 0, so we can simply OR
   // our new feedback in place.
   // TODO(interpreter): Consider passing the feedback as Smi already to avoid
   // the tagging completely.
-  Node* previous_feedback =
-      LoadFixedArrayElement(type_feedback_vector, slot_id);
+  Node* previous_feedback = LoadFixedArrayElement(type_feedback_vector, slot_id,
+                                                  0, INTPTR_PARAMETERS);
   Node* combined_feedback = SmiOr(previous_feedback, SmiFromWord32(feedback));
   StoreFixedArrayElement(type_feedback_vector, slot_id, combined_feedback,
-                         SKIP_WRITE_BARRIER);
+                         SKIP_WRITE_BARRIER, 0, INTPTR_PARAMETERS);
 }
 
 Node* CodeStubAssembler::LoadReceiverMap(Node* receiver) {
   Variable var_receiver_map(this, MachineRepresentation::kTagged);
   Label load_smi_map(this, Label::kDeferred), load_receiver_map(this),
       if_result(this);
 
   Branch(TaggedIsSmi(receiver), &load_smi_map, &load_receiver_map);
   Bind(&load_smi_map);
   {
(...skipping 732 matching lines...)
     Node* zero_constant = SmiConstant(Smi::kZero);
     Branch(WordEqual(enum_length, zero_constant), &loop, use_runtime);
   }
 }
 
 Node* CodeStubAssembler::CreateAllocationSiteInFeedbackVector(
     Node* feedback_vector, Node* slot) {
   Node* size = IntPtrConstant(AllocationSite::kSize);
   Node* site = Allocate(size, CodeStubAssembler::kPretenured);
 
-  StoreMap(site, LoadRoot(Heap::kAllocationSiteMapRootIndex));
-  Node* kind = SmiConstant(Smi::FromInt(GetInitialFastElementsKind()));
+  StoreMap(site, AllocationSiteMapConstant());
+  Node* kind = SmiConstant(GetInitialFastElementsKind());
   StoreObjectFieldNoWriteBarrier(site, AllocationSite::kTransitionInfoOffset,
                                  kind);
 
   // Unlike literals, constructed arrays don't have nested sites
-  Node* zero = IntPtrConstant(0);
+  Node* zero = SmiConstant(0);
   StoreObjectFieldNoWriteBarrier(site, AllocationSite::kNestedSiteOffset, zero);
 
   // Pretenuring calculation field.
   StoreObjectFieldNoWriteBarrier(site, AllocationSite::kPretenureDataOffset,
                                  zero);
 
   // Pretenuring memento creation count field.
   StoreObjectFieldNoWriteBarrier(
       site, AllocationSite::kPretenureCreateCountOffset, zero);
 
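
Note: the AllocationSite fields written here are tagged slots, so their initializers become tagged values: the elements kind is stored as a Smi, and the shared zero is SmiConstant(0) rather than IntPtrConstant(0), which the verifier would reject as a raw word stored into a tagged field:

    Node* zero = SmiConstant(0);  // tagged zero for tagged fields
    StoreObjectFieldNoWriteBarrier(site, AllocationSite::kNestedSiteOffset, zero);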
(...skipping 1874 matching lines...)
       var_map_index.Bind(map_index);
       var_array_map.Bind(UndefinedConstant());
       Goto(&allocate_iterator);
     }
   }
 
   Bind(&allocate_iterator);
   {
     Node* map =
         LoadFixedArrayElement(LoadNativeContext(context), var_map_index.value(),
-                              0, CodeStubAssembler::INTPTR_PARAMETERS);
+                              0, INTPTR_PARAMETERS);
     var_result.Bind(AllocateJSArrayIterator(array, var_array_map.value(), map));
     Goto(&return_result);
   }
 
   Bind(&return_result);
   return var_result.value();
 }
 
 Node* CodeStubAssembler::AllocateJSArrayIterator(Node* array, Node* array_map,
                                                  Node* map) {
(...skipping 112 matching lines...)
 
 Node* CodeStubAssembler::IsDebugActive() {
   Node* is_debug_active = Load(
       MachineType::Uint8(),
       ExternalConstant(ExternalReference::debug_is_active_address(isolate())));
   return WordNotEqual(is_debug_active, Int32Constant(0));
 }
 
 }  // namespace internal
 }  // namespace v8