Chromium Code Reviews

Side by Side Diff: src/code-stub-assembler.cc

Issue 2893253002: Revert of [csa] Add assertions to CSA (Closed)
Patch Set: Created 3 years, 7 months ago
OLD | NEW
1 // Copyright 2016 the V8 project authors. All rights reserved. 1 // Copyright 2016 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 #include "src/code-stub-assembler.h" 4 #include "src/code-stub-assembler.h"
5 #include "src/code-factory.h" 5 #include "src/code-factory.h"
6 #include "src/frames-inl.h" 6 #include "src/frames-inl.h"
7 #include "src/frames.h" 7 #include "src/frames.h"
8 8
9 namespace v8 { 9 namespace v8 {
10 namespace internal { 10 namespace internal {
(...skipping 56 matching lines...)
67 Branch(condition, &ok, &not_ok); 67 Branch(condition, &ok, &not_ok);
68 BIND(&not_ok); 68 BIND(&not_ok);
69 if (message != nullptr) { 69 if (message != nullptr) {
70 char chars[1024]; 70 char chars[1024];
71 Vector<char> buffer(chars); 71 Vector<char> buffer(chars);
72 if (file != nullptr) { 72 if (file != nullptr) {
73 SNPrintF(buffer, "CSA_ASSERT failed: %s [%s:%d]\n", message, file, line); 73 SNPrintF(buffer, "CSA_ASSERT failed: %s [%s:%d]\n", message, file, line);
74 } else { 74 } else {
75 SNPrintF(buffer, "CSA_ASSERT failed: %s\n", message); 75 SNPrintF(buffer, "CSA_ASSERT failed: %s\n", message);
76 } 76 }
77 CallRuntime(Runtime::kGlobalPrint, SmiConstant(0), 77 CallRuntime(
78 HeapConstant(factory()->InternalizeUtf8String(&(buffer[0])))); 78 Runtime::kGlobalPrint, SmiConstant(Smi::kZero),
79 HeapConstant(factory()->NewStringFromAsciiChecked(&(buffer[0]))));
79 } 80 }
80 DebugBreak(); 81 DebugBreak();
81 Goto(&ok); 82 Goto(&ok);
82 BIND(&ok); 83 BIND(&ok);
83 Comment("] Assert"); 84 Comment("] Assert");
84 } 85 }
85 86
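[Editor's note: not part of the patch] The Assert() body above is what a CSA assertion bottoms out in: branch on the condition node, format and print "CSA_ASSERT failed: <message> [file:line]" through Runtime::kGlobalPrint, hit DebugBreak(), and fall through to the ok label. A minimal sketch of how a macro can feed this (condition, message, file, line) signature; MY_CSA_ASSERT and the exact expansion are assumptions for illustration, not V8's actual header:

    // Stringifies the condition so the failure message shows the source text.
    #define MY_CSA_ASSERT(csa, cond) \
      (csa)->Assert((cond), #cond, __FILE__, __LINE__)

    // Usage inside a CodeStubAssembler method, matching the call sites
    // visible throughout this diff:
    //   MY_CSA_ASSERT(this, TaggedIsSmi(length));
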
86 Node* CodeStubAssembler::Select(Node* condition, const NodeGenerator& true_body, 87 Node* CodeStubAssembler::Select(Node* condition, const NodeGenerator& true_body,
87 const NodeGenerator& false_body, 88 const NodeGenerator& false_body,
88 MachineRepresentation rep) { 89 MachineRepresentation rep) {
(...skipping 101 matching lines...)
190 Node* CodeStubAssembler::IntPtrRoundUpToPowerOfTwo32(Node* value) { 191 Node* CodeStubAssembler::IntPtrRoundUpToPowerOfTwo32(Node* value) {
191 Comment("IntPtrRoundUpToPowerOfTwo32"); 192 Comment("IntPtrRoundUpToPowerOfTwo32");
192 CSA_ASSERT(this, UintPtrLessThanOrEqual(value, IntPtrConstant(0x80000000u))); 193 CSA_ASSERT(this, UintPtrLessThanOrEqual(value, IntPtrConstant(0x80000000u)));
193 value = IntPtrSub(value, IntPtrConstant(1)); 194 value = IntPtrSub(value, IntPtrConstant(1));
194 for (int i = 1; i <= 16; i *= 2) { 195 for (int i = 1; i <= 16; i *= 2) {
195 value = WordOr(value, WordShr(value, IntPtrConstant(i))); 196 value = WordOr(value, WordShr(value, IntPtrConstant(i)));
196 } 197 }
197 return IntPtrAdd(value, IntPtrConstant(1)); 198 return IntPtrAdd(value, IntPtrConstant(1));
198 } 199 }
199 200
200 Node* CodeStubAssembler::MatchesParameterMode(Node* value, ParameterMode mode) {
201 return (mode == SMI_PARAMETERS) ? TaggedIsSmi(value) : Int32Constant(1);
202 }
203
204 Node* CodeStubAssembler::WordIsPowerOfTwo(Node* value) { 201 Node* CodeStubAssembler::WordIsPowerOfTwo(Node* value) {
205 // value && !(value & (value - 1)) 202 // value && !(value & (value - 1))
206 return WordEqual( 203 return WordEqual(
207 Select( 204 Select(
208 WordEqual(value, IntPtrConstant(0)), 205 WordEqual(value, IntPtrConstant(0)),
209 [=] { return IntPtrConstant(1); }, 206 [=] { return IntPtrConstant(1); },
210 [=] { return WordAnd(value, IntPtrSub(value, IntPtrConstant(1))); }, 207 [=] { return WordAnd(value, IntPtrSub(value, IntPtrConstant(1))); },
211 MachineType::PointerRepresentation()), 208 MachineType::PointerRepresentation()),
212 IntPtrConstant(0)); 209 IntPtrConstant(0));
213 } 210 }
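[Editor's note: not part of the patch] IntPtrRoundUpToPowerOfTwo32 and WordIsPowerOfTwo above are the Node-graph forms of two classic bit tricks. A minimal scalar C++ sketch of the same computations, for readers who find the CSA plumbing hard to follow (the standalone functions and names are illustrative only):

    #include <cassert>
    #include <cstdint>

    // Smear the highest set bit of (value - 1) into every lower position,
    // then add one: the result is the smallest power of two >= value.
    uint32_t RoundUpToPowerOfTwo32(uint32_t value) {
      assert(value <= 0x80000000u);  // mirrors the CSA_ASSERT above
      value -= 1;
      for (int shift = 1; shift <= 16; shift *= 2) {
        value |= value >> shift;
      }
      return value + 1;
    }

    // value && !(value & (value - 1)): clearing the lowest set bit of a
    // power of two leaves zero; zero itself is rejected by the first test.
    bool IsPowerOfTwo(uintptr_t value) {
      return value != 0 && (value & (value - 1)) == 0;
    }
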
(...skipping 454 matching lines...)
668 void CodeStubAssembler::Bind(Label* label, AssemblerDebugInfo debug_info) { 665 void CodeStubAssembler::Bind(Label* label, AssemblerDebugInfo debug_info) {
669 CodeAssembler::Bind(label, debug_info); 666 CodeAssembler::Bind(label, debug_info);
670 } 667 }
671 #else 668 #else
672 void CodeStubAssembler::Bind(Label* label) { CodeAssembler::Bind(label); } 669 void CodeStubAssembler::Bind(Label* label) { CodeAssembler::Bind(label); }
673 #endif // DEBUG 670 #endif // DEBUG
674 671
675 void CodeStubAssembler::BranchIfPrototypesHaveNoElements( 672 void CodeStubAssembler::BranchIfPrototypesHaveNoElements(
676 Node* receiver_map, Label* definitely_no_elements, 673 Node* receiver_map, Label* definitely_no_elements,
677 Label* possibly_elements) { 674 Label* possibly_elements) {
678 CSA_SLOW_ASSERT(this, IsMap(receiver_map));
679 VARIABLE(var_map, MachineRepresentation::kTagged, receiver_map); 675 VARIABLE(var_map, MachineRepresentation::kTagged, receiver_map);
680 Label loop_body(this, &var_map); 676 Label loop_body(this, &var_map);
681 Node* empty_elements = LoadRoot(Heap::kEmptyFixedArrayRootIndex); 677 Node* empty_elements = LoadRoot(Heap::kEmptyFixedArrayRootIndex);
682 Goto(&loop_body); 678 Goto(&loop_body);
683 679
684 BIND(&loop_body); 680 BIND(&loop_body);
685 { 681 {
686 Node* map = var_map.value(); 682 Node* map = var_map.value();
687 Node* prototype = LoadMapPrototype(map); 683 Node* prototype = LoadMapPrototype(map);
688 GotoIf(WordEqual(prototype, NullConstant()), definitely_no_elements); 684 GotoIf(WordEqual(prototype, NullConstant()), definitely_no_elements);
(...skipping 520 matching lines...)
1209 result.Bind( 1205 result.Bind(
1210 LoadObjectField(result.value(), Map::kConstructorOrBackPointerOffset)); 1206 LoadObjectField(result.value(), Map::kConstructorOrBackPointerOffset));
1211 Goto(&loop); 1207 Goto(&loop);
1212 } 1208 }
1213 BIND(&done); 1209 BIND(&done);
1214 return result.value(); 1210 return result.value();
1215 } 1211 }
1216 1212
1217 Node* CodeStubAssembler::LoadSharedFunctionInfoSpecialField( 1213 Node* CodeStubAssembler::LoadSharedFunctionInfoSpecialField(
1218 Node* shared, int offset, ParameterMode mode) { 1214 Node* shared, int offset, ParameterMode mode) {
1219 CSA_SLOW_ASSERT(this, HasInstanceType(shared, SHARED_FUNCTION_INFO_TYPE));
1220 if (Is64()) { 1215 if (Is64()) {
1221 Node* result = LoadObjectField(shared, offset, MachineType::Int32()); 1216 Node* result = LoadObjectField(shared, offset, MachineType::Int32());
1222 if (mode == SMI_PARAMETERS) { 1217 if (mode == SMI_PARAMETERS) {
1223 result = SmiTag(result); 1218 result = SmiTag(result);
1224 } else { 1219 } else {
1225 result = ChangeUint32ToWord(result); 1220 result = ChangeUint32ToWord(result);
1226 } 1221 }
1227 return result; 1222 return result;
1228 } else { 1223 } else {
1229 Node* result = LoadObjectField(shared, offset); 1224 Node* result = LoadObjectField(shared, offset);
(...skipping 124 matching lines...)
1354 return AllocateHeapNumberWithValue(value); 1349 return AllocateHeapNumberWithValue(value);
1355 default: 1350 default:
1356 UNREACHABLE(); 1351 UNREACHABLE();
1357 return nullptr; 1352 return nullptr;
1358 } 1353 }
1359 } 1354 }
1360 1355
1361 Node* CodeStubAssembler::LoadAndUntagToWord32FixedArrayElement( 1356 Node* CodeStubAssembler::LoadAndUntagToWord32FixedArrayElement(
1362 Node* object, Node* index_node, int additional_offset, 1357 Node* object, Node* index_node, int additional_offset,
1363 ParameterMode parameter_mode) { 1358 ParameterMode parameter_mode) {
1364 CSA_SLOW_ASSERT(this, IsFixedArray(object));
1365 CSA_SLOW_ASSERT(this, MatchesParameterMode(index_node, parameter_mode));
1366 int32_t header_size = 1359 int32_t header_size =
1367 FixedArray::kHeaderSize + additional_offset - kHeapObjectTag; 1360 FixedArray::kHeaderSize + additional_offset - kHeapObjectTag;
1368 #if V8_TARGET_LITTLE_ENDIAN 1361 #if V8_TARGET_LITTLE_ENDIAN
1369 if (Is64()) { 1362 if (Is64()) {
1370 header_size += kPointerSize / 2; 1363 header_size += kPointerSize / 2;
1371 } 1364 }
1372 #endif 1365 #endif
1373 Node* offset = ElementOffsetFromIndex(index_node, FAST_HOLEY_ELEMENTS, 1366 Node* offset = ElementOffsetFromIndex(index_node, FAST_HOLEY_ELEMENTS,
1374 parameter_mode, header_size); 1367 parameter_mode, header_size);
1375 if (Is64()) { 1368 if (Is64()) {
1376 return Load(MachineType::Int32(), object, offset); 1369 return Load(MachineType::Int32(), object, offset);
1377 } else { 1370 } else {
1378 return SmiToWord32(Load(MachineType::AnyTagged(), object, offset)); 1371 return SmiToWord32(Load(MachineType::AnyTagged(), object, offset));
1379 } 1372 }
1380 } 1373 }
1381 1374
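[Editor's note: not part of the patch] The kPointerSize / 2 adjustment in LoadAndUntagToWord32FixedArrayElement works because, on 64-bit V8 of this era, a Smi keeps its 32-bit payload in the upper half of the tagged word; on a little-endian target the payload can therefore be loaded directly as an Int32 from offset +4, skipping the untag. A scalar illustration under that assumption (standalone form and names are not V8 API):

    #include <cstdint>
    #include <cstring>

    int32_t LoadSmiPayloadLE(const uint64_t* tagged_slot) {
      // Low 32 bits: the Smi tag (zero). High 32 bits: the integer value.
      int32_t payload;
      std::memcpy(&payload,
                  reinterpret_cast<const char*>(tagged_slot) + sizeof(int32_t),
                  sizeof(payload));
      return payload;
    }
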
1382 Node* CodeStubAssembler::LoadFixedDoubleArrayElement( 1375 Node* CodeStubAssembler::LoadFixedDoubleArrayElement(
1383 Node* object, Node* index_node, MachineType machine_type, 1376 Node* object, Node* index_node, MachineType machine_type,
1384 int additional_offset, ParameterMode parameter_mode, Label* if_hole) { 1377 int additional_offset, ParameterMode parameter_mode, Label* if_hole) {
1385 CSA_SLOW_ASSERT(this, IsFixedDoubleArray(object));
1386 CSA_SLOW_ASSERT(this, MatchesParameterMode(index_node, parameter_mode));
1387 CSA_ASSERT(this, IsFixedDoubleArray(object)); 1378 CSA_ASSERT(this, IsFixedDoubleArray(object));
1388 int32_t header_size = 1379 int32_t header_size =
1389 FixedDoubleArray::kHeaderSize + additional_offset - kHeapObjectTag; 1380 FixedDoubleArray::kHeaderSize + additional_offset - kHeapObjectTag;
1390 Node* offset = ElementOffsetFromIndex(index_node, FAST_HOLEY_DOUBLE_ELEMENTS, 1381 Node* offset = ElementOffsetFromIndex(index_node, FAST_HOLEY_DOUBLE_ELEMENTS,
1391 parameter_mode, header_size); 1382 parameter_mode, header_size);
1392 return LoadDoubleWithHoleCheck(object, offset, if_hole, machine_type); 1383 return LoadDoubleWithHoleCheck(object, offset, if_hole, machine_type);
1393 } 1384 }
1394 1385
1395 Node* CodeStubAssembler::LoadDoubleWithHoleCheck(Node* base, Node* offset, 1386 Node* CodeStubAssembler::LoadDoubleWithHoleCheck(Node* base, Node* offset,
1396 Label* if_hole, 1387 Label* if_hole,
(...skipping 147 matching lines...)
1544 } else { 1535 } else {
1545 return StoreObjectField(object, offset, LoadRoot(root_index)); 1536 return StoreObjectField(object, offset, LoadRoot(root_index));
1546 } 1537 }
1547 } 1538 }
1548 1539
1549 Node* CodeStubAssembler::StoreFixedArrayElement(Node* object, Node* index_node, 1540 Node* CodeStubAssembler::StoreFixedArrayElement(Node* object, Node* index_node,
1550 Node* value, 1541 Node* value,
1551 WriteBarrierMode barrier_mode, 1542 WriteBarrierMode barrier_mode,
1552 int additional_offset, 1543 int additional_offset,
1553 ParameterMode parameter_mode) { 1544 ParameterMode parameter_mode) {
1554 CSA_SLOW_ASSERT(this, IsFixedArray(object));
1555 CSA_SLOW_ASSERT(this, MatchesParameterMode(index_node, parameter_mode));
1556 DCHECK(barrier_mode == SKIP_WRITE_BARRIER || 1545 DCHECK(barrier_mode == SKIP_WRITE_BARRIER ||
1557 barrier_mode == UPDATE_WRITE_BARRIER); 1546 barrier_mode == UPDATE_WRITE_BARRIER);
1558 int header_size = 1547 int header_size =
1559 FixedArray::kHeaderSize + additional_offset - kHeapObjectTag; 1548 FixedArray::kHeaderSize + additional_offset - kHeapObjectTag;
1560 Node* offset = ElementOffsetFromIndex(index_node, FAST_HOLEY_ELEMENTS, 1549 Node* offset = ElementOffsetFromIndex(index_node, FAST_HOLEY_ELEMENTS,
1561 parameter_mode, header_size); 1550 parameter_mode, header_size);
1562 if (barrier_mode == SKIP_WRITE_BARRIER) { 1551 if (barrier_mode == SKIP_WRITE_BARRIER) {
1563 return StoreNoWriteBarrier(MachineRepresentation::kTagged, object, offset, 1552 return StoreNoWriteBarrier(MachineRepresentation::kTagged, object, offset,
1564 value); 1553 value);
1565 } else { 1554 } else {
1566 return Store(object, offset, value); 1555 return Store(object, offset, value);
1567 } 1556 }
1568 } 1557 }
1569 1558
1570 Node* CodeStubAssembler::StoreFixedDoubleArrayElement( 1559 Node* CodeStubAssembler::StoreFixedDoubleArrayElement(
1571 Node* object, Node* index_node, Node* value, ParameterMode parameter_mode) { 1560 Node* object, Node* index_node, Node* value, ParameterMode parameter_mode) {
1572 CSA_ASSERT(this, IsFixedDoubleArray(object)); 1561 CSA_ASSERT(this, IsFixedDoubleArray(object));
1573 CSA_SLOW_ASSERT(this, MatchesParameterMode(index_node, parameter_mode));
1574 Node* offset = 1562 Node* offset =
1575 ElementOffsetFromIndex(index_node, FAST_DOUBLE_ELEMENTS, parameter_mode, 1563 ElementOffsetFromIndex(index_node, FAST_DOUBLE_ELEMENTS, parameter_mode,
1576 FixedArray::kHeaderSize - kHeapObjectTag); 1564 FixedArray::kHeaderSize - kHeapObjectTag);
1577 MachineRepresentation rep = MachineRepresentation::kFloat64; 1565 MachineRepresentation rep = MachineRepresentation::kFloat64;
1578 return StoreNoWriteBarrier(rep, object, offset, value); 1566 return StoreNoWriteBarrier(rep, object, offset, value);
1579 } 1567 }
1580 1568
1581 void CodeStubAssembler::EnsureArrayLengthWritable(Node* map, Label* bailout) { 1569 void CodeStubAssembler::EnsureArrayLengthWritable(Node* map, Label* bailout) {
1582 // Check whether the length property is writable. The length property is the 1570 // Check whether the length property is writable. The length property is the
1583 // only default named property on arrays. It's nonconfigurable, hence is 1571 // only default named property on arrays. It's nonconfigurable, hence is
(...skipping 41 matching lines...)
1625 kind, capacity, new_capacity, mode, 1613 kind, capacity, new_capacity, mode,
1626 bailout)); 1614 bailout));
1627 Goto(&fits); 1615 Goto(&fits);
1628 BIND(&fits); 1616 BIND(&fits);
1629 } 1617 }
1630 1618
1631 Node* CodeStubAssembler::BuildAppendJSArray(ElementsKind kind, Node* array, 1619 Node* CodeStubAssembler::BuildAppendJSArray(ElementsKind kind, Node* array,
1632 CodeStubArguments& args, 1620 CodeStubArguments& args,
1633 Variable& arg_index, 1621 Variable& arg_index,
1634 Label* bailout) { 1622 Label* bailout) {
1635 CSA_SLOW_ASSERT(this, IsJSArray(array));
1636 Comment("BuildAppendJSArray: %s", ElementsKindToString(kind)); 1623 Comment("BuildAppendJSArray: %s", ElementsKindToString(kind));
1637 Label pre_bailout(this); 1624 Label pre_bailout(this);
1638 Label success(this); 1625 Label success(this);
1639 VARIABLE(var_tagged_length, MachineRepresentation::kTagged); 1626 VARIABLE(var_tagged_length, MachineRepresentation::kTagged);
1640 ParameterMode mode = OptimalParameterMode(); 1627 ParameterMode mode = OptimalParameterMode();
1641 VARIABLE(var_length, OptimalParameterRepresentation(), 1628 VARIABLE(var_length, OptimalParameterRepresentation(),
1642 TaggedToParameter(LoadJSArrayLength(array), mode)); 1629 TaggedToParameter(LoadJSArrayLength(array), mode));
1643 VARIABLE(var_elements, MachineRepresentation::kTagged, LoadElements(array)); 1630 VARIABLE(var_elements, MachineRepresentation::kTagged, LoadElements(array));
1644 1631
1645 // Resize the capacity of the fixed array if it doesn't fit. 1632 // Resize the capacity of the fixed array if it doesn't fit.
(...skipping 50 matching lines...)
1696 Float64SilenceNaN(double_value), mode); 1683 Float64SilenceNaN(double_value), mode);
1697 } else { 1684 } else {
1698 WriteBarrierMode barrier_mode = 1685 WriteBarrierMode barrier_mode =
1699 IsFastSmiElementsKind(kind) ? SKIP_WRITE_BARRIER : UPDATE_WRITE_BARRIER; 1686 IsFastSmiElementsKind(kind) ? SKIP_WRITE_BARRIER : UPDATE_WRITE_BARRIER;
1700 StoreFixedArrayElement(elements, index, value, barrier_mode, 0, mode); 1687 StoreFixedArrayElement(elements, index, value, barrier_mode, 0, mode);
1701 } 1688 }
1702 } 1689 }
1703 1690
1704 void CodeStubAssembler::BuildAppendJSArray(ElementsKind kind, Node* array, 1691 void CodeStubAssembler::BuildAppendJSArray(ElementsKind kind, Node* array,
1705 Node* value, Label* bailout) { 1692 Node* value, Label* bailout) {
1706 CSA_SLOW_ASSERT(this, IsJSArray(array));
1707 Comment("BuildAppendJSArray: %s", ElementsKindToString(kind)); 1693 Comment("BuildAppendJSArray: %s", ElementsKindToString(kind));
1708 ParameterMode mode = OptimalParameterMode(); 1694 ParameterMode mode = OptimalParameterMode();
1709 VARIABLE(var_length, OptimalParameterRepresentation(), 1695 VARIABLE(var_length, OptimalParameterRepresentation(),
1710 TaggedToParameter(LoadJSArrayLength(array), mode)); 1696 TaggedToParameter(LoadJSArrayLength(array), mode));
1711 VARIABLE(var_elements, MachineRepresentation::kTagged, LoadElements(array)); 1697 VARIABLE(var_elements, MachineRepresentation::kTagged, LoadElements(array));
1712 1698
1713 // Resize the capacity of the fixed array if it doesn't fit. 1699 // Resize the capacity of the fixed array if it doesn't fit.
1714 Node* growth = IntPtrOrSmiConstant(1, mode); 1700 Node* growth = IntPtrOrSmiConstant(1, mode);
1715 PossiblyGrowElementsCapacity(mode, kind, array, var_length.value(), 1701 PossiblyGrowElementsCapacity(mode, kind, array, var_length.value(),
1716 &var_elements, growth, bailout); 1702 &var_elements, growth, bailout);
(...skipping 39 matching lines...)
1756 StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kHashFieldSlot, 1742 StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kHashFieldSlot,
1757 IntPtrConstant(String::kEmptyHashField), 1743 IntPtrConstant(String::kEmptyHashField),
1758 MachineType::PointerRepresentation()); 1744 MachineType::PointerRepresentation());
1759 return result; 1745 return result;
1760 } 1746 }
1761 1747
1762 Node* CodeStubAssembler::AllocateSeqOneByteString(Node* context, Node* length, 1748 Node* CodeStubAssembler::AllocateSeqOneByteString(Node* context, Node* length,
1763 ParameterMode mode, 1749 ParameterMode mode,
1764 AllocationFlags flags) { 1750 AllocationFlags flags) {
1765 Comment("AllocateSeqOneByteString"); 1751 Comment("AllocateSeqOneByteString");
1766 CSA_SLOW_ASSERT(this, IsFixedArray(context));
1767 CSA_SLOW_ASSERT(this, MatchesParameterMode(length, mode));
1768 VARIABLE(var_result, MachineRepresentation::kTagged); 1752 VARIABLE(var_result, MachineRepresentation::kTagged);
1769 1753
1770 // Compute the SeqOneByteString size and check if it fits into new space. 1754 // Compute the SeqOneByteString size and check if it fits into new space.
1771 Label if_lengthiszero(this), if_sizeissmall(this), 1755 Label if_lengthiszero(this), if_sizeissmall(this),
1772 if_notsizeissmall(this, Label::kDeferred), if_join(this); 1756 if_notsizeissmall(this, Label::kDeferred), if_join(this);
1773 GotoIf(WordEqual(length, IntPtrOrSmiConstant(0, mode)), &if_lengthiszero); 1757 GotoIf(WordEqual(length, IntPtrOrSmiConstant(0, mode)), &if_lengthiszero);
1774 1758
1775 Node* raw_size = GetArrayAllocationSize( 1759 Node* raw_size = GetArrayAllocationSize(
1776 length, UINT8_ELEMENTS, mode, 1760 length, UINT8_ELEMENTS, mode,
1777 SeqOneByteString::kHeaderSize + kObjectAlignmentMask); 1761 SeqOneByteString::kHeaderSize + kObjectAlignmentMask);
(...skipping 50 matching lines...)
1828 // Initialize both used and unused parts of hash field slot at once. 1812 // Initialize both used and unused parts of hash field slot at once.
1829 StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kHashFieldSlot, 1813 StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kHashFieldSlot,
1830 IntPtrConstant(String::kEmptyHashField), 1814 IntPtrConstant(String::kEmptyHashField),
1831 MachineType::PointerRepresentation()); 1815 MachineType::PointerRepresentation());
1832 return result; 1816 return result;
1833 } 1817 }
1834 1818
1835 Node* CodeStubAssembler::AllocateSeqTwoByteString(Node* context, Node* length, 1819 Node* CodeStubAssembler::AllocateSeqTwoByteString(Node* context, Node* length,
1836 ParameterMode mode, 1820 ParameterMode mode,
1837 AllocationFlags flags) { 1821 AllocationFlags flags) {
1838 CSA_SLOW_ASSERT(this, IsFixedArray(context));
1839 CSA_SLOW_ASSERT(this, MatchesParameterMode(length, mode));
1840 Comment("AllocateSeqTwoByteString"); 1822 Comment("AllocateSeqTwoByteString");
1841 VARIABLE(var_result, MachineRepresentation::kTagged); 1823 VARIABLE(var_result, MachineRepresentation::kTagged);
1842 1824
1843 // Compute the SeqTwoByteString size and check if it fits into new space. 1825 // Compute the SeqTwoByteString size and check if it fits into new space.
1844 Label if_lengthiszero(this), if_sizeissmall(this), 1826 Label if_lengthiszero(this), if_sizeissmall(this),
1845 if_notsizeissmall(this, Label::kDeferred), if_join(this); 1827 if_notsizeissmall(this, Label::kDeferred), if_join(this);
1846 GotoIf(WordEqual(length, IntPtrOrSmiConstant(0, mode)), &if_lengthiszero); 1828 GotoIf(WordEqual(length, IntPtrOrSmiConstant(0, mode)), &if_lengthiszero);
1847 1829
1848 Node* raw_size = GetArrayAllocationSize( 1830 Node* raw_size = GetArrayAllocationSize(
1849 length, UINT16_ELEMENTS, mode, 1831 length, UINT16_ELEMENTS, mode,
(...skipping 35 matching lines...)
1885 Goto(&if_join); 1867 Goto(&if_join);
1886 } 1868 }
1887 1869
1888 BIND(&if_join); 1870 BIND(&if_join);
1889 return var_result.value(); 1871 return var_result.value();
1890 } 1872 }
1891 1873
1892 Node* CodeStubAssembler::AllocateSlicedString( 1874 Node* CodeStubAssembler::AllocateSlicedString(
1893 Heap::RootListIndex map_root_index, Node* length, Node* parent, 1875 Heap::RootListIndex map_root_index, Node* length, Node* parent,
1894 Node* offset) { 1876 Node* offset) {
1895 CSA_ASSERT(this, IsString(parent));
1896 CSA_ASSERT(this, TaggedIsSmi(length)); 1877 CSA_ASSERT(this, TaggedIsSmi(length));
1897 CSA_ASSERT(this, TaggedIsSmi(offset));
1898 Node* result = Allocate(SlicedString::kSize); 1878 Node* result = Allocate(SlicedString::kSize);
1899 DCHECK(Heap::RootIsImmortalImmovable(map_root_index)); 1879 DCHECK(Heap::RootIsImmortalImmovable(map_root_index));
1900 StoreMapNoWriteBarrier(result, map_root_index); 1880 StoreMapNoWriteBarrier(result, map_root_index);
1901 StoreObjectFieldNoWriteBarrier(result, SlicedString::kLengthOffset, length, 1881 StoreObjectFieldNoWriteBarrier(result, SlicedString::kLengthOffset, length,
1902 MachineRepresentation::kTagged); 1882 MachineRepresentation::kTagged);
1903 // Initialize both used and unused parts of hash field slot at once. 1883 // Initialize both used and unused parts of hash field slot at once.
1904 StoreObjectFieldNoWriteBarrier(result, SlicedString::kHashFieldSlot, 1884 StoreObjectFieldNoWriteBarrier(result, SlicedString::kHashFieldSlot,
1905 IntPtrConstant(String::kEmptyHashField), 1885 IntPtrConstant(String::kEmptyHashField),
1906 MachineType::PointerRepresentation()); 1886 MachineType::PointerRepresentation());
1907 StoreObjectFieldNoWriteBarrier(result, SlicedString::kParentOffset, parent, 1887 StoreObjectFieldNoWriteBarrier(result, SlicedString::kParentOffset, parent,
(...skipping 12 matching lines...)
1920 Node* CodeStubAssembler::AllocateSlicedTwoByteString(Node* length, Node* parent, 1900 Node* CodeStubAssembler::AllocateSlicedTwoByteString(Node* length, Node* parent,
1921 Node* offset) { 1901 Node* offset) {
1922 return AllocateSlicedString(Heap::kSlicedStringMapRootIndex, length, parent, 1902 return AllocateSlicedString(Heap::kSlicedStringMapRootIndex, length, parent,
1923 offset); 1903 offset);
1924 } 1904 }
1925 1905
1926 Node* CodeStubAssembler::AllocateConsString(Heap::RootListIndex map_root_index, 1906 Node* CodeStubAssembler::AllocateConsString(Heap::RootListIndex map_root_index,
1927 Node* length, Node* first, 1907 Node* length, Node* first,
1928 Node* second, 1908 Node* second,
1929 AllocationFlags flags) { 1909 AllocationFlags flags) {
1930 CSA_ASSERT(this, IsString(first));
1931 CSA_ASSERT(this, IsString(second));
1932 CSA_ASSERT(this, TaggedIsSmi(length)); 1910 CSA_ASSERT(this, TaggedIsSmi(length));
1933 Node* result = Allocate(ConsString::kSize, flags); 1911 Node* result = Allocate(ConsString::kSize, flags);
1934 DCHECK(Heap::RootIsImmortalImmovable(map_root_index)); 1912 DCHECK(Heap::RootIsImmortalImmovable(map_root_index));
1935 StoreMapNoWriteBarrier(result, map_root_index); 1913 StoreMapNoWriteBarrier(result, map_root_index);
1936 StoreObjectFieldNoWriteBarrier(result, ConsString::kLengthOffset, length, 1914 StoreObjectFieldNoWriteBarrier(result, ConsString::kLengthOffset, length,
1937 MachineRepresentation::kTagged); 1915 MachineRepresentation::kTagged);
1938 // Initialize both used and unused parts of hash field slot at once. 1916 // Initialize both used and unused parts of hash field slot at once.
1939 StoreObjectFieldNoWriteBarrier(result, ConsString::kHashFieldSlot, 1917 StoreObjectFieldNoWriteBarrier(result, ConsString::kHashFieldSlot,
1940 IntPtrConstant(String::kEmptyHashField), 1918 IntPtrConstant(String::kEmptyHashField),
1941 MachineType::PointerRepresentation()); 1919 MachineType::PointerRepresentation());
(...skipping 19 matching lines...)
1961 1939
1962 Node* CodeStubAssembler::AllocateTwoByteConsString(Node* length, Node* first, 1940 Node* CodeStubAssembler::AllocateTwoByteConsString(Node* length, Node* first,
1963 Node* second, 1941 Node* second,
1964 AllocationFlags flags) { 1942 AllocationFlags flags) {
1965 return AllocateConsString(Heap::kConsStringMapRootIndex, length, first, 1943 return AllocateConsString(Heap::kConsStringMapRootIndex, length, first,
1966 second, flags); 1944 second, flags);
1967 } 1945 }
1968 1946
1969 Node* CodeStubAssembler::NewConsString(Node* context, Node* length, Node* left, 1947 Node* CodeStubAssembler::NewConsString(Node* context, Node* length, Node* left,
1970 Node* right, AllocationFlags flags) { 1948 Node* right, AllocationFlags flags) {
1971 CSA_ASSERT(this, IsFixedArray(context));
1972 CSA_ASSERT(this, IsString(left));
1973 CSA_ASSERT(this, IsString(right));
1974 CSA_ASSERT(this, TaggedIsSmi(length)); 1949 CSA_ASSERT(this, TaggedIsSmi(length));
1975 // Added string can be a cons string. 1950 // Added string can be a cons string.
1976 Comment("Allocating ConsString"); 1951 Comment("Allocating ConsString");
1977 Node* left_instance_type = LoadInstanceType(left); 1952 Node* left_instance_type = LoadInstanceType(left);
1978 Node* right_instance_type = LoadInstanceType(right); 1953 Node* right_instance_type = LoadInstanceType(right);
1979 1954
1980 // Compute intersection and difference of instance types. 1955 // Compute intersection and difference of instance types.
1981 Node* anded_instance_types = 1956 Node* anded_instance_types =
1982 Word32And(left_instance_type, right_instance_type); 1957 Word32And(left_instance_type, right_instance_type);
1983 Node* xored_instance_types = 1958 Node* xored_instance_types =
(...skipping 35 matching lines...)
2019 result.Bind(AllocateTwoByteConsString(length, left, right, flags)); 1994 result.Bind(AllocateTwoByteConsString(length, left, right, flags));
2020 Goto(&done); 1995 Goto(&done);
2021 1996
2022 BIND(&done); 1997 BIND(&done);
2023 1998
2024 return result.value(); 1999 return result.value();
2025 } 2000 }
2026 2001
2027 Node* CodeStubAssembler::AllocateRegExpResult(Node* context, Node* length, 2002 Node* CodeStubAssembler::AllocateRegExpResult(Node* context, Node* length,
2028 Node* index, Node* input) { 2003 Node* index, Node* input) {
2029 CSA_ASSERT(this, IsFixedArray(context));
2030 CSA_ASSERT(this, TaggedIsSmi(index));
2031 CSA_ASSERT(this, TaggedIsSmi(length));
2032 CSA_ASSERT(this, IsString(input));
2033
2034 #ifdef DEBUG
2035 Node* const max_length = 2004 Node* const max_length =
2036 SmiConstant(Smi::FromInt(JSArray::kInitialMaxFastElementArray)); 2005 SmiConstant(Smi::FromInt(JSArray::kInitialMaxFastElementArray));
2037 CSA_ASSERT(this, SmiLessThanOrEqual(length, max_length)); 2006 CSA_ASSERT(this, SmiLessThanOrEqual(length, max_length));
2038 #endif // DEBUG 2007 USE(max_length);
2039 2008
2040 // Allocate the JSRegExpResult. 2009 // Allocate the JSRegExpResult.
2041 // TODO(jgruber): Fold JSArray and FixedArray allocations, then remove 2010 // TODO(jgruber): Fold JSArray and FixedArray allocations, then remove
2042 // unneeded store of elements. 2011 // unneeded store of elements.
2043 Node* const result = Allocate(JSRegExpResult::kSize); 2012 Node* const result = Allocate(JSRegExpResult::kSize);
2044 2013
2045 // TODO(jgruber): Store map as Heap constant? 2014 // TODO(jgruber): Store map as Heap constant?
2046 Node* const native_context = LoadNativeContext(context); 2015 Node* const native_context = LoadNativeContext(context);
2047 Node* const map = 2016 Node* const map =
2048 LoadContextElement(native_context, Context::REGEXP_RESULT_MAP_INDEX); 2017 LoadContextElement(native_context, Context::REGEXP_RESULT_MAP_INDEX);
(...skipping 100 matching lines...)
2149 Node* size = TimesPointerSize(LoadMapInstanceSize(map)); 2118 Node* size = TimesPointerSize(LoadMapInstanceSize(map));
2150 Node* object = AllocateInNewSpace(size, flags); 2119 Node* object = AllocateInNewSpace(size, flags);
2151 StoreMapNoWriteBarrier(object, map); 2120 StoreMapNoWriteBarrier(object, map);
2152 InitializeJSObjectFromMap(object, map, size, properties, elements); 2121 InitializeJSObjectFromMap(object, map, size, properties, elements);
2153 return object; 2122 return object;
2154 } 2123 }
2155 2124
2156 void CodeStubAssembler::InitializeJSObjectFromMap(Node* object, Node* map, 2125 void CodeStubAssembler::InitializeJSObjectFromMap(Node* object, Node* map,
2157 Node* size, Node* properties, 2126 Node* size, Node* properties,
2158 Node* elements) { 2127 Node* elements) {
2159 CSA_SLOW_ASSERT(this, IsMap(map));
2160 // This helper assumes that the object is in new-space, as guarded by the 2128 // This helper assumes that the object is in new-space, as guarded by the
2161 // check in AllocatedJSObjectFromMap. 2129 // check in AllocatedJSObjectFromMap.
2162 if (properties == nullptr) { 2130 if (properties == nullptr) {
2163 CSA_ASSERT(this, Word32BinaryNot(IsDictionaryMap((map)))); 2131 CSA_ASSERT(this, Word32BinaryNot(IsDictionaryMap((map))));
2164 StoreObjectFieldRoot(object, JSObject::kPropertiesOffset, 2132 StoreObjectFieldRoot(object, JSObject::kPropertiesOffset,
2165 Heap::kEmptyFixedArrayRootIndex); 2133 Heap::kEmptyFixedArrayRootIndex);
2166 } else { 2134 } else {
2167 CSA_ASSERT(this, IsFixedArray(properties));
2168 StoreObjectFieldNoWriteBarrier(object, JSObject::kPropertiesOffset, 2135 StoreObjectFieldNoWriteBarrier(object, JSObject::kPropertiesOffset,
2169 properties); 2136 properties);
2170 } 2137 }
2171 if (elements == nullptr) { 2138 if (elements == nullptr) {
2172 StoreObjectFieldRoot(object, JSObject::kElementsOffset, 2139 StoreObjectFieldRoot(object, JSObject::kElementsOffset,
2173 Heap::kEmptyFixedArrayRootIndex); 2140 Heap::kEmptyFixedArrayRootIndex);
2174 } else { 2141 } else {
2175 CSA_ASSERT(this, IsFixedArray(elements));
2176 StoreObjectFieldNoWriteBarrier(object, JSObject::kElementsOffset, elements); 2142 StoreObjectFieldNoWriteBarrier(object, JSObject::kElementsOffset, elements);
2177 } 2143 }
2178 InitializeJSObjectBody(object, map, size, JSObject::kHeaderSize); 2144 InitializeJSObjectBody(object, map, size, JSObject::kHeaderSize);
2179 } 2145 }
2180 2146
2181 void CodeStubAssembler::InitializeJSObjectBody(Node* object, Node* map, 2147 void CodeStubAssembler::InitializeJSObjectBody(Node* object, Node* map,
2182 Node* size, int start_offset) { 2148 Node* size, int start_offset) {
2183 CSA_SLOW_ASSERT(this, IsMap(map));
2184 // TODO(cbruni): activate in-object slack tracking machinery. 2149 // TODO(cbruni): activate in-object slack tracking machinery.
2185 Comment("InitializeJSObjectBody"); 2150 Comment("InitializeJSObjectBody");
2186 Node* filler = LoadRoot(Heap::kUndefinedValueRootIndex); 2151 Node* filler = LoadRoot(Heap::kUndefinedValueRootIndex);
2187 // Calculate the untagged field addresses. 2152 // Calculate the untagged field addresses.
2188 object = BitcastTaggedToWord(object); 2153 object = BitcastTaggedToWord(object);
2189 Node* start_address = 2154 Node* start_address =
2190 IntPtrAdd(object, IntPtrConstant(start_offset - kHeapObjectTag)); 2155 IntPtrAdd(object, IntPtrConstant(start_offset - kHeapObjectTag));
2191 Node* end_address = 2156 Node* end_address =
2192 IntPtrSub(IntPtrAdd(object, size), IntPtrConstant(kHeapObjectTag)); 2157 IntPtrSub(IntPtrAdd(object, size), IntPtrConstant(kHeapObjectTag));
2193 StoreFieldsNoWriteBarrier(start_address, end_address, filler); 2158 StoreFieldsNoWriteBarrier(start_address, end_address, filler);
2194 } 2159 }
2195 2160
2196 void CodeStubAssembler::StoreFieldsNoWriteBarrier(Node* start_address, 2161 void CodeStubAssembler::StoreFieldsNoWriteBarrier(Node* start_address,
2197 Node* end_address, 2162 Node* end_address,
2198 Node* value) { 2163 Node* value) {
2199 Comment("StoreFieldsNoWriteBarrier"); 2164 Comment("StoreFieldsNoWriteBarrier");
2200 CSA_ASSERT(this, WordIsWordAligned(start_address)); 2165 CSA_ASSERT(this, WordIsWordAligned(start_address));
2201 CSA_ASSERT(this, WordIsWordAligned(end_address)); 2166 CSA_ASSERT(this, WordIsWordAligned(end_address));
2202 BuildFastLoop(start_address, end_address, 2167 BuildFastLoop(start_address, end_address,
2203 [this, value](Node* current) { 2168 [this, value](Node* current) {
2204 StoreNoWriteBarrier(MachineRepresentation::kTagged, current, 2169 StoreNoWriteBarrier(MachineRepresentation::kTagged, current,
2205 value); 2170 value);
2206 }, 2171 },
2207 kPointerSize, INTPTR_PARAMETERS, IndexAdvanceMode::kPost); 2172 kPointerSize, INTPTR_PARAMETERS, IndexAdvanceMode::kPost);
2208 } 2173 }
2209 2174
2210 Node* CodeStubAssembler::AllocateUninitializedJSArrayWithoutElements( 2175 Node* CodeStubAssembler::AllocateUninitializedJSArrayWithoutElements(
2211 ElementsKind kind, Node* array_map, Node* length, Node* allocation_site) { 2176 ElementsKind kind, Node* array_map, Node* length, Node* allocation_site) {
2212 Comment("begin allocation of JSArray without elements"); 2177 Comment("begin allocation of JSArray without elements");
2213 CSA_SLOW_ASSERT(this, TaggedIsPositiveSmi(length));
2214 CSA_SLOW_ASSERT(this, IsMap(array_map));
2215 int base_size = JSArray::kSize; 2178 int base_size = JSArray::kSize;
2216 if (allocation_site != nullptr) { 2179 if (allocation_site != nullptr) {
2217 base_size += AllocationMemento::kSize; 2180 base_size += AllocationMemento::kSize;
2218 } 2181 }
2219 2182
2220 Node* size = IntPtrConstant(base_size); 2183 Node* size = IntPtrConstant(base_size);
2221 Node* array = AllocateUninitializedJSArray(kind, array_map, length, 2184 Node* array = AllocateUninitializedJSArray(kind, array_map, length,
2222 allocation_site, size); 2185 allocation_site, size);
2223 return array; 2186 return array;
2224 } 2187 }
2225 2188
2226 std::pair<Node*, Node*> 2189 std::pair<Node*, Node*>
2227 CodeStubAssembler::AllocateUninitializedJSArrayWithElements( 2190 CodeStubAssembler::AllocateUninitializedJSArrayWithElements(
2228 ElementsKind kind, Node* array_map, Node* length, Node* allocation_site, 2191 ElementsKind kind, Node* array_map, Node* length, Node* allocation_site,
2229 Node* capacity, ParameterMode capacity_mode) { 2192 Node* capacity, ParameterMode capacity_mode) {
2230 Comment("begin allocation of JSArray with elements"); 2193 Comment("begin allocation of JSArray with elements");
2231 CSA_SLOW_ASSERT(this, TaggedIsPositiveSmi(length));
2232 CSA_SLOW_ASSERT(this, IsMap(array_map));
2233 int base_size = JSArray::kSize; 2194 int base_size = JSArray::kSize;
2234 2195
2235 if (allocation_site != nullptr) { 2196 if (allocation_site != nullptr) {
2236 base_size += AllocationMemento::kSize; 2197 base_size += AllocationMemento::kSize;
2237 } 2198 }
2238 2199
2239 int elements_offset = base_size; 2200 int elements_offset = base_size;
2240 2201
2241 // Compute space for elements 2202 // Compute space for elements
2242 base_size += FixedArray::kHeaderSize; 2203 base_size += FixedArray::kHeaderSize;
2243 Node* size = ElementOffsetFromIndex(capacity, kind, capacity_mode, base_size); 2204 Node* size = ElementOffsetFromIndex(capacity, kind, capacity_mode, base_size);
2244 2205
2245 Node* array = AllocateUninitializedJSArray(kind, array_map, length, 2206 Node* array = AllocateUninitializedJSArray(kind, array_map, length,
2246 allocation_site, size); 2207 allocation_site, size);
2247 2208
2248 Node* elements = InnerAllocate(array, elements_offset); 2209 Node* elements = InnerAllocate(array, elements_offset);
2249 StoreObjectFieldNoWriteBarrier(array, JSObject::kElementsOffset, elements); 2210 StoreObjectFieldNoWriteBarrier(array, JSObject::kElementsOffset, elements);
2250 2211
2251 return {array, elements}; 2212 return {array, elements};
2252 } 2213 }
2253 2214
2254 Node* CodeStubAssembler::AllocateUninitializedJSArray(ElementsKind kind, 2215 Node* CodeStubAssembler::AllocateUninitializedJSArray(ElementsKind kind,
2255 Node* array_map, 2216 Node* array_map,
2256 Node* length, 2217 Node* length,
2257 Node* allocation_site, 2218 Node* allocation_site,
2258 Node* size_in_bytes) { 2219 Node* size_in_bytes) {
2259 CSA_SLOW_ASSERT(this, TaggedIsPositiveSmi(length));
2260 CSA_SLOW_ASSERT(this, IsMap(array_map));
2261
2262 // Allocate space for the JSArray and the elements FixedArray in one go. 2220 // Allocate space for the JSArray and the elements FixedArray in one go.
2263 Node* array = AllocateInNewSpace(size_in_bytes); 2221 Node* array = AllocateInNewSpace(size_in_bytes);
2264 2222
2265 Comment("write JSArray headers"); 2223 Comment("write JSArray headers");
2266 StoreMapNoWriteBarrier(array, array_map); 2224 StoreMapNoWriteBarrier(array, array_map);
2267 2225
2226 CSA_ASSERT(this, TaggedIsSmi(length));
2268 StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length); 2227 StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
2269 2228
2270 StoreObjectFieldRoot(array, JSArray::kPropertiesOffset, 2229 StoreObjectFieldRoot(array, JSArray::kPropertiesOffset,
2271 Heap::kEmptyFixedArrayRootIndex); 2230 Heap::kEmptyFixedArrayRootIndex);
2272 2231
2273 if (allocation_site != nullptr) { 2232 if (allocation_site != nullptr) {
2274 InitializeAllocationMemento(array, JSArray::kSize, allocation_site); 2233 InitializeAllocationMemento(array, JSArray::kSize, allocation_site);
2275 } 2234 }
2276 return array; 2235 return array;
2277 } 2236 }
2278 2237
2279 Node* CodeStubAssembler::AllocateJSArray(ElementsKind kind, Node* array_map, 2238 Node* CodeStubAssembler::AllocateJSArray(ElementsKind kind, Node* array_map,
2280 Node* capacity, Node* length, 2239 Node* capacity, Node* length,
2281 Node* allocation_site, 2240 Node* allocation_site,
2282 ParameterMode capacity_mode) { 2241 ParameterMode capacity_mode) {
2283 CSA_SLOW_ASSERT(this, IsMap(array_map));
2284 CSA_SLOW_ASSERT(this, TaggedIsPositiveSmi(length));
2285 CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity, capacity_mode));
2286
2287 Node *array = nullptr, *elements = nullptr; 2242 Node *array = nullptr, *elements = nullptr;
2288 if (IsIntPtrOrSmiConstantZero(capacity)) { 2243 if (IsIntPtrOrSmiConstantZero(capacity)) {
2289 // Array is empty. Use the shared empty fixed array instead of allocating a 2244 // Array is empty. Use the shared empty fixed array instead of allocating a
2290 // new one. 2245 // new one.
2291 array = AllocateUninitializedJSArrayWithoutElements(kind, array_map, length, 2246 array = AllocateUninitializedJSArrayWithoutElements(kind, array_map, length,
2292 nullptr); 2247 nullptr);
2293 StoreObjectFieldRoot(array, JSArray::kElementsOffset, 2248 StoreObjectFieldRoot(array, JSArray::kElementsOffset,
2294 Heap::kEmptyFixedArrayRootIndex); 2249 Heap::kEmptyFixedArrayRootIndex);
2295 } else { 2250 } else {
2296 // Allocate both array and elements object, and initialize the JSArray. 2251 // Allocate both array and elements object, and initialize the JSArray.
(...skipping 13 matching lines...)
2310 Heap::kTheHoleValueRootIndex, capacity_mode); 2265 Heap::kTheHoleValueRootIndex, capacity_mode);
2311 } 2266 }
2312 2267
2313 return array; 2268 return array;
2314 } 2269 }
2315 2270
2316 Node* CodeStubAssembler::AllocateFixedArray(ElementsKind kind, 2271 Node* CodeStubAssembler::AllocateFixedArray(ElementsKind kind,
2317 Node* capacity_node, 2272 Node* capacity_node,
2318 ParameterMode mode, 2273 ParameterMode mode,
2319 AllocationFlags flags) { 2274 AllocationFlags flags) {
2320 CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity_node, mode));
2321 CSA_ASSERT(this, IntPtrOrSmiGreaterThan(capacity_node, 2275 CSA_ASSERT(this, IntPtrOrSmiGreaterThan(capacity_node,
2322 IntPtrOrSmiConstant(0, mode), mode)); 2276 IntPtrOrSmiConstant(0, mode), mode));
2323 Node* total_size = GetFixedArrayAllocationSize(capacity_node, kind, mode); 2277 Node* total_size = GetFixedArrayAllocationSize(capacity_node, kind, mode);
2324 2278
2325 // Allocate both array and elements object, and initialize the JSArray. 2279 // Allocate both array and elements object, and initialize the JSArray.
2326 Node* array = Allocate(total_size, flags); 2280 Node* array = Allocate(total_size, flags);
2327 Heap::RootListIndex map_index = IsFastDoubleElementsKind(kind) 2281 Heap::RootListIndex map_index = IsFastDoubleElementsKind(kind)
2328 ? Heap::kFixedDoubleArrayMapRootIndex 2282 ? Heap::kFixedDoubleArrayMapRootIndex
2329 : Heap::kFixedArrayMapRootIndex; 2283 : Heap::kFixedArrayMapRootIndex;
2330 DCHECK(Heap::RootIsImmortalImmovable(map_index)); 2284 DCHECK(Heap::RootIsImmortalImmovable(map_index));
2331 StoreMapNoWriteBarrier(array, map_index); 2285 StoreMapNoWriteBarrier(array, map_index);
2332 StoreObjectFieldNoWriteBarrier(array, FixedArray::kLengthOffset, 2286 StoreObjectFieldNoWriteBarrier(array, FixedArray::kLengthOffset,
2333 ParameterToTagged(capacity_node, mode)); 2287 ParameterToTagged(capacity_node, mode));
2334 return array; 2288 return array;
2335 } 2289 }
2336 2290
2337 void CodeStubAssembler::FillFixedArrayWithValue( 2291 void CodeStubAssembler::FillFixedArrayWithValue(
2338 ElementsKind kind, Node* array, Node* from_node, Node* to_node, 2292 ElementsKind kind, Node* array, Node* from_node, Node* to_node,
2339 Heap::RootListIndex value_root_index, ParameterMode mode) { 2293 Heap::RootListIndex value_root_index, ParameterMode mode) {
2340 CSA_SLOW_ASSERT(this, MatchesParameterMode(from_node, mode));
2341 CSA_SLOW_ASSERT(this, MatchesParameterMode(to_node, mode));
2342 CSA_SLOW_ASSERT(this, IsFixedArrayWithKind(array, kind));
2343 bool is_double = IsFastDoubleElementsKind(kind); 2294 bool is_double = IsFastDoubleElementsKind(kind);
2344 DCHECK(value_root_index == Heap::kTheHoleValueRootIndex || 2295 DCHECK(value_root_index == Heap::kTheHoleValueRootIndex ||
2345 value_root_index == Heap::kUndefinedValueRootIndex); 2296 value_root_index == Heap::kUndefinedValueRootIndex);
2346 DCHECK_IMPLIES(is_double, value_root_index == Heap::kTheHoleValueRootIndex); 2297 DCHECK_IMPLIES(is_double, value_root_index == Heap::kTheHoleValueRootIndex);
2347 STATIC_ASSERT(kHoleNanLower32 == kHoleNanUpper32); 2298 STATIC_ASSERT(kHoleNanLower32 == kHoleNanUpper32);
2348 Node* double_hole = 2299 Node* double_hole =
2349 Is64() ? Int64Constant(kHoleNanInt64) : Int32Constant(kHoleNanLower32); 2300 Is64() ? Int64Constant(kHoleNanInt64) : Int32Constant(kHoleNanLower32);
2350 Node* value = LoadRoot(value_root_index); 2301 Node* value = LoadRoot(value_root_index);
2351 2302
2352 BuildFastFixedArrayForEach( 2303 BuildFastFixedArrayForEach(
(...skipping 23 matching lines...)
2376 value); 2327 value);
2377 } 2328 }
2378 }, 2329 },
2379 mode); 2330 mode);
2380 } 2331 }
2381 2332
2382 void CodeStubAssembler::CopyFixedArrayElements( 2333 void CodeStubAssembler::CopyFixedArrayElements(
2383 ElementsKind from_kind, Node* from_array, ElementsKind to_kind, 2334 ElementsKind from_kind, Node* from_array, ElementsKind to_kind,
2384 Node* to_array, Node* element_count, Node* capacity, 2335 Node* to_array, Node* element_count, Node* capacity,
2385 WriteBarrierMode barrier_mode, ParameterMode mode) { 2336 WriteBarrierMode barrier_mode, ParameterMode mode) {
2386 CSA_SLOW_ASSERT(this, MatchesParameterMode(element_count, mode));
2387 CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity, mode));
2388 CSA_SLOW_ASSERT(this, IsFixedArrayWithKindOrEmpty(from_array, from_kind));
2389 CSA_SLOW_ASSERT(this, IsFixedArrayWithKindOrEmpty(to_array, to_kind));
2390 STATIC_ASSERT(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize); 2337 STATIC_ASSERT(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize);
2391 const int first_element_offset = FixedArray::kHeaderSize - kHeapObjectTag; 2338 const int first_element_offset = FixedArray::kHeaderSize - kHeapObjectTag;
2392 Comment("[ CopyFixedArrayElements"); 2339 Comment("[ CopyFixedArrayElements");
2393 2340
2394 // Typed array elements are not supported. 2341 // Typed array elements are not supported.
2395 DCHECK(!IsFixedTypedArrayElementsKind(from_kind)); 2342 DCHECK(!IsFixedTypedArrayElementsKind(from_kind));
2396 DCHECK(!IsFixedTypedArrayElementsKind(to_kind)); 2343 DCHECK(!IsFixedTypedArrayElementsKind(to_kind));
2397 2344
2398 Label done(this); 2345 Label done(this);
2399 bool from_double_elements = IsFastDoubleElementsKind(from_kind); 2346 bool from_double_elements = IsFastDoubleElementsKind(from_kind);
(...skipping 116 matching lines...)
2516 IncrementCounter(isolate()->counters()->inlined_copied_elements(), 1); 2463 IncrementCounter(isolate()->counters()->inlined_copied_elements(), 1);
2517 Comment("] CopyFixedArrayElements"); 2464 Comment("] CopyFixedArrayElements");
2518 } 2465 }
2519 2466
2520 void CodeStubAssembler::CopyStringCharacters(Node* from_string, Node* to_string, 2467 void CodeStubAssembler::CopyStringCharacters(Node* from_string, Node* to_string,
2521 Node* from_index, Node* to_index, 2468 Node* from_index, Node* to_index,
2522 Node* character_count, 2469 Node* character_count,
2523 String::Encoding from_encoding, 2470 String::Encoding from_encoding,
2524 String::Encoding to_encoding, 2471 String::Encoding to_encoding,
2525 ParameterMode mode) { 2472 ParameterMode mode) {
2526 // Cannot assert IsString(from_string) and IsString(to_string) here because
2527 // CSA::SubString can pass in faked sequential strings when handling external
2528 // subject strings.
2529 CSA_SLOW_ASSERT(this, MatchesParameterMode(character_count, mode));
2530 CSA_SLOW_ASSERT(this, MatchesParameterMode(from_index, mode));
2531 CSA_SLOW_ASSERT(this, MatchesParameterMode(to_index, mode));
2532 bool from_one_byte = from_encoding == String::ONE_BYTE_ENCODING; 2473 bool from_one_byte = from_encoding == String::ONE_BYTE_ENCODING;
2533 bool to_one_byte = to_encoding == String::ONE_BYTE_ENCODING; 2474 bool to_one_byte = to_encoding == String::ONE_BYTE_ENCODING;
2534 DCHECK_IMPLIES(to_one_byte, from_one_byte); 2475 DCHECK_IMPLIES(to_one_byte, from_one_byte);
2535 Comment("CopyStringCharacters %s -> %s", 2476 Comment("CopyStringCharacters %s -> %s",
2536 from_one_byte ? "ONE_BYTE_ENCODING" : "TWO_BYTE_ENCODING", 2477 from_one_byte ? "ONE_BYTE_ENCODING" : "TWO_BYTE_ENCODING",
2537 to_one_byte ? "ONE_BYTE_ENCODING" : "TWO_BYTE_ENCODING"); 2478 to_one_byte ? "ONE_BYTE_ENCODING" : "TWO_BYTE_ENCODING");
2538 2479
2539 ElementsKind from_kind = from_one_byte ? UINT8_ELEMENTS : UINT16_ELEMENTS; 2480 ElementsKind from_kind = from_one_byte ? UINT8_ELEMENTS : UINT16_ELEMENTS;
2540 ElementsKind to_kind = to_one_byte ? UINT8_ELEMENTS : UINT16_ELEMENTS; 2481 ElementsKind to_kind = to_one_byte ? UINT8_ELEMENTS : UINT16_ELEMENTS;
2541 STATIC_ASSERT(SeqOneByteString::kHeaderSize == SeqTwoByteString::kHeaderSize); 2482 STATIC_ASSERT(SeqOneByteString::kHeaderSize == SeqTwoByteString::kHeaderSize);
(...skipping 38 matching lines...)
2580 } 2521 }
2581 }, 2522 },
2582 from_increment, INTPTR_PARAMETERS, IndexAdvanceMode::kPost); 2523 from_increment, INTPTR_PARAMETERS, IndexAdvanceMode::kPost);
2583 } 2524 }
2584 2525
2585 Node* CodeStubAssembler::LoadElementAndPrepareForStore(Node* array, 2526 Node* CodeStubAssembler::LoadElementAndPrepareForStore(Node* array,
2586 Node* offset, 2527 Node* offset,
2587 ElementsKind from_kind, 2528 ElementsKind from_kind,
2588 ElementsKind to_kind, 2529 ElementsKind to_kind,
2589 Label* if_hole) { 2530 Label* if_hole) {
2590 CSA_SLOW_ASSERT(this, IsFixedArrayWithKind(array, from_kind));
2591 if (IsFastDoubleElementsKind(from_kind)) { 2531 if (IsFastDoubleElementsKind(from_kind)) {
2592 Node* value = 2532 Node* value =
2593 LoadDoubleWithHoleCheck(array, offset, if_hole, MachineType::Float64()); 2533 LoadDoubleWithHoleCheck(array, offset, if_hole, MachineType::Float64());
2594 if (!IsFastDoubleElementsKind(to_kind)) { 2534 if (!IsFastDoubleElementsKind(to_kind)) {
2595 value = AllocateHeapNumberWithValue(value); 2535 value = AllocateHeapNumberWithValue(value);
2596 } 2536 }
2597 return value; 2537 return value;
2598 2538
2599 } else { 2539 } else {
2600 Node* value = Load(MachineType::AnyTagged(), array, offset); 2540 Node* value = Load(MachineType::AnyTagged(), array, offset);
2601 if (if_hole) { 2541 if (if_hole) {
2602 GotoIf(WordEqual(value, TheHoleConstant()), if_hole); 2542 GotoIf(WordEqual(value, TheHoleConstant()), if_hole);
2603 } 2543 }
2604 if (IsFastDoubleElementsKind(to_kind)) { 2544 if (IsFastDoubleElementsKind(to_kind)) {
2605 if (IsFastSmiElementsKind(from_kind)) { 2545 if (IsFastSmiElementsKind(from_kind)) {
2606 value = SmiToFloat64(value); 2546 value = SmiToFloat64(value);
2607 } else { 2547 } else {
2608 value = LoadHeapNumberValue(value); 2548 value = LoadHeapNumberValue(value);
2609 } 2549 }
2610 } 2550 }
2611 return value; 2551 return value;
2612 } 2552 }
2613 } 2553 }
2614 2554
2615 Node* CodeStubAssembler::CalculateNewElementsCapacity(Node* old_capacity, 2555 Node* CodeStubAssembler::CalculateNewElementsCapacity(Node* old_capacity,
2616 ParameterMode mode) { 2556 ParameterMode mode) {
2617 CSA_SLOW_ASSERT(this, MatchesParameterMode(old_capacity, mode));
2618 Node* half_old_capacity = WordOrSmiShr(old_capacity, 1, mode); 2557 Node* half_old_capacity = WordOrSmiShr(old_capacity, 1, mode);
2619 Node* new_capacity = IntPtrOrSmiAdd(half_old_capacity, old_capacity, mode); 2558 Node* new_capacity = IntPtrOrSmiAdd(half_old_capacity, old_capacity, mode);
2620 Node* padding = IntPtrOrSmiConstant(16, mode); 2559 Node* padding = IntPtrOrSmiConstant(16, mode);
2621 return IntPtrOrSmiAdd(new_capacity, padding, mode); 2560 return IntPtrOrSmiAdd(new_capacity, padding, mode);
2622 } 2561 }
2623 2562
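[Editor's note: not part of the patch] CalculateNewElementsCapacity above encodes the usual grow-by-half-plus-padding policy for element backing stores. The same formula in scalar form (illustrative sketch, not V8 API):

    // new_capacity = old_capacity + old_capacity / 2 + 16
    size_t NewElementsCapacity(size_t old_capacity) {
      return old_capacity + (old_capacity >> 1) + 16;
    }
    // e.g. 0 -> 16, 16 -> 40, 100 -> 166
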
2624 Node* CodeStubAssembler::TryGrowElementsCapacity(Node* object, Node* elements, 2563 Node* CodeStubAssembler::TryGrowElementsCapacity(Node* object, Node* elements,
2625 ElementsKind kind, Node* key, 2564 ElementsKind kind, Node* key,
2626 Label* bailout) { 2565 Label* bailout) {
2627 CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object));
2628 CSA_SLOW_ASSERT(this, IsFixedArrayWithKindOrEmpty(elements, kind));
2629 CSA_SLOW_ASSERT(this, TaggedIsSmi(key));
2630 Node* capacity = LoadFixedArrayBaseLength(elements); 2566 Node* capacity = LoadFixedArrayBaseLength(elements);
2631 2567
2632 ParameterMode mode = OptimalParameterMode(); 2568 ParameterMode mode = OptimalParameterMode();
2633 capacity = TaggedToParameter(capacity, mode); 2569 capacity = TaggedToParameter(capacity, mode);
2634 key = TaggedToParameter(key, mode); 2570 key = TaggedToParameter(key, mode);
2635 2571
2636 return TryGrowElementsCapacity(object, elements, kind, key, capacity, mode, 2572 return TryGrowElementsCapacity(object, elements, kind, key, capacity, mode,
2637 bailout); 2573 bailout);
2638 } 2574 }
2639 2575
2640 Node* CodeStubAssembler::TryGrowElementsCapacity(Node* object, Node* elements, 2576 Node* CodeStubAssembler::TryGrowElementsCapacity(Node* object, Node* elements,
2641 ElementsKind kind, Node* key, 2577 ElementsKind kind, Node* key,
2642 Node* capacity, 2578 Node* capacity,
2643 ParameterMode mode, 2579 ParameterMode mode,
2644 Label* bailout) { 2580 Label* bailout) {
2645 Comment("TryGrowElementsCapacity"); 2581 Comment("TryGrowElementsCapacity");
2646 CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object));
2647 CSA_SLOW_ASSERT(this, IsFixedArrayWithKindOrEmpty(elements, kind));
2648 CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity, mode));
2649 CSA_SLOW_ASSERT(this, MatchesParameterMode(key, mode));
2650 2582
2651 // If the gap growth is too big, fall back to the runtime. 2583 // If the gap growth is too big, fall back to the runtime.
2652 Node* max_gap = IntPtrOrSmiConstant(JSObject::kMaxGap, mode); 2584 Node* max_gap = IntPtrOrSmiConstant(JSObject::kMaxGap, mode);
2653 Node* max_capacity = IntPtrOrSmiAdd(capacity, max_gap, mode); 2585 Node* max_capacity = IntPtrOrSmiAdd(capacity, max_gap, mode);
2654 GotoIf(UintPtrOrSmiGreaterThanOrEqual(key, max_capacity, mode), bailout); 2586 GotoIf(UintPtrOrSmiGreaterThanOrEqual(key, max_capacity, mode), bailout);
2655 2587
2656 // Calculate the capacity of the new backing store. 2588 // Calculate the capacity of the new backing store.
2657 Node* new_capacity = CalculateNewElementsCapacity( 2589 Node* new_capacity = CalculateNewElementsCapacity(
2658 IntPtrOrSmiAdd(key, IntPtrOrSmiConstant(1, mode), mode), mode); 2590 IntPtrOrSmiAdd(key, IntPtrOrSmiConstant(1, mode), mode), mode);
2659 return GrowElementsCapacity(object, elements, kind, kind, capacity, 2591 return GrowElementsCapacity(object, elements, kind, kind, capacity,
2660 new_capacity, mode, bailout); 2592 new_capacity, mode, bailout);
2661 } 2593 }
2662 2594
2663 Node* CodeStubAssembler::GrowElementsCapacity( 2595 Node* CodeStubAssembler::GrowElementsCapacity(
2664 Node* object, Node* elements, ElementsKind from_kind, ElementsKind to_kind, 2596 Node* object, Node* elements, ElementsKind from_kind, ElementsKind to_kind,
2665 Node* capacity, Node* new_capacity, ParameterMode mode, Label* bailout) { 2597 Node* capacity, Node* new_capacity, ParameterMode mode, Label* bailout) {
2666 Comment("[ GrowElementsCapacity"); 2598 Comment("[ GrowElementsCapacity");
2667 CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object));
2668 CSA_SLOW_ASSERT(this, IsFixedArrayWithKindOrEmpty(elements, from_kind));
2669 CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity, mode));
2670 CSA_SLOW_ASSERT(this, MatchesParameterMode(new_capacity, mode));
2671
2672 // If size of the allocation for the new capacity doesn't fit in a page 2599 // If size of the allocation for the new capacity doesn't fit in a page
2673 // that we can bump-pointer allocate from, fall back to the runtime. 2600 // that we can bump-pointer allocate from, fall back to the runtime.
2674 int max_size = FixedArrayBase::GetMaxLengthForNewSpaceAllocation(to_kind); 2601 int max_size = FixedArrayBase::GetMaxLengthForNewSpaceAllocation(to_kind);
2675 GotoIf(UintPtrOrSmiGreaterThanOrEqual( 2602 GotoIf(UintPtrOrSmiGreaterThanOrEqual(
2676 new_capacity, IntPtrOrSmiConstant(max_size, mode), mode), 2603 new_capacity, IntPtrOrSmiConstant(max_size, mode), mode),
2677 bailout); 2604 bailout);
2678 2605
2679 // Allocate the new backing store. 2606 // Allocate the new backing store.
2680 Node* new_elements = AllocateFixedArray(to_kind, new_capacity, mode); 2607 Node* new_elements = AllocateFixedArray(to_kind, new_capacity, mode);
2681 2608
(...skipping 315 matching lines...)
2997 // The {value} is a Smi, convert it to a String. 2924 // The {value} is a Smi, convert it to a String.
2998 Callable callable = CodeFactory::NumberToString(isolate()); 2925 Callable callable = CodeFactory::NumberToString(isolate());
2999 var_value.Bind(CallStub(callable, context, value)); 2926 var_value.Bind(CallStub(callable, context, value));
3000 Goto(&if_valueisstring); 2927 Goto(&if_valueisstring);
3001 } 2928 }
3002 BIND(&if_valueisstring); 2929 BIND(&if_valueisstring);
3003 return var_value.value(); 2930 return var_value.value();
3004 } 2931 }
3005 2932
3006 Node* CodeStubAssembler::ChangeNumberToFloat64(Node* value) { 2933 Node* CodeStubAssembler::ChangeNumberToFloat64(Node* value) {
3007 CSA_SLOW_ASSERT(this, IsNumber(value));
3008 VARIABLE(result, MachineRepresentation::kFloat64); 2934 VARIABLE(result, MachineRepresentation::kFloat64);
3009 Label smi(this); 2935 Label smi(this);
3010 Label done(this, &result); 2936 Label done(this, &result);
3011 GotoIf(TaggedIsSmi(value), &smi); 2937 GotoIf(TaggedIsSmi(value), &smi);
3012 result.Bind( 2938 result.Bind(
3013 LoadObjectField(value, HeapNumber::kValueOffset, MachineType::Float64())); 2939 LoadObjectField(value, HeapNumber::kValueOffset, MachineType::Float64()));
3014 Goto(&done); 2940 Goto(&done);
3015 2941
3016 BIND(&smi); 2942 BIND(&smi);
3017 { 2943 {
3018 result.Bind(SmiToFloat64(value)); 2944 result.Bind(SmiToFloat64(value));
3019 Goto(&done); 2945 Goto(&done);
3020 } 2946 }
3021 2947
3022 BIND(&done); 2948 BIND(&done);
3023 return result.value(); 2949 return result.value();
3024 } 2950 }
3025 2951
3026 Node* CodeStubAssembler::ChangeNumberToIntPtr(Node* value) { 2952 Node* CodeStubAssembler::ChangeNumberToIntPtr(Node* value) {
3027 CSA_SLOW_ASSERT(this, IsNumber(value));
3028 VARIABLE(result, MachineType::PointerRepresentation()); 2953 VARIABLE(result, MachineType::PointerRepresentation());
3029 Label smi(this), done(this, &result); 2954 Label smi(this), done(this, &result);
3030 GotoIf(TaggedIsSmi(value), &smi); 2955 GotoIf(TaggedIsSmi(value), &smi);
3031 2956
3032 CSA_ASSERT(this, IsHeapNumber(value)); 2957 CSA_ASSERT(this, IsHeapNumber(value));
3033 result.Bind(ChangeFloat64ToUintPtr(LoadHeapNumberValue(value))); 2958 result.Bind(ChangeFloat64ToUintPtr(LoadHeapNumberValue(value)));
3034 Goto(&done); 2959 Goto(&done);
3035 2960
3036 BIND(&smi); 2961 BIND(&smi);
3037 result.Bind(SmiToWord(value)); 2962 result.Bind(SmiToWord(value));
(...skipping 121 matching lines...)
3159 3084
3160 BIND(&out); 3085 BIND(&out);
3161 return var_value_map.value(); 3086 return var_value_map.value();
3162 } 3087 }
3163 3088
3164 Node* CodeStubAssembler::InstanceTypeEqual(Node* instance_type, int type) { 3089 Node* CodeStubAssembler::InstanceTypeEqual(Node* instance_type, int type) {
3165 return Word32Equal(instance_type, Int32Constant(type)); 3090 return Word32Equal(instance_type, Int32Constant(type));
3166 } 3091 }
3167 3092
3168 Node* CodeStubAssembler::IsSpecialReceiverMap(Node* map) { 3093 Node* CodeStubAssembler::IsSpecialReceiverMap(Node* map) {
3169 CSA_SLOW_ASSERT(this, IsMap(map));
3170 Node* is_special = IsSpecialReceiverInstanceType(LoadMapInstanceType(map)); 3094 Node* is_special = IsSpecialReceiverInstanceType(LoadMapInstanceType(map));
3171 uint32_t mask = 3095 uint32_t mask =
3172 1 << Map::kHasNamedInterceptor | 1 << Map::kIsAccessCheckNeeded; 3096 1 << Map::kHasNamedInterceptor | 1 << Map::kIsAccessCheckNeeded;
3173 USE(mask); 3097 USE(mask);
3174 // Interceptors or access checks imply special receiver. 3098 // Interceptors or access checks imply special receiver.
3175 CSA_ASSERT(this, 3099 CSA_ASSERT(this,
3176 SelectConstant(IsSetWord32(LoadMapBitField(map), mask), is_special, 3100 SelectConstant(IsSetWord32(LoadMapBitField(map), mask), is_special,
3177 Int32Constant(1), MachineRepresentation::kWord32)); 3101 Int32Constant(1), MachineRepresentation::kWord32));
3178 return is_special; 3102 return is_special;
3179 } 3103 }
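The assertion above encodes a one-way implication ("interceptors or access checks imply special receiver"); as plain boolean logic it is just:

// The assert only requires is_special to hold whenever an interceptor or
// access-check bit is set on the map; the reverse direction is not checked.
bool SpecialReceiverAssertHolds(bool has_interceptor_or_access_check,
                                bool is_special) {
  return !has_interceptor_or_access_check || is_special;
}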
(...skipping 106 matching lines...)
3286 3210
3287 Node* CodeStubAssembler::IsJSObject(Node* object) { 3211 Node* CodeStubAssembler::IsJSObject(Node* object) {
3288 return IsJSObjectMap(LoadMap(object)); 3212 return IsJSObjectMap(LoadMap(object));
3289 } 3213 }
3290 3214
3291 Node* CodeStubAssembler::IsJSGlobalProxy(Node* object) { 3215 Node* CodeStubAssembler::IsJSGlobalProxy(Node* object) {
3292 return Word32Equal(LoadInstanceType(object), 3216 return Word32Equal(LoadInstanceType(object),
3293 Int32Constant(JS_GLOBAL_PROXY_TYPE)); 3217 Int32Constant(JS_GLOBAL_PROXY_TYPE));
3294 } 3218 }
3295 3219
3296 Node* CodeStubAssembler::IsMap(Node* map) { return IsMetaMap(LoadMap(map)); } 3220 Node* CodeStubAssembler::IsMap(Node* map) {
3221 return HasInstanceType(map, MAP_TYPE);
3222 }
3297 3223
3298 Node* CodeStubAssembler::IsJSValueInstanceType(Node* instance_type) { 3224 Node* CodeStubAssembler::IsJSValueInstanceType(Node* instance_type) {
3299 return Word32Equal(instance_type, Int32Constant(JS_VALUE_TYPE)); 3225 return Word32Equal(instance_type, Int32Constant(JS_VALUE_TYPE));
3300 } 3226 }
3301 3227
3302 Node* CodeStubAssembler::IsJSValue(Node* object) { 3228 Node* CodeStubAssembler::IsJSValue(Node* object) {
3303 return IsJSValueMap(LoadMap(object)); 3229 return IsJSValueMap(LoadMap(object));
3304 } 3230 }
3305 3231
3306 Node* CodeStubAssembler::IsJSValueMap(Node* map) { 3232 Node* CodeStubAssembler::IsJSValueMap(Node* map) {
3307 return IsJSValueInstanceType(LoadMapInstanceType(map)); 3233 return IsJSValueInstanceType(LoadMapInstanceType(map));
3308 } 3234 }
3309 3235
3310 Node* CodeStubAssembler::IsJSArrayInstanceType(Node* instance_type) { 3236 Node* CodeStubAssembler::IsJSArrayInstanceType(Node* instance_type) {
3311 return Word32Equal(instance_type, Int32Constant(JS_ARRAY_TYPE)); 3237 return Word32Equal(instance_type, Int32Constant(JS_ARRAY_TYPE));
3312 } 3238 }
3313 3239
3314 Node* CodeStubAssembler::IsJSArray(Node* object) { 3240 Node* CodeStubAssembler::IsJSArray(Node* object) {
3315 return IsJSArrayMap(LoadMap(object)); 3241 return IsJSArrayMap(LoadMap(object));
3316 } 3242 }
3317 3243
3318 Node* CodeStubAssembler::IsJSArrayMap(Node* map) { 3244 Node* CodeStubAssembler::IsJSArrayMap(Node* map) {
3319 return IsJSArrayInstanceType(LoadMapInstanceType(map)); 3245 return IsJSArrayInstanceType(LoadMapInstanceType(map));
3320 } 3246 }
3321 3247
3322 Node* CodeStubAssembler::IsFixedArray(Node* object) {
3323 return HasInstanceType(object, FIXED_ARRAY_TYPE);
3324 }
3325
3326 // This complicated check is due to elements oddities. If a smi array is empty
3327 // after Array.p.shift, it is replaced by the empty array constant. If it is
3328 // later filled with a double element, we try to grow it but pass in a double
3329 // elements kind. Usually this would cause a size mismatch (since the source
3330 // fixed array has FAST_HOLEY_ELEMENTS and destination has
3331 // FAST_HOLEY_DOUBLE_ELEMENTS), but we don't have to worry about it when the
3332 // source array is empty.
3333 // TODO(jgruber): It might be worth creating an empty_double_array constant to
3334 // simplify this case.
3335 Node* CodeStubAssembler::IsFixedArrayWithKindOrEmpty(Node* object,
3336 ElementsKind kind) {
3337 Label out(this);
3338 VARIABLE(var_result, MachineRepresentation::kWord32, Int32Constant(1));
3339
3340 GotoIf(IsFixedArrayWithKind(object, kind), &out);
3341
3342 Node* const length = LoadFixedArrayBaseLength(object);
3343 GotoIf(SmiEqual(length, SmiConstant(0)), &out);
3344
3345 var_result.Bind(Int32Constant(0));
3346 Goto(&out);
3347
3348 BIND(&out);
3349 return var_result.value();
3350 }
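Read as ordinary boolean logic, the predicate above reduces to the following sketch (plain C++, types assumed):

struct BackingStore {
  bool is_double_array;  // FixedDoubleArray vs. FixedArray
  int length;
};

// Accept the store if it already has the requested physical kind, or if it is
// empty: growing an empty store into one of a different kind copies nothing,
// so the kind mismatch described in the comment above is harmless.
bool KindMatchesOrEmpty(const BackingStore& store, bool want_double_kind) {
  if (store.is_double_array == want_double_kind) return true;
  return store.length == 0;
}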
3351
3352 Node* CodeStubAssembler::IsFixedArrayWithKind(Node* object, ElementsKind kind) {
3353 if (IsFastDoubleElementsKind(kind)) {
3354 return IsFixedDoubleArray(object);
3355 } else {
3356 DCHECK(IsFastSmiOrObjectElementsKind(kind));
3357 return IsFixedArray(object);
3358 }
3359 }
3360
3361 Node* CodeStubAssembler::IsWeakCell(Node* object) { 3248 Node* CodeStubAssembler::IsWeakCell(Node* object) {
3362 return IsWeakCellMap(LoadMap(object)); 3249 return IsWeakCellMap(LoadMap(object));
3363 } 3250 }
3364 3251
3365 Node* CodeStubAssembler::IsBoolean(Node* object) { 3252 Node* CodeStubAssembler::IsBoolean(Node* object) {
3366 return IsBooleanMap(LoadMap(object)); 3253 return IsBooleanMap(LoadMap(object));
3367 } 3254 }
3368 3255
3369 Node* CodeStubAssembler::IsPropertyCell(Node* object) { 3256 Node* CodeStubAssembler::IsPropertyCell(Node* object) {
3370 return IsPropertyCellMap(LoadMap(object)); 3257 return IsPropertyCellMap(LoadMap(object));
3371 } 3258 }
3372 3259
3373 Node* CodeStubAssembler::IsAccessorInfo(Node* object) { 3260 Node* CodeStubAssembler::IsAccessorInfo(Node* object) {
3374 return IsAccessorInfoMap(LoadMap(object)); 3261 return IsAccessorInfoMap(LoadMap(object));
3375 } 3262 }
3376 3263
3377 Node* CodeStubAssembler::IsAccessorPair(Node* object) { 3264 Node* CodeStubAssembler::IsAccessorPair(Node* object) {
3378 return IsAccessorPairMap(LoadMap(object)); 3265 return IsAccessorPairMap(LoadMap(object));
3379 } 3266 }
3380 3267
3381 Node* CodeStubAssembler::IsAnyHeapNumber(Node* object) {
3382 return Word32Or(IsMutableHeapNumber(object), IsHeapNumber(object));
3383 }
3384
3385 Node* CodeStubAssembler::IsMutableHeapNumber(Node* object) {
3386 return IsMutableHeapNumberMap(LoadMap(object));
3387 }
3388
3389 Node* CodeStubAssembler::IsHeapNumber(Node* object) { 3268 Node* CodeStubAssembler::IsHeapNumber(Node* object) {
3390 return IsHeapNumberMap(LoadMap(object)); 3269 return IsHeapNumberMap(LoadMap(object));
3391 } 3270 }
3392 3271
3393 Node* CodeStubAssembler::IsFeedbackVector(Node* object) { 3272 Node* CodeStubAssembler::IsFeedbackVector(Node* object) {
3394 return IsFeedbackVectorMap(LoadMap(object)); 3273 return IsFeedbackVectorMap(LoadMap(object));
3395 } 3274 }
3396 3275
3397 Node* CodeStubAssembler::IsName(Node* object) { 3276 Node* CodeStubAssembler::IsName(Node* object) {
3398 return Int32LessThanOrEqual(LoadInstanceType(object), 3277 return Int32LessThanOrEqual(LoadInstanceType(object),
(...skipping 111 matching lines...)
3510 [=] { return TaggedIsPositiveSmi(number); }, 3389 [=] { return TaggedIsPositiveSmi(number); },
3511 [=] { 3390 [=] {
3512 Node* v = LoadHeapNumberValue(number); 3391 Node* v = LoadHeapNumberValue(number);
3513 return Float64GreaterThanOrEqual(v, float_zero); 3392 return Float64GreaterThanOrEqual(v, float_zero);
3514 }, 3393 },
3515 MachineRepresentation::kWord32); 3394 MachineRepresentation::kWord32);
3516 } 3395 }
3517 3396
3518 Node* CodeStubAssembler::StringCharCodeAt(Node* string, Node* index, 3397 Node* CodeStubAssembler::StringCharCodeAt(Node* string, Node* index,
3519 ParameterMode parameter_mode) { 3398 ParameterMode parameter_mode) {
3520 CSA_ASSERT(this, MatchesParameterMode(index, parameter_mode)); 3399 if (parameter_mode == SMI_PARAMETERS) CSA_ASSERT(this, TaggedIsSmi(index));
3521 CSA_ASSERT(this, IsString(string)); 3400 CSA_ASSERT(this, IsString(string));
3522 3401
3523 // Translate the {index} into a Word. 3402 // Translate the {index} into a Word.
3524 Node* const int_index = ParameterToWord(index, parameter_mode); 3403 Node* const int_index = ParameterToWord(index, parameter_mode);
3525 CSA_ASSERT(this, IntPtrGreaterThanOrEqual(int_index, IntPtrConstant(0))); 3404 CSA_ASSERT(this, IntPtrGreaterThanOrEqual(int_index, IntPtrConstant(0)));
3526 3405
3527 VARIABLE(var_result, MachineRepresentation::kWord32); 3406 VARIABLE(var_result, MachineRepresentation::kWord32);
3528 3407
3529 Label out(this, &var_result), runtime_generic(this), runtime_external(this); 3408 Label out(this, &var_result), runtime_generic(this), runtime_external(this);
3530 3409
(...skipping 91 matching lines...)
3622 // Allocate a new SeqTwoByteString for {code}. 3501 // Allocate a new SeqTwoByteString for {code}.
3623 Node* result = AllocateSeqTwoByteString(1); 3502 Node* result = AllocateSeqTwoByteString(1);
3624 StoreNoWriteBarrier( 3503 StoreNoWriteBarrier(
3625 MachineRepresentation::kWord16, result, 3504 MachineRepresentation::kWord16, result,
3626 IntPtrConstant(SeqTwoByteString::kHeaderSize - kHeapObjectTag), code); 3505 IntPtrConstant(SeqTwoByteString::kHeaderSize - kHeapObjectTag), code);
3627 var_result.Bind(result); 3506 var_result.Bind(result);
3628 Goto(&if_done); 3507 Goto(&if_done);
3629 } 3508 }
3630 3509
3631 BIND(&if_done); 3510 BIND(&if_done);
3632 CSA_ASSERT(this, IsString(var_result.value()));
3633 return var_result.value(); 3511 return var_result.value();
3634 } 3512 }
3635 3513
3636 // A wrapper around CopyStringCharacters which determines the correct string 3514 // A wrapper around CopyStringCharacters which determines the correct string
3637 // encoding, allocates a corresponding sequential string, and then copies the 3515 // encoding, allocates a corresponding sequential string, and then copies the
3638 // given character range using CopyStringCharacters. 3516 // given character range using CopyStringCharacters.
3639 // |from_string| must be a sequential string. |from_index| and 3517 // |from_string| must be a sequential string. |from_index| and
3640 // |character_count| must be Smis s.t. 3518 // |character_count| must be Smis s.t.
3641 // 0 <= |from_index| <= |from_index| + |character_count| < from_string.length. 3519 // 0 <= |from_index| <= |from_index| + |character_count| < from_string.length.
3642 Node* CodeStubAssembler::AllocAndCopyStringCharacters(Node* context, Node* from, 3520 Node* CodeStubAssembler::AllocAndCopyStringCharacters(Node* context, Node* from,
(...skipping 182 matching lines...)
3825 3703
3826 // Fall back to a runtime call. 3704 // Fall back to a runtime call.
3827 BIND(&runtime); 3705 BIND(&runtime);
3828 { 3706 {
3829 var_result.Bind( 3707 var_result.Bind(
3830 CallRuntime(Runtime::kSubString, context, string, from, to)); 3708 CallRuntime(Runtime::kSubString, context, string, from, to));
3831 Goto(&end); 3709 Goto(&end);
3832 } 3710 }
3833 3711
3834 BIND(&end); 3712 BIND(&end);
3835 CSA_ASSERT(this, IsString(var_result.value()));
3836 return var_result.value(); 3713 return var_result.value();
3837 } 3714 }
3838 3715
3839 ToDirectStringAssembler::ToDirectStringAssembler( 3716 ToDirectStringAssembler::ToDirectStringAssembler(
3840 compiler::CodeAssemblerState* state, Node* string, Flags flags) 3717 compiler::CodeAssemblerState* state, Node* string, Flags flags)
3841 : CodeStubAssembler(state), 3718 : CodeStubAssembler(state),
3842 var_string_(this, MachineRepresentation::kTagged, string), 3719 var_string_(this, MachineRepresentation::kTagged, string),
3843 var_instance_type_(this, MachineRepresentation::kWord32), 3720 var_instance_type_(this, MachineRepresentation::kWord32),
3844 var_offset_(this, MachineType::PointerRepresentation()), 3721 var_offset_(this, MachineType::PointerRepresentation()),
3845 var_is_external_(this, MachineRepresentation::kWord32), 3722 var_is_external_(this, MachineRepresentation::kWord32),
(...skipping 127 matching lines...)
3973 kHeapObjectTag)); 3850 kHeapObjectTag));
3974 } 3851 }
3975 var_result.Bind(result); 3852 var_result.Bind(result);
3976 Goto(&out); 3853 Goto(&out);
3977 } 3854 }
3978 3855
3979 BIND(&out); 3856 BIND(&out);
3980 return var_result.value(); 3857 return var_result.value();
3981 } 3858 }
3982 3859
3860 Node* CodeStubAssembler::TryDerefExternalString(Node* const string,
3861 Node* const instance_type,
3862 Label* if_bailout) {
3863 Label out(this);
3864
3865 CSA_ASSERT(this, IsExternalStringInstanceType(instance_type));
3866 GotoIf(IsShortExternalStringInstanceType(instance_type), if_bailout);
3867
3868 // Move the pointer so that offset-wise, it looks like a sequential string.
3869 STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
3870
3871 Node* resource_data = LoadObjectField(
3872 string, ExternalString::kResourceDataOffset, MachineType::Pointer());
3873 Node* const fake_sequential_string =
3874 IntPtrSub(resource_data,
3875 IntPtrConstant(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
3876
3877 return fake_sequential_string;
3878 }
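The pointer adjustment above can be sketched in plain C++; the header size and tag value are assumptions for illustration:

#include <cstdint>

constexpr intptr_t kSeqStringHeaderSizeAssumed = 16;  // assumption
constexpr intptr_t kHeapObjectTagAssumed = 1;         // assumption

// Shift the external resource pointer back by one header so that the usual
// "string + header - tag + index * char_size" addressing used for sequential
// strings lands on the external character data without a special case.
intptr_t MakeFakeSequentialString(intptr_t resource_data) {
  return resource_data - (kSeqStringHeaderSizeAssumed - kHeapObjectTagAssumed);
}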
3879
3983 void CodeStubAssembler::MaybeDerefIndirectString(Variable* var_string, 3880 void CodeStubAssembler::MaybeDerefIndirectString(Variable* var_string,
3984 Node* instance_type, 3881 Node* instance_type,
3985 Variable* var_did_something) { 3882 Variable* var_did_something) {
3986 Label deref(this), done(this, var_did_something); 3883 Label deref(this), done(this, var_did_something);
3987 Node* representation = 3884 Node* representation =
3988 Word32And(instance_type, Int32Constant(kStringRepresentationMask)); 3885 Word32And(instance_type, Int32Constant(kStringRepresentationMask));
3989 GotoIf(Word32Equal(representation, Int32Constant(kThinStringTag)), &deref); 3886 GotoIf(Word32Equal(representation, Int32Constant(kThinStringTag)), &deref);
3990 GotoIf(Word32NotEqual(representation, Int32Constant(kConsStringTag)), &done); 3887 GotoIf(Word32NotEqual(representation, Int32Constant(kConsStringTag)), &done);
3991 // Cons string. 3888 // Cons string.
3992 Node* rhs = LoadObjectField(var_string->value(), ConsString::kSecondOffset); 3889 Node* rhs = LoadObjectField(var_string->value(), ConsString::kSecondOffset);
(...skipping 185 matching lines...)
4178 Node* value = AllocateSeqTwoByteString(2); 4075 Node* value = AllocateSeqTwoByteString(2);
4179 StoreNoWriteBarrier( 4076 StoreNoWriteBarrier(
4180 MachineRepresentation::kWord32, value, 4077 MachineRepresentation::kWord32, value,
4181 IntPtrConstant(SeqTwoByteString::kHeaderSize - kHeapObjectTag), 4078 IntPtrConstant(SeqTwoByteString::kHeaderSize - kHeapObjectTag),
4182 codepoint); 4079 codepoint);
4183 var_result.Bind(value); 4080 var_result.Bind(value);
4184 Goto(&return_result); 4081 Goto(&return_result);
4185 } 4082 }
4186 4083
4187 BIND(&return_result); 4084 BIND(&return_result);
4188 CSA_ASSERT(this, IsString(var_result.value()));
4189 return var_result.value(); 4085 return var_result.value();
4190 } 4086 }
4191 4087
4192 Node* CodeStubAssembler::StringToNumber(Node* context, Node* input) { 4088 Node* CodeStubAssembler::StringToNumber(Node* context, Node* input) {
4193 CSA_SLOW_ASSERT(this, IsString(input));
4194 Label runtime(this, Label::kDeferred); 4089 Label runtime(this, Label::kDeferred);
4195 Label end(this); 4090 Label end(this);
4196 4091
4197 VARIABLE(var_result, MachineRepresentation::kTagged); 4092 VARIABLE(var_result, MachineRepresentation::kTagged);
4198 4093
4199 // Check if string has a cached array index. 4094 // Check if string has a cached array index.
4200 Node* hash = LoadNameHashField(input); 4095 Node* hash = LoadNameHashField(input);
4201 Node* bit = 4096 Node* bit =
4202 Word32And(hash, Int32Constant(String::kContainsCachedArrayIndexMask)); 4097 Word32And(hash, Int32Constant(String::kContainsCachedArrayIndexMask));
4203 GotoIf(Word32NotEqual(bit, Int32Constant(0)), &runtime); 4098 GotoIf(Word32NotEqual(bit, Int32Constant(0)), &runtime);
(...skipping 34 matching lines...)
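The cached-array-index fast path above boils down to a mask and a shift on the hash field; a sketch with the bit layout assumed (the real masks live in Name/String):

#include <cstdint>

constexpr uint32_t kContainsCachedArrayIndexMaskAssumed = 1u << 31;  // assumption
constexpr uint32_t kArrayIndexValueShiftAssumed = 2;                 // assumption
constexpr uint32_t kArrayIndexValueBitsAssumed = 24;                 // assumption

// If the "contains cached array index" test bits are clear, the numeric value
// of the string is already encoded in its hash field; otherwise StringToNumber
// falls through to the runtime.
bool TryGetCachedArrayIndex(uint32_t hash_field, uint32_t* out_index) {
  if ((hash_field & kContainsCachedArrayIndexMaskAssumed) != 0) return false;
  *out_index = (hash_field >> kArrayIndexValueShiftAssumed) &
               ((1u << kArrayIndexValueBitsAssumed) - 1);
  return true;
}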
4238 GotoIfNot(IsHeapNumberMap(map), &runtime); 4133 GotoIfNot(IsHeapNumberMap(map), &runtime);
4239 4134
4240 // Make a hash from the two 32-bit values of the double. 4135 // Make a hash from the two 32-bit values of the double.
4241 Node* low = 4136 Node* low =
4242 LoadObjectField(argument, HeapNumber::kValueOffset, MachineType::Int32()); 4137 LoadObjectField(argument, HeapNumber::kValueOffset, MachineType::Int32());
4243 Node* high = LoadObjectField(argument, HeapNumber::kValueOffset + kIntSize, 4138 Node* high = LoadObjectField(argument, HeapNumber::kValueOffset + kIntSize,
4244 MachineType::Int32()); 4139 MachineType::Int32());
4245 Node* hash = Word32Xor(low, high); 4140 Node* hash = Word32Xor(low, high);
4246 hash = ChangeInt32ToIntPtr(hash); 4141 hash = ChangeInt32ToIntPtr(hash);
4247 hash = WordShl(hash, one); 4142 hash = WordShl(hash, one);
4248 Node* index = WordAnd(hash, WordSar(mask, SmiShiftBitsConstant())); 4143 Node* index = WordAnd(hash, SmiUntag(BitcastWordToTagged(mask)));
4249 4144
4250 // Cache entry's key must be a heap number 4145 // Cache entry's key must be a heap number
4251 Node* number_key = LoadFixedArrayElement(number_string_cache, index); 4146 Node* number_key = LoadFixedArrayElement(number_string_cache, index);
4252 GotoIf(TaggedIsSmi(number_key), &runtime); 4147 GotoIf(TaggedIsSmi(number_key), &runtime);
4253 map = LoadMap(number_key); 4148 map = LoadMap(number_key);
4254 GotoIfNot(IsHeapNumberMap(map), &runtime); 4149 GotoIfNot(IsHeapNumberMap(map), &runtime);
4255 4150
4256 // Cache entry's key must match the heap number value we're looking for. 4151 // Cache entry's key must match the heap number value we're looking for.
4257 Node* low_compare = LoadObjectField(number_key, HeapNumber::kValueOffset, 4152 Node* low_compare = LoadObjectField(number_key, HeapNumber::kValueOffset,
4258 MachineType::Int32()); 4153 MachineType::Int32());
(...skipping 24 matching lines...)
4283 GotoIf(WordNotEqual(smi_key, argument), &runtime); 4178 GotoIf(WordNotEqual(smi_key, argument), &runtime);
4284 4179
4285 // Smi match, return value from cache entry. 4180 // Smi match, return value from cache entry.
4286 IncrementCounter(isolate()->counters()->number_to_string_native(), 1); 4181 IncrementCounter(isolate()->counters()->number_to_string_native(), 1);
4287 result.Bind(LoadFixedArrayElement(number_string_cache, smi_index, 4182 result.Bind(LoadFixedArrayElement(number_string_cache, smi_index,
4288 kPointerSize, SMI_PARAMETERS)); 4183 kPointerSize, SMI_PARAMETERS));
4289 Goto(&done); 4184 Goto(&done);
4290 } 4185 }
4291 4186
4292 BIND(&done); 4187 BIND(&done);
4293 CSA_ASSERT(this, IsString(result.value()));
4294 return result.value(); 4188 return result.value();
4295 } 4189 }
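A hedged sketch of the heap-number cache probe computed above; the hash is the xor of the two 32-bit halves as in the code, while the cache layout (key at slot 2*i, cached string at 2*i + 1) is an assumption:

#include <cstdint>
#include <cstring>

uint32_t NumberStringCacheKeyIndex(double value, uint32_t cache_length) {
  uint64_t bits;
  std::memcpy(&bits, &value, sizeof bits);
  uint32_t hash =
      static_cast<uint32_t>(bits) ^ static_cast<uint32_t>(bits >> 32);
  uint32_t entry_count = cache_length / 2;  // assumed to be a power of two
  return (hash & (entry_count - 1)) * 2;    // key slot; the value sits at +1
}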
4296 4190
4297 Node* CodeStubAssembler::ToName(Node* context, Node* value) { 4191 Node* CodeStubAssembler::ToName(Node* context, Node* value) {
4298 Label end(this); 4192 Label end(this);
4299 VARIABLE(var_result, MachineRepresentation::kTagged); 4193 VARIABLE(var_result, MachineRepresentation::kTagged);
4300 4194
4301 Label is_number(this); 4195 Label is_number(this);
4302 GotoIf(TaggedIsSmi(value), &is_number); 4196 GotoIf(TaggedIsSmi(value), &is_number);
4303 4197
(...skipping 26 matching lines...)
4330 Goto(&end); 4224 Goto(&end);
4331 4225
4332 BIND(&not_oddball); 4226 BIND(&not_oddball);
4333 { 4227 {
4334 var_result.Bind(CallRuntime(Runtime::kToName, context, value)); 4228 var_result.Bind(CallRuntime(Runtime::kToName, context, value));
4335 Goto(&end); 4229 Goto(&end);
4336 } 4230 }
4337 } 4231 }
4338 4232
4339 BIND(&end); 4233 BIND(&end);
4340 CSA_ASSERT(this, IsName(var_result.value()));
4341 return var_result.value(); 4234 return var_result.value();
4342 } 4235 }
4343 4236
4344 Node* CodeStubAssembler::NonNumberToNumber(Node* context, Node* input) { 4237 Node* CodeStubAssembler::NonNumberToNumber(Node* context, Node* input) {
4345 // Assert input is a HeapObject (not smi or heap number) 4238 // Assert input is a HeapObject (not smi or heap number)
4346 CSA_ASSERT(this, Word32BinaryNot(TaggedIsSmi(input))); 4239 CSA_ASSERT(this, Word32BinaryNot(TaggedIsSmi(input)));
4347 CSA_ASSERT(this, Word32BinaryNot(IsHeapNumberMap(LoadMap(input)))); 4240 CSA_ASSERT(this, Word32BinaryNot(IsHeapNumberMap(LoadMap(input))));
4348 4241
4349 // We might need to loop once here due to ToPrimitive conversions. 4242 // We might need to loop once here due to ToPrimitive conversions.
4350 VARIABLE(var_input, MachineRepresentation::kTagged, input); 4243 VARIABLE(var_input, MachineRepresentation::kTagged, input);
(...skipping 68 matching lines...)
4419 // Note: We cannot tail call to the runtime here, as js-to-wasm 4312 // Note: We cannot tail call to the runtime here, as js-to-wasm
4420 // trampolines also use this code currently, and they declare all 4313 // trampolines also use this code currently, and they declare all
4421 // outgoing parameters as untagged, while we would push a tagged 4314 // outgoing parameters as untagged, while we would push a tagged
4422 // object here. 4315 // object here.
4423 var_result.Bind(CallRuntime(Runtime::kToNumber, context, input)); 4316 var_result.Bind(CallRuntime(Runtime::kToNumber, context, input));
4424 Goto(&end); 4317 Goto(&end);
4425 } 4318 }
4426 } 4319 }
4427 4320
4428 BIND(&end); 4321 BIND(&end);
4429 CSA_ASSERT(this, IsNumber(var_result.value()));
4430 return var_result.value(); 4322 return var_result.value();
4431 } 4323 }
4432 4324
4433 Node* CodeStubAssembler::ToNumber(Node* context, Node* input) { 4325 Node* CodeStubAssembler::ToNumber(Node* context, Node* input) {
4434 VARIABLE(var_result, MachineRepresentation::kTagged); 4326 VARIABLE(var_result, MachineRepresentation::kTagged);
4435 Label end(this); 4327 Label end(this);
4436 4328
4437 Label not_smi(this, Label::kDeferred); 4329 Label not_smi(this, Label::kDeferred);
4438 GotoIfNot(TaggedIsSmi(input), &not_smi); 4330 GotoIfNot(TaggedIsSmi(input), &not_smi);
4439 var_result.Bind(input); 4331 var_result.Bind(input);
4440 Goto(&end); 4332 Goto(&end);
4441 4333
4442 BIND(&not_smi); 4334 BIND(&not_smi);
4443 { 4335 {
4444 Label not_heap_number(this, Label::kDeferred); 4336 Label not_heap_number(this, Label::kDeferred);
4445 Node* input_map = LoadMap(input); 4337 Node* input_map = LoadMap(input);
4446 GotoIfNot(IsHeapNumberMap(input_map), &not_heap_number); 4338 GotoIfNot(IsHeapNumberMap(input_map), &not_heap_number);
4447 4339
4448 var_result.Bind(input); 4340 var_result.Bind(input);
4449 Goto(&end); 4341 Goto(&end);
4450 4342
4451 BIND(&not_heap_number); 4343 BIND(&not_heap_number);
4452 { 4344 {
4453 var_result.Bind(NonNumberToNumber(context, input)); 4345 var_result.Bind(NonNumberToNumber(context, input));
4454 Goto(&end); 4346 Goto(&end);
4455 } 4347 }
4456 } 4348 }
4457 4349
4458 BIND(&end); 4350 BIND(&end);
4459 CSA_ASSERT(this, IsNumber(var_result.value()));
4460 return var_result.value(); 4351 return var_result.value();
4461 } 4352 }
4462 4353
4463 // ES#sec-touint32 4354 // ES#sec-touint32
4464 Node* CodeStubAssembler::ToUint32(Node* context, Node* input) { 4355 Node* CodeStubAssembler::ToUint32(Node* context, Node* input) {
4465 Node* const float_zero = Float64Constant(0.0); 4356 Node* const float_zero = Float64Constant(0.0);
4466 Node* const float_two_32 = Float64Constant(static_cast<double>(1ULL << 32)); 4357 Node* const float_two_32 = Float64Constant(static_cast<double>(1ULL << 32));
4467 4358
4468 Label out(this); 4359 Label out(this);
4469 4360
(...skipping 82 matching lines...)
4552 } 4443 }
4553 4444
4554 BIND(&return_zero); 4445 BIND(&return_zero);
4555 { 4446 {
4556 var_result.Bind(SmiConstant(Smi::kZero)); 4447 var_result.Bind(SmiConstant(Smi::kZero));
4557 Goto(&out); 4448 Goto(&out);
4558 } 4449 }
4559 } 4450 }
4560 4451
4561 BIND(&out); 4452 BIND(&out);
4562 CSA_ASSERT(this, IsNumber(var_result.value()));
4563 return var_result.value(); 4453 return var_result.value();
4564 } 4454 }
4565 4455
4566 Node* CodeStubAssembler::ToString(Node* context, Node* input) { 4456 Node* CodeStubAssembler::ToString(Node* context, Node* input) {
4567 Label is_number(this); 4457 Label is_number(this);
4568 Label runtime(this, Label::kDeferred); 4458 Label runtime(this, Label::kDeferred);
4569 VARIABLE(result, MachineRepresentation::kTagged); 4459 VARIABLE(result, MachineRepresentation::kTagged);
4570 Label done(this, &result); 4460 Label done(this, &result);
4571 4461
4572 GotoIf(TaggedIsSmi(input), &is_number); 4462 GotoIf(TaggedIsSmi(input), &is_number);
(...skipping 88 matching lines...)
4661 Goto(&negative_check); 4551 Goto(&negative_check);
4662 4552
4663 BIND(&negative_check); 4553 BIND(&negative_check);
4664 Branch(SmiLessThan(result.value(), SmiConstant(0)), range_error, &done); 4554 Branch(SmiLessThan(result.value(), SmiConstant(0)), range_error, &done);
4665 4555
4666 BIND(&return_zero); 4556 BIND(&return_zero);
4667 result.Bind(SmiConstant(0)); 4557 result.Bind(SmiConstant(0));
4668 Goto(&done); 4558 Goto(&done);
4669 4559
4670 BIND(&done); 4560 BIND(&done);
4671 CSA_SLOW_ASSERT(this, TaggedIsSmi(result.value()));
4672 return result.value(); 4561 return result.value();
4673 } 4562 }
4674 4563
4675 Node* CodeStubAssembler::ToSmiLength(Node* input, Node* const context, 4564 Node* CodeStubAssembler::ToSmiLength(Node* input, Node* const context,
4676 Label* range_error) { 4565 Label* range_error) {
4677 VARIABLE(result, MachineRepresentation::kTagged, input); 4566 VARIABLE(result, MachineRepresentation::kTagged, input);
4678 Label to_integer(this), negative_check(this), return_zero(this), done(this); 4567 Label to_integer(this), negative_check(this), return_zero(this), done(this);
4679 Branch(TaggedIsSmi(result.value()), &negative_check, &to_integer); 4568 Branch(TaggedIsSmi(result.value()), &negative_check, &to_integer);
4680 4569
4681 BIND(&to_integer); 4570 BIND(&to_integer);
4682 result.Bind(ToInteger(context, result.value(), 4571 result.Bind(ToInteger(context, result.value(),
4683 CodeStubAssembler::kTruncateMinusZero)); 4572 CodeStubAssembler::kTruncateMinusZero));
4684 GotoIfNot(TaggedIsSmi(result.value()), range_error); 4573 GotoIfNot(TaggedIsSmi(result.value()), range_error);
4685 CSA_ASSERT(this, TaggedIsSmi(result.value())); 4574 CSA_ASSERT(this, TaggedIsSmi(result.value()));
4686 Goto(&negative_check); 4575 Goto(&negative_check);
4687 4576
4688 BIND(&negative_check); 4577 BIND(&negative_check);
4689 Branch(SmiLessThan(result.value(), SmiConstant(0)), &return_zero, &done); 4578 Branch(SmiLessThan(result.value(), SmiConstant(0)), &return_zero, &done);
4690 4579
4691 BIND(&return_zero); 4580 BIND(&return_zero);
4692 result.Bind(SmiConstant(0)); 4581 result.Bind(SmiConstant(0));
4693 Goto(&done); 4582 Goto(&done);
4694 4583
4695 BIND(&done); 4584 BIND(&done);
4696 CSA_SLOW_ASSERT(this, TaggedIsSmi(result.value()));
4697 return result.value(); 4585 return result.value();
4698 } 4586 }
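In scalar terms the clamping above behaves like the following sketch, with the Smi range approximated by int32_t:

#include <cstdint>
#include <optional>

// ToInteger has already run; a result outside the Smi range means ToSmiLength
// jumps to range_error instead of reaching this fast path.
std::optional<int32_t> ToSmiLengthSketch(int64_t integer_value) {
  if (integer_value < INT32_MIN || integer_value > INT32_MAX)
    return std::nullopt;               // non-Smi: range error
  int32_t v = static_cast<int32_t>(integer_value);
  return v < 0 ? 0 : v;                // negative lengths clamp to zero
}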
4699 4587
4700 Node* CodeStubAssembler::ToLength_Inline(Node* const context, 4588 Node* CodeStubAssembler::ToLength_Inline(Node* const context,
4701 Node* const input) { 4589 Node* const input) {
4702 Node* const smi_zero = SmiConstant(0); 4590 Node* const smi_zero = SmiConstant(0);
4703 return Select( 4591 return Select(
4704 TaggedIsSmi(input), [=] { return SmiMax(input, smi_zero); }, 4592 TaggedIsSmi(input), [=] { return SmiMax(input, smi_zero); },
4705 [=] { return CallBuiltin(Builtins::kToLength, context, input); }, 4593 [=] { return CallBuiltin(Builtins::kToLength, context, input); },
4706 MachineRepresentation::kTagged); 4594 MachineRepresentation::kTagged);
(...skipping 49 matching lines...)
4756 var_arg.Bind(CallStub(callable, context, arg)); 4644 var_arg.Bind(CallStub(callable, context, arg));
4757 Goto(&loop); 4645 Goto(&loop);
4758 } 4646 }
4759 4647
4760 BIND(&return_zero); 4648 BIND(&return_zero);
4761 var_arg.Bind(SmiConstant(Smi::kZero)); 4649 var_arg.Bind(SmiConstant(Smi::kZero));
4762 Goto(&out); 4650 Goto(&out);
4763 } 4651 }
4764 4652
4765 BIND(&out); 4653 BIND(&out);
4766 CSA_SLOW_ASSERT(this, IsNumber(var_arg.value()));
4767 return var_arg.value(); 4654 return var_arg.value();
4768 } 4655 }
4769 4656
4770 Node* CodeStubAssembler::DecodeWord32(Node* word32, uint32_t shift, 4657 Node* CodeStubAssembler::DecodeWord32(Node* word32, uint32_t shift,
4771 uint32_t mask) { 4658 uint32_t mask) {
4772 return Word32Shr(Word32And(word32, Int32Constant(mask)), 4659 return Word32Shr(Word32And(word32, Int32Constant(mask)),
4773 static_cast<int>(shift)); 4660 static_cast<int>(shift));
4774 } 4661 }
4775 4662
4776 Node* CodeStubAssembler::DecodeWord(Node* word, uint32_t shift, uint32_t mask) { 4663 Node* CodeStubAssembler::DecodeWord(Node* word, uint32_t shift, uint32_t mask) {
(...skipping 96 matching lines...)
4873 BIND(&if_hascachedindex); 4760 BIND(&if_hascachedindex);
4874 var_index->Bind(DecodeWordFromWord32<Name::ArrayIndexValueBits>(hash)); 4761 var_index->Bind(DecodeWordFromWord32<Name::ArrayIndexValueBits>(hash));
4875 Goto(if_keyisindex); 4762 Goto(if_keyisindex);
4876 } 4763 }
4877 4764
4878 void CodeStubAssembler::TryInternalizeString( 4765 void CodeStubAssembler::TryInternalizeString(
4879 Node* string, Label* if_index, Variable* var_index, Label* if_internalized, 4766 Node* string, Label* if_index, Variable* var_index, Label* if_internalized,
4880 Variable* var_internalized, Label* if_not_internalized, Label* if_bailout) { 4767 Variable* var_internalized, Label* if_not_internalized, Label* if_bailout) {
4881 DCHECK(var_index->rep() == MachineType::PointerRepresentation()); 4768 DCHECK(var_index->rep() == MachineType::PointerRepresentation());
4882 DCHECK(var_internalized->rep() == MachineRepresentation::kTagged); 4769 DCHECK(var_internalized->rep() == MachineRepresentation::kTagged);
4883 CSA_SLOW_ASSERT(this, IsString(string));
4884 Node* function = ExternalConstant( 4770 Node* function = ExternalConstant(
4885 ExternalReference::try_internalize_string_function(isolate())); 4771 ExternalReference::try_internalize_string_function(isolate()));
4886 Node* result = CallCFunction1(MachineType::AnyTagged(), 4772 Node* result = CallCFunction1(MachineType::AnyTagged(),
4887 MachineType::AnyTagged(), function, string); 4773 MachineType::AnyTagged(), function, string);
4888 Label internalized(this); 4774 Label internalized(this);
4889 GotoIf(TaggedIsNotSmi(result), &internalized); 4775 GotoIf(TaggedIsNotSmi(result), &internalized);
4890 Node* word_result = SmiUntag(result); 4776 Node* word_result = SmiUntag(result);
4891 GotoIf(WordEqual(word_result, IntPtrConstant(ResultSentinel::kNotFound)), 4777 GotoIf(WordEqual(word_result, IntPtrConstant(ResultSentinel::kNotFound)),
4892 if_not_internalized); 4778 if_not_internalized);
4893 GotoIf(WordEqual(word_result, IntPtrConstant(ResultSentinel::kUnsupported)), 4779 GotoIf(WordEqual(word_result, IntPtrConstant(ResultSentinel::kUnsupported)),
(...skipping 227 matching lines...)
5121 void CodeStubAssembler::InsertEntry(Node* dictionary, Node* key, Node* value, 5007 void CodeStubAssembler::InsertEntry(Node* dictionary, Node* key, Node* value,
5122 Node* index, Node* enum_index) { 5008 Node* index, Node* enum_index) {
5123 UNREACHABLE(); // Use specializations instead. 5009 UNREACHABLE(); // Use specializations instead.
5124 } 5010 }
5125 5011
5126 template <> 5012 template <>
5127 void CodeStubAssembler::InsertEntry<NameDictionary>(Node* dictionary, 5013 void CodeStubAssembler::InsertEntry<NameDictionary>(Node* dictionary,
5128 Node* name, Node* value, 5014 Node* name, Node* value,
5129 Node* index, 5015 Node* index,
5130 Node* enum_index) { 5016 Node* enum_index) {
5131 CSA_SLOW_ASSERT(this, IsDictionary(dictionary));
5132
5133 // Store name and value. 5017 // Store name and value.
5134 StoreFixedArrayElement(dictionary, index, name); 5018 StoreFixedArrayElement(dictionary, index, name);
5135 StoreValueByKeyIndex<NameDictionary>(dictionary, index, value); 5019 StoreValueByKeyIndex<NameDictionary>(dictionary, index, value);
5136 5020
5137 // Prepare details of the new property. 5021 // Prepare details of the new property.
5138 const int kInitialIndex = 0; 5022 const int kInitialIndex = 0;
5139 PropertyDetails d(kData, NONE, kInitialIndex, PropertyCellType::kNoCell); 5023 PropertyDetails d(kData, NONE, kInitialIndex, PropertyCellType::kNoCell);
5140 enum_index = 5024 enum_index =
5141 SmiShl(enum_index, PropertyDetails::DictionaryStorageField::kShift); 5025 SmiShl(enum_index, PropertyDetails::DictionaryStorageField::kShift);
5142 STATIC_ASSERT(kInitialIndex == 0); 5026 STATIC_ASSERT(kInitialIndex == 0);
(...skipping 21 matching lines...)
5164 void CodeStubAssembler::InsertEntry<GlobalDictionary>(Node* dictionary, 5048 void CodeStubAssembler::InsertEntry<GlobalDictionary>(Node* dictionary,
5165 Node* key, Node* value, 5049 Node* key, Node* value,
5166 Node* index, 5050 Node* index,
5167 Node* enum_index) { 5051 Node* enum_index) {
5168 UNIMPLEMENTED(); 5052 UNIMPLEMENTED();
5169 } 5053 }
5170 5054
5171 template <class Dictionary> 5055 template <class Dictionary>
5172 void CodeStubAssembler::Add(Node* dictionary, Node* key, Node* value, 5056 void CodeStubAssembler::Add(Node* dictionary, Node* key, Node* value,
5173 Label* bailout) { 5057 Label* bailout) {
5174 CSA_SLOW_ASSERT(this, IsDictionary(dictionary));
5175 Node* capacity = GetCapacity<Dictionary>(dictionary); 5058 Node* capacity = GetCapacity<Dictionary>(dictionary);
5176 Node* nof = GetNumberOfElements<Dictionary>(dictionary); 5059 Node* nof = GetNumberOfElements<Dictionary>(dictionary);
5177 Node* new_nof = SmiAdd(nof, SmiConstant(1)); 5060 Node* new_nof = SmiAdd(nof, SmiConstant(1));
5178 // Require 33% to still be free after adding additional_elements. 5061 // Require 33% to still be free after adding additional_elements.
5179 // Computing "x + (x >> 1)" on a Smi x does not return a valid Smi! 5062 // Computing "x + (x >> 1)" on a Smi x does not return a valid Smi!
5180 // But that's OK here because it's only used for a comparison. 5063 // But that's OK here because it's only used for a comparison.
5181 Node* required_capacity_pseudo_smi = SmiAdd(new_nof, SmiShr(new_nof, 1)); 5064 Node* required_capacity_pseudo_smi = SmiAdd(new_nof, SmiShr(new_nof, 1));
5182 GotoIf(SmiBelow(capacity, required_capacity_pseudo_smi), bailout); 5065 GotoIf(SmiBelow(capacity, required_capacity_pseudo_smi), bailout);
5183 // Require rehashing if more than 50% of free elements are deleted elements. 5066 // Require rehashing if more than 50% of free elements are deleted elements.
5184 Node* deleted = GetNumberOfDeletedElements<Dictionary>(dictionary); 5067 Node* deleted = GetNumberOfDeletedElements<Dictionary>(dictionary);
(...skipping 1694 matching lines...)
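The headroom requirement above works out as a 1.5x rule; a small worked sketch in plain integers (the Smi overflow noted in the comment is irrelevant here because only the comparison result is used):

#include <cstdint>

// Require capacity >= new_nof + (new_nof >> 1), i.e. roughly a third of the
// table stays free after the insertion; otherwise Add() bails out so the
// runtime can grow and rehash the dictionary.
bool HasEnoughCapacity(int32_t capacity, int32_t new_nof) {
  int32_t required = new_nof + (new_nof >> 1);
  return capacity >= required;
}
// Example: new_nof = 10 gives required = 15, so capacity 16 passes (6 of 16
// slots remain free) while capacity 14 takes the bailout path.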
6879 Goto(&end); 6762 Goto(&end);
6880 } 6763 }
6881 6764
6882 BIND(&end); 6765 BIND(&end);
6883 } 6766 }
6884 6767
6885 Node* CodeStubAssembler::BuildFastLoop( 6768 Node* CodeStubAssembler::BuildFastLoop(
6886 const CodeStubAssembler::VariableList& vars, Node* start_index, 6769 const CodeStubAssembler::VariableList& vars, Node* start_index,
6887 Node* end_index, const FastLoopBody& body, int increment, 6770 Node* end_index, const FastLoopBody& body, int increment,
6888 ParameterMode parameter_mode, IndexAdvanceMode advance_mode) { 6771 ParameterMode parameter_mode, IndexAdvanceMode advance_mode) {
6889 CSA_SLOW_ASSERT(this, MatchesParameterMode(start_index, parameter_mode));
6890 CSA_SLOW_ASSERT(this, MatchesParameterMode(end_index, parameter_mode));
6891 MachineRepresentation index_rep = (parameter_mode == INTPTR_PARAMETERS) 6772 MachineRepresentation index_rep = (parameter_mode == INTPTR_PARAMETERS)
6892 ? MachineType::PointerRepresentation() 6773 ? MachineType::PointerRepresentation()
6893 : MachineRepresentation::kTaggedSigned; 6774 : MachineRepresentation::kTaggedSigned;
6894 VARIABLE(var, index_rep, start_index); 6775 VARIABLE(var, index_rep, start_index);
6895 VariableList vars_copy(vars, zone()); 6776 VariableList vars_copy(vars, zone());
6896 vars_copy.Add(&var, zone()); 6777 vars_copy.Add(&var, zone());
6897 Label loop(this, vars_copy); 6778 Label loop(this, vars_copy);
6898 Label after_loop(this); 6779 Label after_loop(this);
6899 // Introduce an explicit second check of the termination condition before the 6780 // Introduce an explicit second check of the termination condition before the
6900 // loop that helps turbofan generate better code. If there's only a single 6781 // loop that helps turbofan generate better code. If there's only a single
(...skipping 17 matching lines...)
6918 BIND(&after_loop); 6799 BIND(&after_loop);
6919 return var.value(); 6800 return var.value();
6920 } 6801 }
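The control flow emitted by BuildFastLoop corresponds roughly to this sketch; the up-front test is the "explicit second check of the termination condition" mentioned in the comment above, and a unit increment is assumed:

#include <functional>

void FastLoopShape(int start, int end, const std::function<void(int)>& body) {
  int index = start;
  if (index == end) return;  // explicit pre-check of the termination condition
  do {
    body(index);             // loop body sees the current index
    ++index;
  } while (index != end);    // single branch on the back edge
}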
6921 6802
6922 void CodeStubAssembler::BuildFastFixedArrayForEach( 6803 void CodeStubAssembler::BuildFastFixedArrayForEach(
6923 const CodeStubAssembler::VariableList& vars, Node* fixed_array, 6804 const CodeStubAssembler::VariableList& vars, Node* fixed_array,
6924 ElementsKind kind, Node* first_element_inclusive, 6805 ElementsKind kind, Node* first_element_inclusive,
6925 Node* last_element_exclusive, const FastFixedArrayForEachBody& body, 6806 Node* last_element_exclusive, const FastFixedArrayForEachBody& body,
6926 ParameterMode mode, ForEachDirection direction) { 6807 ParameterMode mode, ForEachDirection direction) {
6927 STATIC_ASSERT(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize); 6808 STATIC_ASSERT(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize);
6928 CSA_SLOW_ASSERT(this, MatchesParameterMode(first_element_inclusive, mode));
6929 CSA_SLOW_ASSERT(this, MatchesParameterMode(last_element_exclusive, mode));
6930 CSA_SLOW_ASSERT(this, IsFixedArrayWithKind(fixed_array, kind));
6931 int32_t first_val; 6809 int32_t first_val;
6932 bool constant_first = ToInt32Constant(first_element_inclusive, first_val); 6810 bool constant_first = ToInt32Constant(first_element_inclusive, first_val);
6933 int32_t last_val; 6811 int32_t last_val;
6934 bool constant_last = ToInt32Constant(last_element_exclusive, last_val); 6812 bool constant_last = ToInt32Constant(last_element_exclusive, last_val);
6935 if (constant_first && constant_last) { 6813 if (constant_first && constant_last) {
6936 int delta = last_val - first_val; 6814 int delta = last_val - first_val;
6937 DCHECK(delta >= 0); 6815 DCHECK(delta >= 0);
6938 if (delta <= kElementLoopUnrollThreshold) { 6816 if (delta <= kElementLoopUnrollThreshold) {
6939 if (direction == ForEachDirection::kForward) { 6817 if (direction == ForEachDirection::kForward) {
6940 for (int i = first_val; i < last_val; ++i) { 6818 for (int i = first_val; i < last_val; ++i) {
(...skipping 40 matching lines...)
6981 (kMaxRegularHeapObjectSize - base_size) / kPointerSize; 6859 (kMaxRegularHeapObjectSize - base_size) / kPointerSize;
6982 GotoIf(IntPtrOrSmiGreaterThan( 6860 GotoIf(IntPtrOrSmiGreaterThan(
6983 element_count, IntPtrOrSmiConstant(max_newspace_parameters, mode), 6861 element_count, IntPtrOrSmiConstant(max_newspace_parameters, mode),
6984 mode), 6862 mode),
6985 doesnt_fit); 6863 doesnt_fit);
6986 } 6864 }
6987 6865
6988 void CodeStubAssembler::InitializeFieldsWithRoot( 6866 void CodeStubAssembler::InitializeFieldsWithRoot(
6989 Node* object, Node* start_offset, Node* end_offset, 6867 Node* object, Node* start_offset, Node* end_offset,
6990 Heap::RootListIndex root_index) { 6868 Heap::RootListIndex root_index) {
6991 CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object));
6992 start_offset = IntPtrAdd(start_offset, IntPtrConstant(-kHeapObjectTag)); 6869 start_offset = IntPtrAdd(start_offset, IntPtrConstant(-kHeapObjectTag));
6993 end_offset = IntPtrAdd(end_offset, IntPtrConstant(-kHeapObjectTag)); 6870 end_offset = IntPtrAdd(end_offset, IntPtrConstant(-kHeapObjectTag));
6994 Node* root_value = LoadRoot(root_index); 6871 Node* root_value = LoadRoot(root_index);
6995 BuildFastLoop(end_offset, start_offset, 6872 BuildFastLoop(end_offset, start_offset,
6996 [this, object, root_value](Node* current) { 6873 [this, object, root_value](Node* current) {
6997 StoreNoWriteBarrier(MachineRepresentation::kTagged, object, 6874 StoreNoWriteBarrier(MachineRepresentation::kTagged, object,
6998 current, root_value); 6875 current, root_value);
6999 }, 6876 },
7000 -kPointerSize, INTPTR_PARAMETERS, 6877 -kPointerSize, INTPTR_PARAMETERS,
7001 CodeStubAssembler::IndexAdvanceMode::kPre); 6878 CodeStubAssembler::IndexAdvanceMode::kPre);
7002 } 6879 }
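A byte-offset sketch of the fill loop above in plain C++, with untagged pointers assumed; it mirrors the backwards, pre-decrementing BuildFastLoop call:

#include <cstdint>
#include <cstring>

void InitializeFieldsWithValue(uint8_t* object_base, intptr_t start_offset,
                               intptr_t end_offset, uintptr_t filler) {
  for (intptr_t offset = end_offset; offset > start_offset;) {
    offset -= static_cast<intptr_t>(sizeof(uintptr_t));  // kPre advance mode
    std::memcpy(object_base + offset, &filler, sizeof filler);  // plain store,
                                                                // no write barrier
  }
}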
7003 6880
7004 void CodeStubAssembler::BranchIfNumericRelationalComparison( 6881 void CodeStubAssembler::BranchIfNumericRelationalComparison(
7005 RelationalComparisonMode mode, Node* lhs, Node* rhs, Label* if_true, 6882 RelationalComparisonMode mode, Node* lhs, Node* rhs, Label* if_true,
7006 Label* if_false) { 6883 Label* if_false) {
7007 CSA_SLOW_ASSERT(this, IsNumber(lhs));
7008 CSA_SLOW_ASSERT(this, IsNumber(rhs));
7009
7010 Label end(this); 6884 Label end(this);
7011 VARIABLE(result, MachineRepresentation::kTagged); 6885 VARIABLE(result, MachineRepresentation::kTagged);
7012 6886
7013 // Shared entry for floating point comparison. 6887 // Shared entry for floating point comparison.
7014 Label do_fcmp(this); 6888 Label do_fcmp(this);
7015 VARIABLE(var_fcmp_lhs, MachineRepresentation::kFloat64); 6889 VARIABLE(var_fcmp_lhs, MachineRepresentation::kFloat64);
7016 VARIABLE(var_fcmp_rhs, MachineRepresentation::kFloat64); 6890 VARIABLE(var_fcmp_rhs, MachineRepresentation::kFloat64);
7017 6891
7018 // Check if the {lhs} is a Smi or a HeapObject. 6892 // Check if the {lhs} is a Smi or a HeapObject.
7019 Label if_lhsissmi(this), if_lhsisnotsmi(this); 6893 Label if_lhsissmi(this), if_lhsisnotsmi(this);
(...skipping 92 matching lines...) Expand 10 before | Expand all | Expand 10 after
7112 Label* if_false) { 6986 Label* if_false) {
7113 Label if_true(this); 6987 Label if_true(this);
7114 BranchIfNumericRelationalComparison(kLessThan, lhs, rhs, &if_true, if_false); 6988 BranchIfNumericRelationalComparison(kLessThan, lhs, rhs, &if_true, if_false);
7115 BIND(&if_true); 6989 BIND(&if_true);
7116 } 6990 }
7117 6991
7118 Node* CodeStubAssembler::RelationalComparison(RelationalComparisonMode mode, 6992 Node* CodeStubAssembler::RelationalComparison(RelationalComparisonMode mode,
7119 Node* lhs, Node* rhs, 6993 Node* lhs, Node* rhs,
7120 Node* context, 6994 Node* context,
7121 Variable* var_type_feedback) { 6995 Variable* var_type_feedback) {
7122 CSA_SLOW_ASSERT(this, IsNumber(lhs));
7123 CSA_SLOW_ASSERT(this, IsNumber(rhs));
7124
7125 Label return_true(this), return_false(this), end(this); 6996 Label return_true(this), return_false(this), end(this);
7126 VARIABLE(result, MachineRepresentation::kTagged); 6997 VARIABLE(result, MachineRepresentation::kTagged);
7127 6998
7128 // Shared entry for floating point comparison. 6999 // Shared entry for floating point comparison.
7129 Label do_fcmp(this); 7000 Label do_fcmp(this);
7130 VARIABLE(var_fcmp_lhs, MachineRepresentation::kFloat64); 7001 VARIABLE(var_fcmp_lhs, MachineRepresentation::kFloat64);
7131 VARIABLE(var_fcmp_rhs, MachineRepresentation::kFloat64); 7002 VARIABLE(var_fcmp_rhs, MachineRepresentation::kFloat64);
7132 7003
7133 // We might need to loop several times due to ToPrimitive and/or ToNumber 7004 // We might need to loop several times due to ToPrimitive and/or ToNumber
7134 // conversions. 7005 // conversions.
(...skipping 2099 matching lines...)
9234 Load(MachineType::Uint8(), 9105 Load(MachineType::Uint8(),
9235 ExternalConstant( 9106 ExternalConstant(
9236 ExternalReference::promise_hook_or_debug_is_active_address( 9107 ExternalReference::promise_hook_or_debug_is_active_address(
9237 isolate()))); 9108 isolate())));
9238 return Word32NotEqual(promise_hook_or_debug_is_active, Int32Constant(0)); 9109 return Word32NotEqual(promise_hook_or_debug_is_active, Int32Constant(0));
9239 } 9110 }
9240 9111
9241 Node* CodeStubAssembler::AllocateFunctionWithMapAndContext(Node* map, 9112 Node* CodeStubAssembler::AllocateFunctionWithMapAndContext(Node* map,
9242 Node* shared_info, 9113 Node* shared_info,
9243 Node* context) { 9114 Node* context) {
9244 CSA_SLOW_ASSERT(this, IsMap(map));
9245
9246 Node* const code = BitcastTaggedToWord( 9115 Node* const code = BitcastTaggedToWord(
9247 LoadObjectField(shared_info, SharedFunctionInfo::kCodeOffset)); 9116 LoadObjectField(shared_info, SharedFunctionInfo::kCodeOffset));
9248 Node* const code_entry = 9117 Node* const code_entry =
9249 IntPtrAdd(code, IntPtrConstant(Code::kHeaderSize - kHeapObjectTag)); 9118 IntPtrAdd(code, IntPtrConstant(Code::kHeaderSize - kHeapObjectTag));
9250 9119
9251 Node* const fun = Allocate(JSFunction::kSize); 9120 Node* const fun = Allocate(JSFunction::kSize);
9252 StoreMapNoWriteBarrier(fun, map); 9121 StoreMapNoWriteBarrier(fun, map);
9253 StoreObjectFieldRoot(fun, JSObject::kPropertiesOffset, 9122 StoreObjectFieldRoot(fun, JSObject::kPropertiesOffset,
9254 Heap::kEmptyFixedArrayRootIndex); 9123 Heap::kEmptyFixedArrayRootIndex);
9255 StoreObjectFieldRoot(fun, JSObject::kElementsOffset, 9124 StoreObjectFieldRoot(fun, JSObject::kElementsOffset,
(...skipping 66 matching lines...) Expand 10 before | Expand all | Expand 10 after
9322 formatted.c_str(), TENURED); 9191 formatted.c_str(), TENURED);
9323 CallRuntime(Runtime::kGlobalPrint, NoContextConstant(), 9192 CallRuntime(Runtime::kGlobalPrint, NoContextConstant(),
9324 HeapConstant(string)); 9193 HeapConstant(string));
9325 } 9194 }
9326 CallRuntime(Runtime::kDebugPrint, NoContextConstant(), tagged_value); 9195 CallRuntime(Runtime::kDebugPrint, NoContextConstant(), tagged_value);
9327 #endif 9196 #endif
9328 } 9197 }
9329 9198
9330 } // namespace internal 9199 } // namespace internal
9331 } // namespace v8 9200 } // namespace v8