Chromium Code Reviews

Side by Side Diff: src/code-stub-assembler.cc

Issue 2847923003: [csa] Add assertions to CSA (Closed)
Patch Set: Rebase & fix incorrect asserts Created 3 years, 6 months ago
OLD | NEW
1 // Copyright 2016 the V8 project authors. All rights reserved. 1 // Copyright 2016 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 #include "src/code-stub-assembler.h" 4 #include "src/code-stub-assembler.h"
5 #include "src/code-factory.h" 5 #include "src/code-factory.h"
6 #include "src/frames-inl.h" 6 #include "src/frames-inl.h"
7 #include "src/frames.h" 7 #include "src/frames.h"
8 8
9 namespace v8 { 9 namespace v8 {
10 namespace internal { 10 namespace internal {
(...skipping 56 matching lines...)
67 Branch(condition, &ok, &not_ok); 67 Branch(condition, &ok, &not_ok);
68 BIND(&not_ok); 68 BIND(&not_ok);
69 if (message != nullptr) { 69 if (message != nullptr) {
70 char chars[1024]; 70 char chars[1024];
71 Vector<char> buffer(chars); 71 Vector<char> buffer(chars);
72 if (file != nullptr) { 72 if (file != nullptr) {
73 SNPrintF(buffer, "CSA_ASSERT failed: %s [%s:%d]\n", message, file, line); 73 SNPrintF(buffer, "CSA_ASSERT failed: %s [%s:%d]\n", message, file, line);
74 } else { 74 } else {
75 SNPrintF(buffer, "CSA_ASSERT failed: %s\n", message); 75 SNPrintF(buffer, "CSA_ASSERT failed: %s\n", message);
76 } 76 }
77 CallRuntime( 77 CallRuntime(Runtime::kGlobalPrint, SmiConstant(0),
78 Runtime::kGlobalPrint, SmiConstant(Smi::kZero), 78 HeapConstant(factory()->InternalizeUtf8String(&(buffer[0]))));
79 HeapConstant(factory()->NewStringFromAsciiChecked(&(buffer[0]))));
80 } 79 }
81 DebugBreak(); 80 DebugBreak();
82 Goto(&ok); 81 Goto(&ok);
83 BIND(&ok); 82 BIND(&ok);
84 Comment("] Assert"); 83 Comment("] Assert");
85 } 84 }
86 85
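The Assert() helper above receives the failure message and source location as ordinary C++ values. A minimal sketch of how an assertion macro could supply them, assuming a wrapper in the spirit of CSA_ASSERT (the macro name and exact expansion here are illustrative, not necessarily V8's definition):

// Illustrative only: stringify the condition and capture the call site,
// then forward everything to the Assert() helper shown in the diff above.
#define MY_CSA_ASSERT(csa, cond) \
  (csa)->Assert((cond), #cond, __FILE__, __LINE__)
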
87 Node* CodeStubAssembler::Select(Node* condition, const NodeGenerator& true_body, 86 Node* CodeStubAssembler::Select(Node* condition, const NodeGenerator& true_body,
88 const NodeGenerator& false_body, 87 const NodeGenerator& false_body,
89 MachineRepresentation rep) { 88 MachineRepresentation rep) {
(...skipping 101 matching lines...)
191 Node* CodeStubAssembler::IntPtrRoundUpToPowerOfTwo32(Node* value) { 190 Node* CodeStubAssembler::IntPtrRoundUpToPowerOfTwo32(Node* value) {
192 Comment("IntPtrRoundUpToPowerOfTwo32"); 191 Comment("IntPtrRoundUpToPowerOfTwo32");
193 CSA_ASSERT(this, UintPtrLessThanOrEqual(value, IntPtrConstant(0x80000000u))); 192 CSA_ASSERT(this, UintPtrLessThanOrEqual(value, IntPtrConstant(0x80000000u)));
194 value = IntPtrSub(value, IntPtrConstant(1)); 193 value = IntPtrSub(value, IntPtrConstant(1));
195 for (int i = 1; i <= 16; i *= 2) { 194 for (int i = 1; i <= 16; i *= 2) {
196 value = WordOr(value, WordShr(value, IntPtrConstant(i))); 195 value = WordOr(value, WordShr(value, IntPtrConstant(i)));
197 } 196 }
198 return IntPtrAdd(value, IntPtrConstant(1)); 197 return IntPtrAdd(value, IntPtrConstant(1));
199 } 198 }
200 199
200 Node* CodeStubAssembler::MatchesParameterMode(Node* value, ParameterMode mode) {
201 return (mode == SMI_PARAMETERS) ? TaggedIsSmi(value) : Int32Constant(1);
202 }
203
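MatchesParameterMode() is the new helper this patch uses to sanity-check index and length arguments: in SMI_PARAMETERS mode the value must be a tagged Smi, which can be tested at runtime, while in INTPTR_PARAMETERS mode there is no cheap tag to inspect, so the helper returns a constant true and the assertion becomes a no-op. A rough plain-C++ analogue, assuming the usual V8 Smi tagging scheme (low bit clear for Smis); the names below are illustrative:

#include <cstdint>

enum ParameterMode { SMI_PARAMETERS, INTPTR_PARAMETERS };

// Illustrative analogue: a Smi-tagged word has its low bit clear, so the
// SMI_PARAMETERS check is a one-bit test; INTPTR_PARAMETERS accepts anything.
bool MatchesParameterModeAnalogue(intptr_t value, ParameterMode mode) {
  return mode == SMI_PARAMETERS ? ((value & 1) == 0) : true;
}
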
201 Node* CodeStubAssembler::WordIsPowerOfTwo(Node* value) { 204 Node* CodeStubAssembler::WordIsPowerOfTwo(Node* value) {
202 // value && !(value & (value - 1)) 205 // value && !(value & (value - 1))
203 return WordEqual( 206 return WordEqual(
204 Select( 207 Select(
205 WordEqual(value, IntPtrConstant(0)), 208 WordEqual(value, IntPtrConstant(0)),
206 [=] { return IntPtrConstant(1); }, 209 [=] { return IntPtrConstant(1); },
207 [=] { return WordAnd(value, IntPtrSub(value, IntPtrConstant(1))); }, 210 [=] { return WordAnd(value, IntPtrSub(value, IntPtrConstant(1))); },
208 MachineType::PointerRepresentation()), 211 MachineType::PointerRepresentation()),
209 IntPtrConstant(0)); 212 IntPtrConstant(0));
210 } 213 }
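Both helpers above are standard bit tricks. A plain C++ rendering of the same logic, for illustration only (the CSA versions operate on word-sized Nodes rather than C++ integers):

#include <cstdint>

// Round up to the next power of two (inputs up to 0x80000000, as asserted
// above): smear the highest set bit of (value - 1) downwards, then add one.
uint32_t RoundUpToPowerOfTwo32(uint32_t value) {
  value -= 1;
  for (int shift = 1; shift <= 16; shift *= 2) value |= value >> shift;
  return value + 1;
}

// Power-of-two test, exactly the "value && !(value & (value - 1))" comment.
bool WordIsPowerOfTwo(uintptr_t value) {
  return value != 0 && (value & (value - 1)) == 0;
}
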
(...skipping 454 matching lines...)
665 void CodeStubAssembler::Bind(Label* label, AssemblerDebugInfo debug_info) { 668 void CodeStubAssembler::Bind(Label* label, AssemblerDebugInfo debug_info) {
666 CodeAssembler::Bind(label, debug_info); 669 CodeAssembler::Bind(label, debug_info);
667 } 670 }
668 #else 671 #else
669 void CodeStubAssembler::Bind(Label* label) { CodeAssembler::Bind(label); } 672 void CodeStubAssembler::Bind(Label* label) { CodeAssembler::Bind(label); }
670 #endif // DEBUG 673 #endif // DEBUG
671 674
672 void CodeStubAssembler::BranchIfPrototypesHaveNoElements( 675 void CodeStubAssembler::BranchIfPrototypesHaveNoElements(
673 Node* receiver_map, Label* definitely_no_elements, 676 Node* receiver_map, Label* definitely_no_elements,
674 Label* possibly_elements) { 677 Label* possibly_elements) {
678 CSA_SLOW_ASSERT(this, IsMap(receiver_map));
675 VARIABLE(var_map, MachineRepresentation::kTagged, receiver_map); 679 VARIABLE(var_map, MachineRepresentation::kTagged, receiver_map);
676 Label loop_body(this, &var_map); 680 Label loop_body(this, &var_map);
677 Node* empty_elements = LoadRoot(Heap::kEmptyFixedArrayRootIndex); 681 Node* empty_elements = LoadRoot(Heap::kEmptyFixedArrayRootIndex);
678 Goto(&loop_body); 682 Goto(&loop_body);
679 683
680 BIND(&loop_body); 684 BIND(&loop_body);
681 { 685 {
682 Node* map = var_map.value(); 686 Node* map = var_map.value();
683 Node* prototype = LoadMapPrototype(map); 687 Node* prototype = LoadMapPrototype(map);
684 GotoIf(WordEqual(prototype, NullConstant()), definitely_no_elements); 688 GotoIf(WordEqual(prototype, NullConstant()), definitely_no_elements);
(...skipping 520 matching lines...)
1205 result.Bind( 1209 result.Bind(
1206 LoadObjectField(result.value(), Map::kConstructorOrBackPointerOffset)); 1210 LoadObjectField(result.value(), Map::kConstructorOrBackPointerOffset));
1207 Goto(&loop); 1211 Goto(&loop);
1208 } 1212 }
1209 BIND(&done); 1213 BIND(&done);
1210 return result.value(); 1214 return result.value();
1211 } 1215 }
1212 1216
1213 Node* CodeStubAssembler::LoadSharedFunctionInfoSpecialField( 1217 Node* CodeStubAssembler::LoadSharedFunctionInfoSpecialField(
1214 Node* shared, int offset, ParameterMode mode) { 1218 Node* shared, int offset, ParameterMode mode) {
1219 CSA_SLOW_ASSERT(this, HasInstanceType(shared, SHARED_FUNCTION_INFO_TYPE));
1215 if (Is64()) { 1220 if (Is64()) {
1216 Node* result = LoadObjectField(shared, offset, MachineType::Int32()); 1221 Node* result = LoadObjectField(shared, offset, MachineType::Int32());
1217 if (mode == SMI_PARAMETERS) { 1222 if (mode == SMI_PARAMETERS) {
1218 result = SmiTag(result); 1223 result = SmiTag(result);
1219 } else { 1224 } else {
1220 result = ChangeUint32ToWord(result); 1225 result = ChangeUint32ToWord(result);
1221 } 1226 }
1222 return result; 1227 return result;
1223 } else { 1228 } else {
1224 Node* result = LoadObjectField(shared, offset); 1229 Node* result = LoadObjectField(shared, offset);
(...skipping 123 matching lines...)
1348 case ElementsKind::FLOAT64_ELEMENTS: 1353 case ElementsKind::FLOAT64_ELEMENTS:
1349 return AllocateHeapNumberWithValue(value); 1354 return AllocateHeapNumberWithValue(value);
1350 default: 1355 default:
1351 UNREACHABLE(); 1356 UNREACHABLE();
1352 } 1357 }
1353 } 1358 }
1354 1359
1355 Node* CodeStubAssembler::LoadAndUntagToWord32FixedArrayElement( 1360 Node* CodeStubAssembler::LoadAndUntagToWord32FixedArrayElement(
1356 Node* object, Node* index_node, int additional_offset, 1361 Node* object, Node* index_node, int additional_offset,
1357 ParameterMode parameter_mode) { 1362 ParameterMode parameter_mode) {
1363 CSA_SLOW_ASSERT(this, IsFixedArray(object));
1364 CSA_SLOW_ASSERT(this, MatchesParameterMode(index_node, parameter_mode));
1358 int32_t header_size = 1365 int32_t header_size =
1359 FixedArray::kHeaderSize + additional_offset - kHeapObjectTag; 1366 FixedArray::kHeaderSize + additional_offset - kHeapObjectTag;
1360 #if V8_TARGET_LITTLE_ENDIAN 1367 #if V8_TARGET_LITTLE_ENDIAN
1361 if (Is64()) { 1368 if (Is64()) {
1362 header_size += kPointerSize / 2; 1369 header_size += kPointerSize / 2;
1363 } 1370 }
1364 #endif 1371 #endif
1365 Node* offset = ElementOffsetFromIndex(index_node, FAST_HOLEY_ELEMENTS, 1372 Node* offset = ElementOffsetFromIndex(index_node, FAST_HOLEY_ELEMENTS,
1366 parameter_mode, header_size); 1373 parameter_mode, header_size);
1367 if (Is64()) { 1374 if (Is64()) {
1368 return Load(MachineType::Int32(), object, offset); 1375 return Load(MachineType::Int32(), object, offset);
1369 } else { 1376 } else {
1370 return SmiToWord32(Load(MachineType::AnyTagged(), object, offset)); 1377 return SmiToWord32(Load(MachineType::AnyTagged(), object, offset));
1371 } 1378 }
1372 } 1379 }
1373 1380
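The header_size adjustment above exploits the 64-bit Smi layout: the 32-bit payload lives in the upper half of the tagged word, so on a little-endian target the helper can read just those four bytes instead of loading and untagging the whole word. A sketch of that fast path in plain C++, assuming that layout (names are illustrative):

#include <cstdint>
#include <cstring>

// Illustrative only: read the 32-bit Smi payload from a 64-bit slot on a
// little-endian machine. The "+ 4" is what "header_size += kPointerSize / 2"
// accounts for in the CSA helper above.
int32_t LoadSmiPayloadLittleEndian(const uint8_t* slot) {
  int32_t payload;
  std::memcpy(&payload, slot + 4, sizeof(payload));
  return payload;
}
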
1374 Node* CodeStubAssembler::LoadFixedDoubleArrayElement( 1381 Node* CodeStubAssembler::LoadFixedDoubleArrayElement(
1375 Node* object, Node* index_node, MachineType machine_type, 1382 Node* object, Node* index_node, MachineType machine_type,
1376 int additional_offset, ParameterMode parameter_mode, Label* if_hole) { 1383 int additional_offset, ParameterMode parameter_mode, Label* if_hole) {
1384 CSA_SLOW_ASSERT(this, IsFixedDoubleArray(object));
1385 CSA_SLOW_ASSERT(this, MatchesParameterMode(index_node, parameter_mode));
1377 CSA_ASSERT(this, IsFixedDoubleArray(object)); 1386 CSA_ASSERT(this, IsFixedDoubleArray(object));
1378 int32_t header_size = 1387 int32_t header_size =
1379 FixedDoubleArray::kHeaderSize + additional_offset - kHeapObjectTag; 1388 FixedDoubleArray::kHeaderSize + additional_offset - kHeapObjectTag;
1380 Node* offset = ElementOffsetFromIndex(index_node, FAST_HOLEY_DOUBLE_ELEMENTS, 1389 Node* offset = ElementOffsetFromIndex(index_node, FAST_HOLEY_DOUBLE_ELEMENTS,
1381 parameter_mode, header_size); 1390 parameter_mode, header_size);
1382 return LoadDoubleWithHoleCheck(object, offset, if_hole, machine_type); 1391 return LoadDoubleWithHoleCheck(object, offset, if_hole, machine_type);
1383 } 1392 }
1384 1393
1385 Node* CodeStubAssembler::LoadDoubleWithHoleCheck(Node* base, Node* offset, 1394 Node* CodeStubAssembler::LoadDoubleWithHoleCheck(Node* base, Node* offset,
1386 Label* if_hole, 1395 Label* if_hole,
(...skipping 147 matching lines...)
1534 } else { 1543 } else {
1535 return StoreObjectField(object, offset, LoadRoot(root_index)); 1544 return StoreObjectField(object, offset, LoadRoot(root_index));
1536 } 1545 }
1537 } 1546 }
1538 1547
1539 Node* CodeStubAssembler::StoreFixedArrayElement(Node* object, Node* index_node, 1548 Node* CodeStubAssembler::StoreFixedArrayElement(Node* object, Node* index_node,
1540 Node* value, 1549 Node* value,
1541 WriteBarrierMode barrier_mode, 1550 WriteBarrierMode barrier_mode,
1542 int additional_offset, 1551 int additional_offset,
1543 ParameterMode parameter_mode) { 1552 ParameterMode parameter_mode) {
1553 CSA_SLOW_ASSERT(this, IsFixedArray(object));
1554 CSA_SLOW_ASSERT(this, MatchesParameterMode(index_node, parameter_mode));
1544 DCHECK(barrier_mode == SKIP_WRITE_BARRIER || 1555 DCHECK(barrier_mode == SKIP_WRITE_BARRIER ||
1545 barrier_mode == UPDATE_WRITE_BARRIER); 1556 barrier_mode == UPDATE_WRITE_BARRIER);
1546 int header_size = 1557 int header_size =
1547 FixedArray::kHeaderSize + additional_offset - kHeapObjectTag; 1558 FixedArray::kHeaderSize + additional_offset - kHeapObjectTag;
1548 Node* offset = ElementOffsetFromIndex(index_node, FAST_HOLEY_ELEMENTS, 1559 Node* offset = ElementOffsetFromIndex(index_node, FAST_HOLEY_ELEMENTS,
1549 parameter_mode, header_size); 1560 parameter_mode, header_size);
1550 if (barrier_mode == SKIP_WRITE_BARRIER) { 1561 if (barrier_mode == SKIP_WRITE_BARRIER) {
1551 return StoreNoWriteBarrier(MachineRepresentation::kTagged, object, offset, 1562 return StoreNoWriteBarrier(MachineRepresentation::kTagged, object, offset,
1552 value); 1563 value);
1553 } else { 1564 } else {
1554 return Store(object, offset, value); 1565 return Store(object, offset, value);
1555 } 1566 }
1556 } 1567 }
1557 1568
1558 Node* CodeStubAssembler::StoreFixedDoubleArrayElement( 1569 Node* CodeStubAssembler::StoreFixedDoubleArrayElement(
1559 Node* object, Node* index_node, Node* value, ParameterMode parameter_mode) { 1570 Node* object, Node* index_node, Node* value, ParameterMode parameter_mode) {
1560 CSA_ASSERT(this, IsFixedDoubleArray(object)); 1571 CSA_ASSERT(this, IsFixedDoubleArray(object));
1572 CSA_SLOW_ASSERT(this, MatchesParameterMode(index_node, parameter_mode));
1561 Node* offset = 1573 Node* offset =
1562 ElementOffsetFromIndex(index_node, FAST_DOUBLE_ELEMENTS, parameter_mode, 1574 ElementOffsetFromIndex(index_node, FAST_DOUBLE_ELEMENTS, parameter_mode,
1563 FixedArray::kHeaderSize - kHeapObjectTag); 1575 FixedArray::kHeaderSize - kHeapObjectTag);
1564 MachineRepresentation rep = MachineRepresentation::kFloat64; 1576 MachineRepresentation rep = MachineRepresentation::kFloat64;
1565 return StoreNoWriteBarrier(rep, object, offset, value); 1577 return StoreNoWriteBarrier(rep, object, offset, value);
1566 } 1578 }
1567 1579
1568 void CodeStubAssembler::EnsureArrayLengthWritable(Node* map, Label* bailout) { 1580 void CodeStubAssembler::EnsureArrayLengthWritable(Node* map, Label* bailout) {
1569 // Check whether the length property is writable. The length property is the 1581 // Check whether the length property is writable. The length property is the
1570 // only default named property on arrays. It's nonconfigurable, hence is 1582 // only default named property on arrays. It's nonconfigurable, hence is
(...skipping 41 matching lines...)
1612 kind, capacity, new_capacity, mode, 1624 kind, capacity, new_capacity, mode,
1613 bailout)); 1625 bailout));
1614 Goto(&fits); 1626 Goto(&fits);
1615 BIND(&fits); 1627 BIND(&fits);
1616 } 1628 }
1617 1629
1618 Node* CodeStubAssembler::BuildAppendJSArray(ElementsKind kind, Node* array, 1630 Node* CodeStubAssembler::BuildAppendJSArray(ElementsKind kind, Node* array,
1619 CodeStubArguments& args, 1631 CodeStubArguments& args,
1620 Variable& arg_index, 1632 Variable& arg_index,
1621 Label* bailout) { 1633 Label* bailout) {
1634 CSA_SLOW_ASSERT(this, IsJSArray(array));
1622 Comment("BuildAppendJSArray: %s", ElementsKindToString(kind)); 1635 Comment("BuildAppendJSArray: %s", ElementsKindToString(kind));
1623 Label pre_bailout(this); 1636 Label pre_bailout(this);
1624 Label success(this); 1637 Label success(this);
1625 VARIABLE(var_tagged_length, MachineRepresentation::kTagged); 1638 VARIABLE(var_tagged_length, MachineRepresentation::kTagged);
1626 ParameterMode mode = OptimalParameterMode(); 1639 ParameterMode mode = OptimalParameterMode();
1627 VARIABLE(var_length, OptimalParameterRepresentation(), 1640 VARIABLE(var_length, OptimalParameterRepresentation(),
1628 TaggedToParameter(LoadJSArrayLength(array), mode)); 1641 TaggedToParameter(LoadJSArrayLength(array), mode));
1629 VARIABLE(var_elements, MachineRepresentation::kTagged, LoadElements(array)); 1642 VARIABLE(var_elements, MachineRepresentation::kTagged, LoadElements(array));
1630 1643
1631 // Resize the capacity of the fixed array if it doesn't fit. 1644 // Resize the capacity of the fixed array if it doesn't fit.
(...skipping 50 matching lines...)
1682 Float64SilenceNaN(double_value), mode); 1695 Float64SilenceNaN(double_value), mode);
1683 } else { 1696 } else {
1684 WriteBarrierMode barrier_mode = 1697 WriteBarrierMode barrier_mode =
1685 IsFastSmiElementsKind(kind) ? SKIP_WRITE_BARRIER : UPDATE_WRITE_BARRIER; 1698 IsFastSmiElementsKind(kind) ? SKIP_WRITE_BARRIER : UPDATE_WRITE_BARRIER;
1686 StoreFixedArrayElement(elements, index, value, barrier_mode, 0, mode); 1699 StoreFixedArrayElement(elements, index, value, barrier_mode, 0, mode);
1687 } 1700 }
1688 } 1701 }
1689 1702
1690 void CodeStubAssembler::BuildAppendJSArray(ElementsKind kind, Node* array, 1703 void CodeStubAssembler::BuildAppendJSArray(ElementsKind kind, Node* array,
1691 Node* value, Label* bailout) { 1704 Node* value, Label* bailout) {
1705 CSA_SLOW_ASSERT(this, IsJSArray(array));
1692 Comment("BuildAppendJSArray: %s", ElementsKindToString(kind)); 1706 Comment("BuildAppendJSArray: %s", ElementsKindToString(kind));
1693 ParameterMode mode = OptimalParameterMode(); 1707 ParameterMode mode = OptimalParameterMode();
1694 VARIABLE(var_length, OptimalParameterRepresentation(), 1708 VARIABLE(var_length, OptimalParameterRepresentation(),
1695 TaggedToParameter(LoadJSArrayLength(array), mode)); 1709 TaggedToParameter(LoadJSArrayLength(array), mode));
1696 VARIABLE(var_elements, MachineRepresentation::kTagged, LoadElements(array)); 1710 VARIABLE(var_elements, MachineRepresentation::kTagged, LoadElements(array));
1697 1711
1698 // Resize the capacity of the fixed array if it doesn't fit. 1712 // Resize the capacity of the fixed array if it doesn't fit.
1699 Node* growth = IntPtrOrSmiConstant(1, mode); 1713 Node* growth = IntPtrOrSmiConstant(1, mode);
1700 PossiblyGrowElementsCapacity(mode, kind, array, var_length.value(), 1714 PossiblyGrowElementsCapacity(mode, kind, array, var_length.value(),
1701 &var_elements, growth, bailout); 1715 &var_elements, growth, bailout);
(...skipping 39 matching lines...)
1741 StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kHashFieldSlot, 1755 StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kHashFieldSlot,
1742 IntPtrConstant(String::kEmptyHashField), 1756 IntPtrConstant(String::kEmptyHashField),
1743 MachineType::PointerRepresentation()); 1757 MachineType::PointerRepresentation());
1744 return result; 1758 return result;
1745 } 1759 }
1746 1760
1747 Node* CodeStubAssembler::AllocateSeqOneByteString(Node* context, Node* length, 1761 Node* CodeStubAssembler::AllocateSeqOneByteString(Node* context, Node* length,
1748 ParameterMode mode, 1762 ParameterMode mode,
1749 AllocationFlags flags) { 1763 AllocationFlags flags) {
1750 Comment("AllocateSeqOneByteString"); 1764 Comment("AllocateSeqOneByteString");
1765 CSA_SLOW_ASSERT(this, IsFixedArray(context));
1766 CSA_SLOW_ASSERT(this, MatchesParameterMode(length, mode));
1751 VARIABLE(var_result, MachineRepresentation::kTagged); 1767 VARIABLE(var_result, MachineRepresentation::kTagged);
1752 1768
1753 // Compute the SeqOneByteString size and check if it fits into new space. 1769 // Compute the SeqOneByteString size and check if it fits into new space.
1754 Label if_lengthiszero(this), if_sizeissmall(this), 1770 Label if_lengthiszero(this), if_sizeissmall(this),
1755 if_notsizeissmall(this, Label::kDeferred), if_join(this); 1771 if_notsizeissmall(this, Label::kDeferred), if_join(this);
1756 GotoIf(WordEqual(length, IntPtrOrSmiConstant(0, mode)), &if_lengthiszero); 1772 GotoIf(WordEqual(length, IntPtrOrSmiConstant(0, mode)), &if_lengthiszero);
1757 1773
1758 Node* raw_size = GetArrayAllocationSize( 1774 Node* raw_size = GetArrayAllocationSize(
1759 length, UINT8_ELEMENTS, mode, 1775 length, UINT8_ELEMENTS, mode,
1760 SeqOneByteString::kHeaderSize + kObjectAlignmentMask); 1776 SeqOneByteString::kHeaderSize + kObjectAlignmentMask);
(...skipping 50 matching lines...)
1811 // Initialize both used and unused parts of hash field slot at once. 1827 // Initialize both used and unused parts of hash field slot at once.
1812 StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kHashFieldSlot, 1828 StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kHashFieldSlot,
1813 IntPtrConstant(String::kEmptyHashField), 1829 IntPtrConstant(String::kEmptyHashField),
1814 MachineType::PointerRepresentation()); 1830 MachineType::PointerRepresentation());
1815 return result; 1831 return result;
1816 } 1832 }
1817 1833
1818 Node* CodeStubAssembler::AllocateSeqTwoByteString(Node* context, Node* length, 1834 Node* CodeStubAssembler::AllocateSeqTwoByteString(Node* context, Node* length,
1819 ParameterMode mode, 1835 ParameterMode mode,
1820 AllocationFlags flags) { 1836 AllocationFlags flags) {
1837 CSA_SLOW_ASSERT(this, IsFixedArray(context));
1838 CSA_SLOW_ASSERT(this, MatchesParameterMode(length, mode));
1821 Comment("AllocateSeqTwoByteString"); 1839 Comment("AllocateSeqTwoByteString");
1822 VARIABLE(var_result, MachineRepresentation::kTagged); 1840 VARIABLE(var_result, MachineRepresentation::kTagged);
1823 1841
1824 // Compute the SeqTwoByteString size and check if it fits into new space. 1842 // Compute the SeqTwoByteString size and check if it fits into new space.
1825 Label if_lengthiszero(this), if_sizeissmall(this), 1843 Label if_lengthiszero(this), if_sizeissmall(this),
1826 if_notsizeissmall(this, Label::kDeferred), if_join(this); 1844 if_notsizeissmall(this, Label::kDeferred), if_join(this);
1827 GotoIf(WordEqual(length, IntPtrOrSmiConstant(0, mode)), &if_lengthiszero); 1845 GotoIf(WordEqual(length, IntPtrOrSmiConstant(0, mode)), &if_lengthiszero);
1828 1846
1829 Node* raw_size = GetArrayAllocationSize( 1847 Node* raw_size = GetArrayAllocationSize(
1830 length, UINT16_ELEMENTS, mode, 1848 length, UINT16_ELEMENTS, mode,
(...skipping 35 matching lines...)
1866 Goto(&if_join); 1884 Goto(&if_join);
1867 } 1885 }
1868 1886
1869 BIND(&if_join); 1887 BIND(&if_join);
1870 return var_result.value(); 1888 return var_result.value();
1871 } 1889 }
1872 1890
1873 Node* CodeStubAssembler::AllocateSlicedString( 1891 Node* CodeStubAssembler::AllocateSlicedString(
1874 Heap::RootListIndex map_root_index, Node* length, Node* parent, 1892 Heap::RootListIndex map_root_index, Node* length, Node* parent,
1875 Node* offset) { 1893 Node* offset) {
1894 CSA_ASSERT(this, IsString(parent));
1876 CSA_ASSERT(this, TaggedIsSmi(length)); 1895 CSA_ASSERT(this, TaggedIsSmi(length));
1896 CSA_ASSERT(this, TaggedIsSmi(offset));
1877 Node* result = Allocate(SlicedString::kSize); 1897 Node* result = Allocate(SlicedString::kSize);
1878 DCHECK(Heap::RootIsImmortalImmovable(map_root_index)); 1898 DCHECK(Heap::RootIsImmortalImmovable(map_root_index));
1879 StoreMapNoWriteBarrier(result, map_root_index); 1899 StoreMapNoWriteBarrier(result, map_root_index);
1880 StoreObjectFieldNoWriteBarrier(result, SlicedString::kLengthOffset, length, 1900 StoreObjectFieldNoWriteBarrier(result, SlicedString::kLengthOffset, length,
1881 MachineRepresentation::kTagged); 1901 MachineRepresentation::kTagged);
1882 // Initialize both used and unused parts of hash field slot at once. 1902 // Initialize both used and unused parts of hash field slot at once.
1883 StoreObjectFieldNoWriteBarrier(result, SlicedString::kHashFieldSlot, 1903 StoreObjectFieldNoWriteBarrier(result, SlicedString::kHashFieldSlot,
1884 IntPtrConstant(String::kEmptyHashField), 1904 IntPtrConstant(String::kEmptyHashField),
1885 MachineType::PointerRepresentation()); 1905 MachineType::PointerRepresentation());
1886 StoreObjectFieldNoWriteBarrier(result, SlicedString::kParentOffset, parent, 1906 StoreObjectFieldNoWriteBarrier(result, SlicedString::kParentOffset, parent,
(...skipping 12 matching lines...)
1899 Node* CodeStubAssembler::AllocateSlicedTwoByteString(Node* length, Node* parent, 1919 Node* CodeStubAssembler::AllocateSlicedTwoByteString(Node* length, Node* parent,
1900 Node* offset) { 1920 Node* offset) {
1901 return AllocateSlicedString(Heap::kSlicedStringMapRootIndex, length, parent, 1921 return AllocateSlicedString(Heap::kSlicedStringMapRootIndex, length, parent,
1902 offset); 1922 offset);
1903 } 1923 }
1904 1924
1905 Node* CodeStubAssembler::AllocateConsString(Heap::RootListIndex map_root_index, 1925 Node* CodeStubAssembler::AllocateConsString(Heap::RootListIndex map_root_index,
1906 Node* length, Node* first, 1926 Node* length, Node* first,
1907 Node* second, 1927 Node* second,
1908 AllocationFlags flags) { 1928 AllocationFlags flags) {
1929 CSA_ASSERT(this, IsString(first));
1930 CSA_ASSERT(this, IsString(second));
1909 CSA_ASSERT(this, TaggedIsSmi(length)); 1931 CSA_ASSERT(this, TaggedIsSmi(length));
1910 Node* result = Allocate(ConsString::kSize, flags); 1932 Node* result = Allocate(ConsString::kSize, flags);
1911 DCHECK(Heap::RootIsImmortalImmovable(map_root_index)); 1933 DCHECK(Heap::RootIsImmortalImmovable(map_root_index));
1912 StoreMapNoWriteBarrier(result, map_root_index); 1934 StoreMapNoWriteBarrier(result, map_root_index);
1913 StoreObjectFieldNoWriteBarrier(result, ConsString::kLengthOffset, length, 1935 StoreObjectFieldNoWriteBarrier(result, ConsString::kLengthOffset, length,
1914 MachineRepresentation::kTagged); 1936 MachineRepresentation::kTagged);
1915 // Initialize both used and unused parts of hash field slot at once. 1937 // Initialize both used and unused parts of hash field slot at once.
1916 StoreObjectFieldNoWriteBarrier(result, ConsString::kHashFieldSlot, 1938 StoreObjectFieldNoWriteBarrier(result, ConsString::kHashFieldSlot,
1917 IntPtrConstant(String::kEmptyHashField), 1939 IntPtrConstant(String::kEmptyHashField),
1918 MachineType::PointerRepresentation()); 1940 MachineType::PointerRepresentation());
(...skipping 19 matching lines...)
1938 1960
1939 Node* CodeStubAssembler::AllocateTwoByteConsString(Node* length, Node* first, 1961 Node* CodeStubAssembler::AllocateTwoByteConsString(Node* length, Node* first,
1940 Node* second, 1962 Node* second,
1941 AllocationFlags flags) { 1963 AllocationFlags flags) {
1942 return AllocateConsString(Heap::kConsStringMapRootIndex, length, first, 1964 return AllocateConsString(Heap::kConsStringMapRootIndex, length, first,
1943 second, flags); 1965 second, flags);
1944 } 1966 }
1945 1967
1946 Node* CodeStubAssembler::NewConsString(Node* context, Node* length, Node* left, 1968 Node* CodeStubAssembler::NewConsString(Node* context, Node* length, Node* left,
1947 Node* right, AllocationFlags flags) { 1969 Node* right, AllocationFlags flags) {
1970 CSA_ASSERT(this, IsFixedArray(context));
1971 CSA_ASSERT(this, IsString(left));
1972 CSA_ASSERT(this, IsString(right));
1948 CSA_ASSERT(this, TaggedIsSmi(length)); 1973 CSA_ASSERT(this, TaggedIsSmi(length));
1949 // Added string can be a cons string. 1974 // Added string can be a cons string.
1950 Comment("Allocating ConsString"); 1975 Comment("Allocating ConsString");
1951 Node* left_instance_type = LoadInstanceType(left); 1976 Node* left_instance_type = LoadInstanceType(left);
1952 Node* right_instance_type = LoadInstanceType(right); 1977 Node* right_instance_type = LoadInstanceType(right);
1953 1978
1954 // Compute intersection and difference of instance types. 1979 // Compute intersection and difference of instance types.
1955 Node* anded_instance_types = 1980 Node* anded_instance_types =
1956 Word32And(left_instance_type, right_instance_type); 1981 Word32And(left_instance_type, right_instance_type);
1957 Node* xored_instance_types = 1982 Node* xored_instance_types =
(...skipping 35 matching lines...)
1993 result.Bind(AllocateTwoByteConsString(length, left, right, flags)); 2018 result.Bind(AllocateTwoByteConsString(length, left, right, flags));
1994 Goto(&done); 2019 Goto(&done);
1995 2020
1996 BIND(&done); 2021 BIND(&done);
1997 2022
1998 return result.value(); 2023 return result.value();
1999 } 2024 }
2000 2025
2001 Node* CodeStubAssembler::AllocateRegExpResult(Node* context, Node* length, 2026 Node* CodeStubAssembler::AllocateRegExpResult(Node* context, Node* length,
2002 Node* index, Node* input) { 2027 Node* index, Node* input) {
2028 CSA_ASSERT(this, IsFixedArray(context));
2029 CSA_ASSERT(this, TaggedIsSmi(index));
2030 CSA_ASSERT(this, TaggedIsSmi(length));
2031 CSA_ASSERT(this, IsString(input));
2032
2033 #ifdef DEBUG
2003 Node* const max_length = 2034 Node* const max_length =
2004 SmiConstant(Smi::FromInt(JSArray::kInitialMaxFastElementArray)); 2035 SmiConstant(Smi::FromInt(JSArray::kInitialMaxFastElementArray));
2005 CSA_ASSERT(this, SmiLessThanOrEqual(length, max_length)); 2036 CSA_ASSERT(this, SmiLessThanOrEqual(length, max_length));
2006 USE(max_length); 2037 #endif // DEBUG
2007 2038
2008 // Allocate the JSRegExpResult. 2039 // Allocate the JSRegExpResult.
2009 // TODO(jgruber): Fold JSArray and FixedArray allocations, then remove 2040 // TODO(jgruber): Fold JSArray and FixedArray allocations, then remove
2010 // unneeded store of elements. 2041 // unneeded store of elements.
2011 Node* const result = Allocate(JSRegExpResult::kSize); 2042 Node* const result = Allocate(JSRegExpResult::kSize);
2012 2043
2013 // TODO(jgruber): Store map as Heap constant? 2044 // TODO(jgruber): Store map as Heap constant?
2014 Node* const native_context = LoadNativeContext(context); 2045 Node* const native_context = LoadNativeContext(context);
2015 Node* const map = 2046 Node* const map =
2016 LoadContextElement(native_context, Context::REGEXP_RESULT_MAP_INDEX); 2047 LoadContextElement(native_context, Context::REGEXP_RESULT_MAP_INDEX);
(...skipping 100 matching lines...)
2117 Node* size = TimesPointerSize(LoadMapInstanceSize(map)); 2148 Node* size = TimesPointerSize(LoadMapInstanceSize(map));
2118 Node* object = AllocateInNewSpace(size, flags); 2149 Node* object = AllocateInNewSpace(size, flags);
2119 StoreMapNoWriteBarrier(object, map); 2150 StoreMapNoWriteBarrier(object, map);
2120 InitializeJSObjectFromMap(object, map, size, properties, elements); 2151 InitializeJSObjectFromMap(object, map, size, properties, elements);
2121 return object; 2152 return object;
2122 } 2153 }
2123 2154
2124 void CodeStubAssembler::InitializeJSObjectFromMap(Node* object, Node* map, 2155 void CodeStubAssembler::InitializeJSObjectFromMap(Node* object, Node* map,
2125 Node* size, Node* properties, 2156 Node* size, Node* properties,
2126 Node* elements) { 2157 Node* elements) {
2158 CSA_SLOW_ASSERT(this, IsMap(map));
2127 // This helper assumes that the object is in new-space, as guarded by the 2159 // This helper assumes that the object is in new-space, as guarded by the
2128 // check in AllocatedJSObjectFromMap. 2160 // check in AllocatedJSObjectFromMap.
2129 if (properties == nullptr) { 2161 if (properties == nullptr) {
2130 CSA_ASSERT(this, Word32BinaryNot(IsDictionaryMap((map)))); 2162 CSA_ASSERT(this, Word32BinaryNot(IsDictionaryMap((map))));
2131 StoreObjectFieldRoot(object, JSObject::kPropertiesOffset, 2163 StoreObjectFieldRoot(object, JSObject::kPropertiesOffset,
2132 Heap::kEmptyFixedArrayRootIndex); 2164 Heap::kEmptyFixedArrayRootIndex);
2133 } else { 2165 } else {
2166 CSA_ASSERT(this, IsFixedArray(properties));
2134 StoreObjectFieldNoWriteBarrier(object, JSObject::kPropertiesOffset, 2167 StoreObjectFieldNoWriteBarrier(object, JSObject::kPropertiesOffset,
2135 properties); 2168 properties);
2136 } 2169 }
2137 if (elements == nullptr) { 2170 if (elements == nullptr) {
2138 StoreObjectFieldRoot(object, JSObject::kElementsOffset, 2171 StoreObjectFieldRoot(object, JSObject::kElementsOffset,
2139 Heap::kEmptyFixedArrayRootIndex); 2172 Heap::kEmptyFixedArrayRootIndex);
2140 } else { 2173 } else {
2174 CSA_ASSERT(this, IsFixedArray(elements));
2141 StoreObjectFieldNoWriteBarrier(object, JSObject::kElementsOffset, elements); 2175 StoreObjectFieldNoWriteBarrier(object, JSObject::kElementsOffset, elements);
2142 } 2176 }
2143 InitializeJSObjectBody(object, map, size, JSObject::kHeaderSize); 2177 InitializeJSObjectBody(object, map, size, JSObject::kHeaderSize);
2144 } 2178 }
2145 2179
2146 void CodeStubAssembler::InitializeJSObjectBody(Node* object, Node* map, 2180 void CodeStubAssembler::InitializeJSObjectBody(Node* object, Node* map,
2147 Node* size, int start_offset) { 2181 Node* size, int start_offset) {
2182 CSA_SLOW_ASSERT(this, IsMap(map));
2148 // TODO(cbruni): activate in-object slack tracking machinery. 2183 // TODO(cbruni): activate in-object slack tracking machinery.
2149 Comment("InitializeJSObjectBody"); 2184 Comment("InitializeJSObjectBody");
2150 Node* filler = LoadRoot(Heap::kUndefinedValueRootIndex); 2185 Node* filler = LoadRoot(Heap::kUndefinedValueRootIndex);
2151 // Calculate the untagged field addresses. 2186 // Calculate the untagged field addresses.
2152 object = BitcastTaggedToWord(object); 2187 object = BitcastTaggedToWord(object);
2153 Node* start_address = 2188 Node* start_address =
2154 IntPtrAdd(object, IntPtrConstant(start_offset - kHeapObjectTag)); 2189 IntPtrAdd(object, IntPtrConstant(start_offset - kHeapObjectTag));
2155 Node* end_address = 2190 Node* end_address =
2156 IntPtrSub(IntPtrAdd(object, size), IntPtrConstant(kHeapObjectTag)); 2191 IntPtrSub(IntPtrAdd(object, size), IntPtrConstant(kHeapObjectTag));
2157 StoreFieldsNoWriteBarrier(start_address, end_address, filler); 2192 StoreFieldsNoWriteBarrier(start_address, end_address, filler);
2158 } 2193 }
2159 2194
2160 void CodeStubAssembler::StoreFieldsNoWriteBarrier(Node* start_address, 2195 void CodeStubAssembler::StoreFieldsNoWriteBarrier(Node* start_address,
2161 Node* end_address, 2196 Node* end_address,
2162 Node* value) { 2197 Node* value) {
2163 Comment("StoreFieldsNoWriteBarrier"); 2198 Comment("StoreFieldsNoWriteBarrier");
2164 CSA_ASSERT(this, WordIsWordAligned(start_address)); 2199 CSA_ASSERT(this, WordIsWordAligned(start_address));
2165 CSA_ASSERT(this, WordIsWordAligned(end_address)); 2200 CSA_ASSERT(this, WordIsWordAligned(end_address));
2166 BuildFastLoop(start_address, end_address, 2201 BuildFastLoop(start_address, end_address,
2167 [this, value](Node* current) { 2202 [this, value](Node* current) {
2168 StoreNoWriteBarrier(MachineRepresentation::kTagged, current, 2203 StoreNoWriteBarrier(MachineRepresentation::kTagged, current,
2169 value); 2204 value);
2170 }, 2205 },
2171 kPointerSize, INTPTR_PARAMETERS, IndexAdvanceMode::kPost); 2206 kPointerSize, INTPTR_PARAMETERS, IndexAdvanceMode::kPost);
2172 } 2207 }
2173 2208
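The BuildFastLoop above simply writes the same filler value into every pointer-sized field between the two word-aligned addresses (alignment is what the two assertions check). A plain C++ equivalent of the loop, shown only to make the bounds explicit:

#include <cstdint>

// Illustrative equivalent of the field-filling loop above.
void FillWordsWithValue(uintptr_t* start, uintptr_t* end, uintptr_t value) {
  for (uintptr_t* current = start; current < end; ++current) *current = value;
}
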
2174 Node* CodeStubAssembler::AllocateUninitializedJSArrayWithoutElements( 2209 Node* CodeStubAssembler::AllocateUninitializedJSArrayWithoutElements(
2175 ElementsKind kind, Node* array_map, Node* length, Node* allocation_site) { 2210 ElementsKind kind, Node* array_map, Node* length, Node* allocation_site) {
2176 Comment("begin allocation of JSArray without elements"); 2211 Comment("begin allocation of JSArray without elements");
2212 CSA_SLOW_ASSERT(this, TaggedIsPositiveSmi(length));
2213 CSA_SLOW_ASSERT(this, IsMap(array_map));
2177 int base_size = JSArray::kSize; 2214 int base_size = JSArray::kSize;
2178 if (allocation_site != nullptr) { 2215 if (allocation_site != nullptr) {
2179 base_size += AllocationMemento::kSize; 2216 base_size += AllocationMemento::kSize;
2180 } 2217 }
2181 2218
2182 Node* size = IntPtrConstant(base_size); 2219 Node* size = IntPtrConstant(base_size);
2183 Node* array = AllocateUninitializedJSArray(kind, array_map, length, 2220 Node* array = AllocateUninitializedJSArray(kind, array_map, length,
2184 allocation_site, size); 2221 allocation_site, size);
2185 return array; 2222 return array;
2186 } 2223 }
2187 2224
2188 std::pair<Node*, Node*> 2225 std::pair<Node*, Node*>
2189 CodeStubAssembler::AllocateUninitializedJSArrayWithElements( 2226 CodeStubAssembler::AllocateUninitializedJSArrayWithElements(
2190 ElementsKind kind, Node* array_map, Node* length, Node* allocation_site, 2227 ElementsKind kind, Node* array_map, Node* length, Node* allocation_site,
2191 Node* capacity, ParameterMode capacity_mode) { 2228 Node* capacity, ParameterMode capacity_mode) {
2192 Comment("begin allocation of JSArray with elements"); 2229 Comment("begin allocation of JSArray with elements");
2230 CSA_SLOW_ASSERT(this, TaggedIsPositiveSmi(length));
2231 CSA_SLOW_ASSERT(this, IsMap(array_map));
2193 int base_size = JSArray::kSize; 2232 int base_size = JSArray::kSize;
2194 2233
2195 if (allocation_site != nullptr) { 2234 if (allocation_site != nullptr) {
2196 base_size += AllocationMemento::kSize; 2235 base_size += AllocationMemento::kSize;
2197 } 2236 }
2198 2237
2199 int elements_offset = base_size; 2238 int elements_offset = base_size;
2200 2239
2201 // Compute space for elements 2240 // Compute space for elements
2202 base_size += FixedArray::kHeaderSize; 2241 base_size += FixedArray::kHeaderSize;
2203 Node* size = ElementOffsetFromIndex(capacity, kind, capacity_mode, base_size); 2242 Node* size = ElementOffsetFromIndex(capacity, kind, capacity_mode, base_size);
2204 2243
2205 Node* array = AllocateUninitializedJSArray(kind, array_map, length, 2244 Node* array = AllocateUninitializedJSArray(kind, array_map, length,
2206 allocation_site, size); 2245 allocation_site, size);
2207 2246
2208 Node* elements = InnerAllocate(array, elements_offset); 2247 Node* elements = InnerAllocate(array, elements_offset);
2209 StoreObjectFieldNoWriteBarrier(array, JSObject::kElementsOffset, elements); 2248 StoreObjectFieldNoWriteBarrier(array, JSObject::kElementsOffset, elements);
2210 2249
2211 return {array, elements}; 2250 return {array, elements};
2212 } 2251 }
2213 2252
2214 Node* CodeStubAssembler::AllocateUninitializedJSArray(ElementsKind kind, 2253 Node* CodeStubAssembler::AllocateUninitializedJSArray(ElementsKind kind,
2215 Node* array_map, 2254 Node* array_map,
2216 Node* length, 2255 Node* length,
2217 Node* allocation_site, 2256 Node* allocation_site,
2218 Node* size_in_bytes) { 2257 Node* size_in_bytes) {
2258 CSA_SLOW_ASSERT(this, TaggedIsPositiveSmi(length));
2259 CSA_SLOW_ASSERT(this, IsMap(array_map));
2260
2219 // Allocate space for the JSArray and the elements FixedArray in one go. 2261 // Allocate space for the JSArray and the elements FixedArray in one go.
2220 Node* array = AllocateInNewSpace(size_in_bytes); 2262 Node* array = AllocateInNewSpace(size_in_bytes);
2221 2263
2222 Comment("write JSArray headers"); 2264 Comment("write JSArray headers");
2223 StoreMapNoWriteBarrier(array, array_map); 2265 StoreMapNoWriteBarrier(array, array_map);
2224 2266
2225 CSA_ASSERT(this, TaggedIsSmi(length));
2226 StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length); 2267 StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
2227 2268
2228 StoreObjectFieldRoot(array, JSArray::kPropertiesOffset, 2269 StoreObjectFieldRoot(array, JSArray::kPropertiesOffset,
2229 Heap::kEmptyFixedArrayRootIndex); 2270 Heap::kEmptyFixedArrayRootIndex);
2230 2271
2231 if (allocation_site != nullptr) { 2272 if (allocation_site != nullptr) {
2232 InitializeAllocationMemento(array, JSArray::kSize, allocation_site); 2273 InitializeAllocationMemento(array, JSArray::kSize, allocation_site);
2233 } 2274 }
2234 return array; 2275 return array;
2235 } 2276 }
2236 2277
2237 Node* CodeStubAssembler::AllocateJSArray(ElementsKind kind, Node* array_map, 2278 Node* CodeStubAssembler::AllocateJSArray(ElementsKind kind, Node* array_map,
2238 Node* capacity, Node* length, 2279 Node* capacity, Node* length,
2239 Node* allocation_site, 2280 Node* allocation_site,
2240 ParameterMode capacity_mode) { 2281 ParameterMode capacity_mode) {
2282 CSA_SLOW_ASSERT(this, IsMap(array_map));
2283 CSA_SLOW_ASSERT(this, TaggedIsPositiveSmi(length));
2284 CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity, capacity_mode));
2285
2241 Node *array = nullptr, *elements = nullptr; 2286 Node *array = nullptr, *elements = nullptr;
2242 if (IsIntPtrOrSmiConstantZero(capacity)) { 2287 if (IsIntPtrOrSmiConstantZero(capacity)) {
2243 // Array is empty. Use the shared empty fixed array instead of allocating a 2288 // Array is empty. Use the shared empty fixed array instead of allocating a
2244 // new one. 2289 // new one.
2245 array = AllocateUninitializedJSArrayWithoutElements(kind, array_map, length, 2290 array = AllocateUninitializedJSArrayWithoutElements(kind, array_map, length,
2246 nullptr); 2291 nullptr);
2247 StoreObjectFieldRoot(array, JSArray::kElementsOffset, 2292 StoreObjectFieldRoot(array, JSArray::kElementsOffset,
2248 Heap::kEmptyFixedArrayRootIndex); 2293 Heap::kEmptyFixedArrayRootIndex);
2249 } else { 2294 } else {
2250 // Allocate both array and elements object, and initialize the JSArray. 2295 // Allocate both array and elements object, and initialize the JSArray.
(...skipping 13 matching lines...)
2264 Heap::kTheHoleValueRootIndex, capacity_mode); 2309 Heap::kTheHoleValueRootIndex, capacity_mode);
2265 } 2310 }
2266 2311
2267 return array; 2312 return array;
2268 } 2313 }
2269 2314
2270 Node* CodeStubAssembler::AllocateFixedArray(ElementsKind kind, 2315 Node* CodeStubAssembler::AllocateFixedArray(ElementsKind kind,
2271 Node* capacity_node, 2316 Node* capacity_node,
2272 ParameterMode mode, 2317 ParameterMode mode,
2273 AllocationFlags flags) { 2318 AllocationFlags flags) {
2319 CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity_node, mode));
2274 CSA_ASSERT(this, IntPtrOrSmiGreaterThan(capacity_node, 2320 CSA_ASSERT(this, IntPtrOrSmiGreaterThan(capacity_node,
2275 IntPtrOrSmiConstant(0, mode), mode)); 2321 IntPtrOrSmiConstant(0, mode), mode));
2276 Node* total_size = GetFixedArrayAllocationSize(capacity_node, kind, mode); 2322 Node* total_size = GetFixedArrayAllocationSize(capacity_node, kind, mode);
2277 2323
2278 if (IsFastDoubleElementsKind(kind)) flags |= kDoubleAlignment; 2324 if (IsFastDoubleElementsKind(kind)) flags |= kDoubleAlignment;
2279 // Allocate both array and elements object, and initialize the JSArray. 2325 // Allocate both array and elements object, and initialize the JSArray.
2280 Node* array = Allocate(total_size, flags); 2326 Node* array = Allocate(total_size, flags);
2281 Heap::RootListIndex map_index = IsFastDoubleElementsKind(kind) 2327 Heap::RootListIndex map_index = IsFastDoubleElementsKind(kind)
2282 ? Heap::kFixedDoubleArrayMapRootIndex 2328 ? Heap::kFixedDoubleArrayMapRootIndex
2283 : Heap::kFixedArrayMapRootIndex; 2329 : Heap::kFixedArrayMapRootIndex;
2284 DCHECK(Heap::RootIsImmortalImmovable(map_index)); 2330 DCHECK(Heap::RootIsImmortalImmovable(map_index));
2285 StoreMapNoWriteBarrier(array, map_index); 2331 StoreMapNoWriteBarrier(array, map_index);
2286 StoreObjectFieldNoWriteBarrier(array, FixedArray::kLengthOffset, 2332 StoreObjectFieldNoWriteBarrier(array, FixedArray::kLengthOffset,
2287 ParameterToTagged(capacity_node, mode)); 2333 ParameterToTagged(capacity_node, mode));
2288 return array; 2334 return array;
2289 } 2335 }
2290 2336
2291 void CodeStubAssembler::FillFixedArrayWithValue( 2337 void CodeStubAssembler::FillFixedArrayWithValue(
2292 ElementsKind kind, Node* array, Node* from_node, Node* to_node, 2338 ElementsKind kind, Node* array, Node* from_node, Node* to_node,
2293 Heap::RootListIndex value_root_index, ParameterMode mode) { 2339 Heap::RootListIndex value_root_index, ParameterMode mode) {
2340 CSA_SLOW_ASSERT(this, MatchesParameterMode(from_node, mode));
2341 CSA_SLOW_ASSERT(this, MatchesParameterMode(to_node, mode));
2342 CSA_SLOW_ASSERT(this, IsFixedArrayWithKind(array, kind));
2294 bool is_double = IsFastDoubleElementsKind(kind); 2343 bool is_double = IsFastDoubleElementsKind(kind);
2295 DCHECK(value_root_index == Heap::kTheHoleValueRootIndex || 2344 DCHECK(value_root_index == Heap::kTheHoleValueRootIndex ||
2296 value_root_index == Heap::kUndefinedValueRootIndex); 2345 value_root_index == Heap::kUndefinedValueRootIndex);
2297 DCHECK_IMPLIES(is_double, value_root_index == Heap::kTheHoleValueRootIndex); 2346 DCHECK_IMPLIES(is_double, value_root_index == Heap::kTheHoleValueRootIndex);
2298 STATIC_ASSERT(kHoleNanLower32 == kHoleNanUpper32); 2347 STATIC_ASSERT(kHoleNanLower32 == kHoleNanUpper32);
2299 Node* double_hole = 2348 Node* double_hole =
2300 Is64() ? Int64Constant(kHoleNanInt64) : Int32Constant(kHoleNanLower32); 2349 Is64() ? Int64Constant(kHoleNanInt64) : Int32Constant(kHoleNanLower32);
2301 Node* value = LoadRoot(value_root_index); 2350 Node* value = LoadRoot(value_root_index);
2302 2351
2303 BuildFastFixedArrayForEach( 2352 BuildFastFixedArrayForEach(
(...skipping 23 matching lines...)
2327 value); 2376 value);
2328 } 2377 }
2329 }, 2378 },
2330 mode); 2379 mode);
2331 } 2380 }
2332 2381
2333 void CodeStubAssembler::CopyFixedArrayElements( 2382 void CodeStubAssembler::CopyFixedArrayElements(
2334 ElementsKind from_kind, Node* from_array, ElementsKind to_kind, 2383 ElementsKind from_kind, Node* from_array, ElementsKind to_kind,
2335 Node* to_array, Node* element_count, Node* capacity, 2384 Node* to_array, Node* element_count, Node* capacity,
2336 WriteBarrierMode barrier_mode, ParameterMode mode) { 2385 WriteBarrierMode barrier_mode, ParameterMode mode) {
2386 CSA_SLOW_ASSERT(this, MatchesParameterMode(element_count, mode));
2387 CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity, mode));
2388 CSA_SLOW_ASSERT(this, IsFixedArrayWithKindOrEmpty(from_array, from_kind));
2389 CSA_SLOW_ASSERT(this, IsFixedArrayWithKindOrEmpty(to_array, to_kind));
2337 STATIC_ASSERT(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize); 2390 STATIC_ASSERT(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize);
2338 const int first_element_offset = FixedArray::kHeaderSize - kHeapObjectTag; 2391 const int first_element_offset = FixedArray::kHeaderSize - kHeapObjectTag;
2339 Comment("[ CopyFixedArrayElements"); 2392 Comment("[ CopyFixedArrayElements");
2340 2393
2341 // Typed array elements are not supported. 2394 // Typed array elements are not supported.
2342 DCHECK(!IsFixedTypedArrayElementsKind(from_kind)); 2395 DCHECK(!IsFixedTypedArrayElementsKind(from_kind));
2343 DCHECK(!IsFixedTypedArrayElementsKind(to_kind)); 2396 DCHECK(!IsFixedTypedArrayElementsKind(to_kind));
2344 2397
2345 Label done(this); 2398 Label done(this);
2346 bool from_double_elements = IsFastDoubleElementsKind(from_kind); 2399 bool from_double_elements = IsFastDoubleElementsKind(from_kind);
(...skipping 116 matching lines...)
2463 IncrementCounter(isolate()->counters()->inlined_copied_elements(), 1); 2516 IncrementCounter(isolate()->counters()->inlined_copied_elements(), 1);
2464 Comment("] CopyFixedArrayElements"); 2517 Comment("] CopyFixedArrayElements");
2465 } 2518 }
2466 2519
2467 void CodeStubAssembler::CopyStringCharacters(Node* from_string, Node* to_string, 2520 void CodeStubAssembler::CopyStringCharacters(Node* from_string, Node* to_string,
2468 Node* from_index, Node* to_index, 2521 Node* from_index, Node* to_index,
2469 Node* character_count, 2522 Node* character_count,
2470 String::Encoding from_encoding, 2523 String::Encoding from_encoding,
2471 String::Encoding to_encoding, 2524 String::Encoding to_encoding,
2472 ParameterMode mode) { 2525 ParameterMode mode) {
2526 // Cannot assert IsString(from_string) and IsString(to_string) here because
2527 // CSA::SubString can pass in faked sequential strings when handling external
2528 // subject strings.
2529 CSA_SLOW_ASSERT(this, MatchesParameterMode(character_count, mode));
2530 CSA_SLOW_ASSERT(this, MatchesParameterMode(from_index, mode));
2531 CSA_SLOW_ASSERT(this, MatchesParameterMode(to_index, mode));
2473 bool from_one_byte = from_encoding == String::ONE_BYTE_ENCODING; 2532 bool from_one_byte = from_encoding == String::ONE_BYTE_ENCODING;
2474 bool to_one_byte = to_encoding == String::ONE_BYTE_ENCODING; 2533 bool to_one_byte = to_encoding == String::ONE_BYTE_ENCODING;
2475 DCHECK_IMPLIES(to_one_byte, from_one_byte); 2534 DCHECK_IMPLIES(to_one_byte, from_one_byte);
2476 Comment("CopyStringCharacters %s -> %s", 2535 Comment("CopyStringCharacters %s -> %s",
2477 from_one_byte ? "ONE_BYTE_ENCODING" : "TWO_BYTE_ENCODING", 2536 from_one_byte ? "ONE_BYTE_ENCODING" : "TWO_BYTE_ENCODING",
2478 to_one_byte ? "ONE_BYTE_ENCODING" : "TWO_BYTE_ENCODING"); 2537 to_one_byte ? "ONE_BYTE_ENCODING" : "TWO_BYTE_ENCODING");
2479 2538
2480 ElementsKind from_kind = from_one_byte ? UINT8_ELEMENTS : UINT16_ELEMENTS; 2539 ElementsKind from_kind = from_one_byte ? UINT8_ELEMENTS : UINT16_ELEMENTS;
2481 ElementsKind to_kind = to_one_byte ? UINT8_ELEMENTS : UINT16_ELEMENTS; 2540 ElementsKind to_kind = to_one_byte ? UINT8_ELEMENTS : UINT16_ELEMENTS;
2482 STATIC_ASSERT(SeqOneByteString::kHeaderSize == SeqTwoByteString::kHeaderSize); 2541 STATIC_ASSERT(SeqOneByteString::kHeaderSize == SeqTwoByteString::kHeaderSize);
(...skipping 38 matching lines...)
2521 } 2580 }
2522 }, 2581 },
2523 from_increment, INTPTR_PARAMETERS, IndexAdvanceMode::kPost); 2582 from_increment, INTPTR_PARAMETERS, IndexAdvanceMode::kPost);
2524 } 2583 }
2525 2584
2526 Node* CodeStubAssembler::LoadElementAndPrepareForStore(Node* array, 2585 Node* CodeStubAssembler::LoadElementAndPrepareForStore(Node* array,
2527 Node* offset, 2586 Node* offset,
2528 ElementsKind from_kind, 2587 ElementsKind from_kind,
2529 ElementsKind to_kind, 2588 ElementsKind to_kind,
2530 Label* if_hole) { 2589 Label* if_hole) {
2590 CSA_SLOW_ASSERT(this, IsFixedArrayWithKind(array, from_kind));
2531 if (IsFastDoubleElementsKind(from_kind)) { 2591 if (IsFastDoubleElementsKind(from_kind)) {
2532 Node* value = 2592 Node* value =
2533 LoadDoubleWithHoleCheck(array, offset, if_hole, MachineType::Float64()); 2593 LoadDoubleWithHoleCheck(array, offset, if_hole, MachineType::Float64());
2534 if (!IsFastDoubleElementsKind(to_kind)) { 2594 if (!IsFastDoubleElementsKind(to_kind)) {
2535 value = AllocateHeapNumberWithValue(value); 2595 value = AllocateHeapNumberWithValue(value);
2536 } 2596 }
2537 return value; 2597 return value;
2538 2598
2539 } else { 2599 } else {
2540 Node* value = Load(MachineType::AnyTagged(), array, offset); 2600 Node* value = Load(MachineType::AnyTagged(), array, offset);
2541 if (if_hole) { 2601 if (if_hole) {
2542 GotoIf(WordEqual(value, TheHoleConstant()), if_hole); 2602 GotoIf(WordEqual(value, TheHoleConstant()), if_hole);
2543 } 2603 }
2544 if (IsFastDoubleElementsKind(to_kind)) { 2604 if (IsFastDoubleElementsKind(to_kind)) {
2545 if (IsFastSmiElementsKind(from_kind)) { 2605 if (IsFastSmiElementsKind(from_kind)) {
2546 value = SmiToFloat64(value); 2606 value = SmiToFloat64(value);
2547 } else { 2607 } else {
2548 value = LoadHeapNumberValue(value); 2608 value = LoadHeapNumberValue(value);
2549 } 2609 }
2550 } 2610 }
2551 return value; 2611 return value;
2552 } 2612 }
2553 } 2613 }
2554 2614
2555 Node* CodeStubAssembler::CalculateNewElementsCapacity(Node* old_capacity, 2615 Node* CodeStubAssembler::CalculateNewElementsCapacity(Node* old_capacity,
2556 ParameterMode mode) { 2616 ParameterMode mode) {
2617 CSA_SLOW_ASSERT(this, MatchesParameterMode(old_capacity, mode));
2557 Node* half_old_capacity = WordOrSmiShr(old_capacity, 1, mode); 2618 Node* half_old_capacity = WordOrSmiShr(old_capacity, 1, mode);
2558 Node* new_capacity = IntPtrOrSmiAdd(half_old_capacity, old_capacity, mode); 2619 Node* new_capacity = IntPtrOrSmiAdd(half_old_capacity, old_capacity, mode);
2559 Node* padding = IntPtrOrSmiConstant(16, mode); 2620 Node* padding = IntPtrOrSmiConstant(16, mode);
2560 return IntPtrOrSmiAdd(new_capacity, padding, mode); 2621 return IntPtrOrSmiAdd(new_capacity, padding, mode);
2561 } 2622 }
2562 2623
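CalculateNewElementsCapacity() encodes the backing-store growth policy: new capacity = old capacity + old capacity / 2 + 16. A tiny scalar illustration with concrete numbers:

// Growth policy from the helper above (illustrative scalar version).
int CalculateNewCapacity(int old_capacity) {
  return old_capacity + (old_capacity >> 1) + 16;  // 1.5x plus 16 slots
}
// e.g. CalculateNewCapacity(16) == 16 + 8 + 16 == 40
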
2563 Node* CodeStubAssembler::TryGrowElementsCapacity(Node* object, Node* elements, 2624 Node* CodeStubAssembler::TryGrowElementsCapacity(Node* object, Node* elements,
2564 ElementsKind kind, Node* key, 2625 ElementsKind kind, Node* key,
2565 Label* bailout) { 2626 Label* bailout) {
2627 CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object));
2628 CSA_SLOW_ASSERT(this, IsFixedArrayWithKindOrEmpty(elements, kind));
2629 CSA_SLOW_ASSERT(this, TaggedIsSmi(key));
2566 Node* capacity = LoadFixedArrayBaseLength(elements); 2630 Node* capacity = LoadFixedArrayBaseLength(elements);
2567 2631
2568 ParameterMode mode = OptimalParameterMode(); 2632 ParameterMode mode = OptimalParameterMode();
2569 capacity = TaggedToParameter(capacity, mode); 2633 capacity = TaggedToParameter(capacity, mode);
2570 key = TaggedToParameter(key, mode); 2634 key = TaggedToParameter(key, mode);
2571 2635
2572 return TryGrowElementsCapacity(object, elements, kind, key, capacity, mode, 2636 return TryGrowElementsCapacity(object, elements, kind, key, capacity, mode,
2573 bailout); 2637 bailout);
2574 } 2638 }
2575 2639
2576 Node* CodeStubAssembler::TryGrowElementsCapacity(Node* object, Node* elements, 2640 Node* CodeStubAssembler::TryGrowElementsCapacity(Node* object, Node* elements,
2577 ElementsKind kind, Node* key, 2641 ElementsKind kind, Node* key,
2578 Node* capacity, 2642 Node* capacity,
2579 ParameterMode mode, 2643 ParameterMode mode,
2580 Label* bailout) { 2644 Label* bailout) {
2581 Comment("TryGrowElementsCapacity"); 2645 Comment("TryGrowElementsCapacity");
2646 CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object));
2647 CSA_SLOW_ASSERT(this, IsFixedArrayWithKindOrEmpty(elements, kind));
2648 CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity, mode));
2649 CSA_SLOW_ASSERT(this, MatchesParameterMode(key, mode));
2582 2650
2583 // If the gap growth is too big, fall back to the runtime. 2651 // If the gap growth is too big, fall back to the runtime.
2584 Node* max_gap = IntPtrOrSmiConstant(JSObject::kMaxGap, mode); 2652 Node* max_gap = IntPtrOrSmiConstant(JSObject::kMaxGap, mode);
2585 Node* max_capacity = IntPtrOrSmiAdd(capacity, max_gap, mode); 2653 Node* max_capacity = IntPtrOrSmiAdd(capacity, max_gap, mode);
2586 GotoIf(UintPtrOrSmiGreaterThanOrEqual(key, max_capacity, mode), bailout); 2654 GotoIf(UintPtrOrSmiGreaterThanOrEqual(key, max_capacity, mode), bailout);
2587 2655
2588 // Calculate the capacity of the new backing store. 2656 // Calculate the capacity of the new backing store.
2589 Node* new_capacity = CalculateNewElementsCapacity( 2657 Node* new_capacity = CalculateNewElementsCapacity(
2590 IntPtrOrSmiAdd(key, IntPtrOrSmiConstant(1, mode), mode), mode); 2658 IntPtrOrSmiAdd(key, IntPtrOrSmiConstant(1, mode), mode), mode);
2591 return GrowElementsCapacity(object, elements, kind, kind, capacity, 2659 return GrowElementsCapacity(object, elements, kind, kind, capacity,
2592 new_capacity, mode, bailout); 2660 new_capacity, mode, bailout);
2593 } 2661 }
2594 2662
2595 Node* CodeStubAssembler::GrowElementsCapacity( 2663 Node* CodeStubAssembler::GrowElementsCapacity(
2596 Node* object, Node* elements, ElementsKind from_kind, ElementsKind to_kind, 2664 Node* object, Node* elements, ElementsKind from_kind, ElementsKind to_kind,
2597 Node* capacity, Node* new_capacity, ParameterMode mode, Label* bailout) { 2665 Node* capacity, Node* new_capacity, ParameterMode mode, Label* bailout) {
2598 Comment("[ GrowElementsCapacity"); 2666 Comment("[ GrowElementsCapacity");
2667 CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object));
2668 CSA_SLOW_ASSERT(this, IsFixedArrayWithKindOrEmpty(elements, from_kind));
2669 CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity, mode));
2670 CSA_SLOW_ASSERT(this, MatchesParameterMode(new_capacity, mode));
2671
2599 // If size of the allocation for the new capacity doesn't fit in a page 2672 // If size of the allocation for the new capacity doesn't fit in a page
2600 // that we can bump-pointer allocate from, fall back to the runtime. 2673 // that we can bump-pointer allocate from, fall back to the runtime.
2601 int max_size = FixedArrayBase::GetMaxLengthForNewSpaceAllocation(to_kind); 2674 int max_size = FixedArrayBase::GetMaxLengthForNewSpaceAllocation(to_kind);
2602 GotoIf(UintPtrOrSmiGreaterThanOrEqual( 2675 GotoIf(UintPtrOrSmiGreaterThanOrEqual(
2603 new_capacity, IntPtrOrSmiConstant(max_size, mode), mode), 2676 new_capacity, IntPtrOrSmiConstant(max_size, mode), mode),
2604 bailout); 2677 bailout);
2605 2678
2606 // Allocate the new backing store. 2679 // Allocate the new backing store.
2607 Node* new_elements = AllocateFixedArray(to_kind, new_capacity, mode); 2680 Node* new_elements = AllocateFixedArray(to_kind, new_capacity, mode);
2608 2681
(...skipping 315 matching lines...)
2924 // The {value} is a Smi, convert it to a String. 2997 // The {value} is a Smi, convert it to a String.
2925 Callable callable = CodeFactory::NumberToString(isolate()); 2998 Callable callable = CodeFactory::NumberToString(isolate());
2926 var_value.Bind(CallStub(callable, context, value)); 2999 var_value.Bind(CallStub(callable, context, value));
2927 Goto(&if_valueisstring); 3000 Goto(&if_valueisstring);
2928 } 3001 }
2929 BIND(&if_valueisstring); 3002 BIND(&if_valueisstring);
2930 return var_value.value(); 3003 return var_value.value();
2931 } 3004 }
2932 3005
2933 Node* CodeStubAssembler::ChangeNumberToFloat64(Node* value) { 3006 Node* CodeStubAssembler::ChangeNumberToFloat64(Node* value) {
3007 CSA_SLOW_ASSERT(this, IsNumber(value));
2934 VARIABLE(result, MachineRepresentation::kFloat64); 3008 VARIABLE(result, MachineRepresentation::kFloat64);
2935 Label smi(this); 3009 Label smi(this);
2936 Label done(this, &result); 3010 Label done(this, &result);
2937 GotoIf(TaggedIsSmi(value), &smi); 3011 GotoIf(TaggedIsSmi(value), &smi);
2938 result.Bind( 3012 result.Bind(
2939 LoadObjectField(value, HeapNumber::kValueOffset, MachineType::Float64())); 3013 LoadObjectField(value, HeapNumber::kValueOffset, MachineType::Float64()));
2940 Goto(&done); 3014 Goto(&done);
2941 3015
2942 BIND(&smi); 3016 BIND(&smi);
2943 { 3017 {
2944 result.Bind(SmiToFloat64(value)); 3018 result.Bind(SmiToFloat64(value));
2945 Goto(&done); 3019 Goto(&done);
2946 } 3020 }
2947 3021
2948 BIND(&done); 3022 BIND(&done);
2949 return result.value(); 3023 return result.value();
2950 } 3024 }
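
Read as plain C++, the conversion above amounts to the following (editor's sketch assuming V8's object headers; the real helper stays in CSA form):

// Editor's sketch, assuming |number| already passed the IsNumber assert above.
double ChangeNumberToFloat64Sketch(Object* number) {
  if (number->IsSmi()) return Smi::cast(number)->value();  // untag the small integer
  return HeapNumber::cast(number)->value();                // load the boxed double payload
}
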
2951 3025
2952 Node* CodeStubAssembler::ChangeNumberToIntPtr(Node* value) { 3026 Node* CodeStubAssembler::ChangeNumberToIntPtr(Node* value) {
3027 CSA_SLOW_ASSERT(this, IsNumber(value));
2953 VARIABLE(result, MachineType::PointerRepresentation()); 3028 VARIABLE(result, MachineType::PointerRepresentation());
2954 Label smi(this), done(this, &result); 3029 Label smi(this), done(this, &result);
2955 GotoIf(TaggedIsSmi(value), &smi); 3030 GotoIf(TaggedIsSmi(value), &smi);
2956 3031
2957 CSA_ASSERT(this, IsHeapNumber(value)); 3032 CSA_ASSERT(this, IsHeapNumber(value));
2958 result.Bind(ChangeFloat64ToUintPtr(LoadHeapNumberValue(value))); 3033 result.Bind(ChangeFloat64ToUintPtr(LoadHeapNumberValue(value)));
2959 Goto(&done); 3034 Goto(&done);
2960 3035
2961 BIND(&smi); 3036 BIND(&smi);
2962 result.Bind(SmiToWord(value)); 3037 result.Bind(SmiToWord(value));
(...skipping 121 matching lines...)
3084 3159
3085 BIND(&out); 3160 BIND(&out);
3086 return var_value_map.value(); 3161 return var_value_map.value();
3087 } 3162 }
3088 3163
3089 Node* CodeStubAssembler::InstanceTypeEqual(Node* instance_type, int type) { 3164 Node* CodeStubAssembler::InstanceTypeEqual(Node* instance_type, int type) {
3090 return Word32Equal(instance_type, Int32Constant(type)); 3165 return Word32Equal(instance_type, Int32Constant(type));
3091 } 3166 }
3092 3167
3093 Node* CodeStubAssembler::IsSpecialReceiverMap(Node* map) { 3168 Node* CodeStubAssembler::IsSpecialReceiverMap(Node* map) {
3169 CSA_SLOW_ASSERT(this, IsMap(map));
3094 Node* is_special = IsSpecialReceiverInstanceType(LoadMapInstanceType(map)); 3170 Node* is_special = IsSpecialReceiverInstanceType(LoadMapInstanceType(map));
3095 uint32_t mask = 3171 uint32_t mask =
3096 1 << Map::kHasNamedInterceptor | 1 << Map::kIsAccessCheckNeeded; 3172 1 << Map::kHasNamedInterceptor | 1 << Map::kIsAccessCheckNeeded;
3097 USE(mask); 3173 USE(mask);
3098 // Interceptors or access checks imply special receiver. 3174 // Interceptors or access checks imply special receiver.
3099 CSA_ASSERT(this, 3175 CSA_ASSERT(this,
3100 SelectConstant(IsSetWord32(LoadMapBitField(map), mask), is_special, 3176 SelectConstant(IsSetWord32(LoadMapBitField(map), mask), is_special,
3101 Int32Constant(1), MachineRepresentation::kWord32)); 3177 Int32Constant(1), MachineRepresentation::kWord32));
3102 return is_special; 3178 return is_special;
3103 } 3179 }
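
The SelectConstant assert above encodes a logical implication: a map with a named interceptor or an access check must also be classified as a special receiver. A sketch of the encoding it relies on (editor's illustration only):

// "a implies b" written without a branch: when a is false the result is trivially
// true, so the assert can only fail for an interceptor/access-check map that is
// not recognized as a special receiver.
static bool ImpliesSketch(bool a, bool b) { return a ? b : true; }
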
(...skipping 106 matching lines...)
3210 3286
3211 Node* CodeStubAssembler::IsJSObject(Node* object) { 3287 Node* CodeStubAssembler::IsJSObject(Node* object) {
3212 return IsJSObjectMap(LoadMap(object)); 3288 return IsJSObjectMap(LoadMap(object));
3213 } 3289 }
3214 3290
3215 Node* CodeStubAssembler::IsJSGlobalProxy(Node* object) { 3291 Node* CodeStubAssembler::IsJSGlobalProxy(Node* object) {
3216 return Word32Equal(LoadInstanceType(object), 3292 return Word32Equal(LoadInstanceType(object),
3217 Int32Constant(JS_GLOBAL_PROXY_TYPE)); 3293 Int32Constant(JS_GLOBAL_PROXY_TYPE));
3218 } 3294 }
3219 3295
3220 Node* CodeStubAssembler::IsMap(Node* map) { 3296 Node* CodeStubAssembler::IsMap(Node* map) { return IsMetaMap(LoadMap(map)); }
3221 return HasInstanceType(map, MAP_TYPE);
3222 }
3223 3297
3224 Node* CodeStubAssembler::IsJSValueInstanceType(Node* instance_type) { 3298 Node* CodeStubAssembler::IsJSValueInstanceType(Node* instance_type) {
3225 return Word32Equal(instance_type, Int32Constant(JS_VALUE_TYPE)); 3299 return Word32Equal(instance_type, Int32Constant(JS_VALUE_TYPE));
3226 } 3300 }
3227 3301
3228 Node* CodeStubAssembler::IsJSValue(Node* object) { 3302 Node* CodeStubAssembler::IsJSValue(Node* object) {
3229 return IsJSValueMap(LoadMap(object)); 3303 return IsJSValueMap(LoadMap(object));
3230 } 3304 }
3231 3305
3232 Node* CodeStubAssembler::IsJSValueMap(Node* map) { 3306 Node* CodeStubAssembler::IsJSValueMap(Node* map) {
3233 return IsJSValueInstanceType(LoadMapInstanceType(map)); 3307 return IsJSValueInstanceType(LoadMapInstanceType(map));
3234 } 3308 }
3235 3309
3236 Node* CodeStubAssembler::IsJSArrayInstanceType(Node* instance_type) { 3310 Node* CodeStubAssembler::IsJSArrayInstanceType(Node* instance_type) {
3237 return Word32Equal(instance_type, Int32Constant(JS_ARRAY_TYPE)); 3311 return Word32Equal(instance_type, Int32Constant(JS_ARRAY_TYPE));
3238 } 3312 }
3239 3313
3240 Node* CodeStubAssembler::IsJSArray(Node* object) { 3314 Node* CodeStubAssembler::IsJSArray(Node* object) {
3241 return IsJSArrayMap(LoadMap(object)); 3315 return IsJSArrayMap(LoadMap(object));
3242 } 3316 }
3243 3317
3244 Node* CodeStubAssembler::IsJSArrayMap(Node* map) { 3318 Node* CodeStubAssembler::IsJSArrayMap(Node* map) {
3245 return IsJSArrayInstanceType(LoadMapInstanceType(map)); 3319 return IsJSArrayInstanceType(LoadMapInstanceType(map));
3246 } 3320 }
3247 3321
3322 Node* CodeStubAssembler::IsFixedArray(Node* object) {
3323 return HasInstanceType(object, FIXED_ARRAY_TYPE);
3324 }
3325
3326 // This complicated check is due to elements oddities. If a smi array is empty
3327 // after Array.p.shift, it is replaced by the empty array constant. If it is
3328 // later filled with a double element, we try to grow it but pass in a double
3329 // elements kind. Usually this would cause a size mismatch (since the source
3330 // fixed array has FAST_HOLEY_ELEMENTS and destination has
3331 // FAST_HOLEY_DOUBLE_ELEMENTS), but we don't have to worry about it when the
3332 // source array is empty.
3333 // TODO(jgruber): It might be worth creating an empty_double_array constant to
3334 // simplify this case.
3335 Node* CodeStubAssembler::IsFixedArrayWithKindOrEmpty(Node* object,
3336 ElementsKind kind) {
3337 Label out(this);
3338 VARIABLE(var_result, MachineRepresentation::kWord32, Int32Constant(1));
3339
3340 GotoIf(IsFixedArrayWithKind(object, kind), &out);
3341
3342 Node* const length = LoadFixedArrayBaseLength(object);
3343 GotoIf(SmiEqual(length, SmiConstant(0)), &out);
3344
3345 var_result.Bind(Int32Constant(0));
3346 Goto(&out);
3347
3348 BIND(&out);
3349 return var_result.value();
3350 }
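
A plain-C++ rendering of the predicate above (editor's sketch assuming V8's object headers; the first helper mirrors IsFixedArrayWithKind, which is defined next):

// Editor's sketch, not part of this CL.
static bool IsFixedArrayWithKindSketch(FixedArrayBase* array, ElementsKind kind) {
  return IsFastDoubleElementsKind(kind) ? array->IsFixedDoubleArray()
                                        : array->IsFixedArray();
}
static bool IsFixedArrayWithKindOrEmptySketch(FixedArrayBase* array, ElementsKind kind) {
  if (IsFixedArrayWithKindSketch(array, kind)) return true;  // matching backing store
  return array->length() == 0;  // the empty fixed array is accepted for any kind
}
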
3351
3352 Node* CodeStubAssembler::IsFixedArrayWithKind(Node* object, ElementsKind kind) {
3353 if (IsFastDoubleElementsKind(kind)) {
3354 return IsFixedDoubleArray(object);
3355 } else {
3356 DCHECK(IsFastSmiOrObjectElementsKind(kind));
3357 return IsFixedArray(object);
3358 }
3359 }
3360
3248 Node* CodeStubAssembler::IsWeakCell(Node* object) { 3361 Node* CodeStubAssembler::IsWeakCell(Node* object) {
3249 return IsWeakCellMap(LoadMap(object)); 3362 return IsWeakCellMap(LoadMap(object));
3250 } 3363 }
3251 3364
3252 Node* CodeStubAssembler::IsBoolean(Node* object) { 3365 Node* CodeStubAssembler::IsBoolean(Node* object) {
3253 return IsBooleanMap(LoadMap(object)); 3366 return IsBooleanMap(LoadMap(object));
3254 } 3367 }
3255 3368
3256 Node* CodeStubAssembler::IsPropertyCell(Node* object) { 3369 Node* CodeStubAssembler::IsPropertyCell(Node* object) {
3257 return IsPropertyCellMap(LoadMap(object)); 3370 return IsPropertyCellMap(LoadMap(object));
3258 } 3371 }
3259 3372
3260 Node* CodeStubAssembler::IsAccessorInfo(Node* object) { 3373 Node* CodeStubAssembler::IsAccessorInfo(Node* object) {
3261 return IsAccessorInfoMap(LoadMap(object)); 3374 return IsAccessorInfoMap(LoadMap(object));
3262 } 3375 }
3263 3376
3264 Node* CodeStubAssembler::IsAccessorPair(Node* object) { 3377 Node* CodeStubAssembler::IsAccessorPair(Node* object) {
3265 return IsAccessorPairMap(LoadMap(object)); 3378 return IsAccessorPairMap(LoadMap(object));
3266 } 3379 }
3267 3380
3381 Node* CodeStubAssembler::IsAnyHeapNumber(Node* object) {
3382 return Word32Or(IsMutableHeapNumber(object), IsHeapNumber(object));
3383 }
3384
3268 Node* CodeStubAssembler::IsHeapNumber(Node* object) { 3385 Node* CodeStubAssembler::IsHeapNumber(Node* object) {
3269 return IsHeapNumberMap(LoadMap(object)); 3386 return IsHeapNumberMap(LoadMap(object));
3270 } 3387 }
3271 3388
3272 Node* CodeStubAssembler::IsMutableHeapNumber(Node* object) { 3389 Node* CodeStubAssembler::IsMutableHeapNumber(Node* object) {
3273 return IsMutableHeapNumberMap(LoadMap(object)); 3390 return IsMutableHeapNumberMap(LoadMap(object));
3274 } 3391 }
3275 3392
3276 Node* CodeStubAssembler::IsFeedbackVector(Node* object) { 3393 Node* CodeStubAssembler::IsFeedbackVector(Node* object) {
3277 return IsFeedbackVectorMap(LoadMap(object)); 3394 return IsFeedbackVectorMap(LoadMap(object));
(...skipping 119 matching lines...)
3397 [=] { return TaggedIsPositiveSmi(number); }, 3514 [=] { return TaggedIsPositiveSmi(number); },
3398 [=] { 3515 [=] {
3399 Node* v = LoadHeapNumberValue(number); 3516 Node* v = LoadHeapNumberValue(number);
3400 return Float64GreaterThanOrEqual(v, float_zero); 3517 return Float64GreaterThanOrEqual(v, float_zero);
3401 }, 3518 },
3402 MachineRepresentation::kWord32); 3519 MachineRepresentation::kWord32);
3403 } 3520 }
3404 3521
3405 Node* CodeStubAssembler::StringCharCodeAt(Node* string, Node* index, 3522 Node* CodeStubAssembler::StringCharCodeAt(Node* string, Node* index,
3406 ParameterMode parameter_mode) { 3523 ParameterMode parameter_mode) {
3407 if (parameter_mode == SMI_PARAMETERS) CSA_ASSERT(this, TaggedIsSmi(index)); 3524 CSA_ASSERT(this, MatchesParameterMode(index, parameter_mode));
3408 CSA_ASSERT(this, IsString(string)); 3525 CSA_ASSERT(this, IsString(string));
3409 3526
3410 // Translate the {index} into a Word. 3527 // Translate the {index} into a Word.
3411 Node* const int_index = ParameterToWord(index, parameter_mode); 3528 Node* const int_index = ParameterToWord(index, parameter_mode);
3412 CSA_ASSERT(this, IntPtrGreaterThanOrEqual(int_index, IntPtrConstant(0))); 3529 CSA_ASSERT(this, IntPtrGreaterThanOrEqual(int_index, IntPtrConstant(0)));
3413 3530
3414 VARIABLE(var_result, MachineRepresentation::kWord32); 3531 VARIABLE(var_result, MachineRepresentation::kWord32);
3415 3532
3416 Label out(this, &var_result), runtime_generic(this), runtime_external(this); 3533 Label out(this, &var_result), runtime_generic(this), runtime_external(this);
3417 3534
(...skipping 91 matching lines...)
3509 // Allocate a new SeqTwoByteString for {code}. 3626 // Allocate a new SeqTwoByteString for {code}.
3510 Node* result = AllocateSeqTwoByteString(1); 3627 Node* result = AllocateSeqTwoByteString(1);
3511 StoreNoWriteBarrier( 3628 StoreNoWriteBarrier(
3512 MachineRepresentation::kWord16, result, 3629 MachineRepresentation::kWord16, result,
3513 IntPtrConstant(SeqTwoByteString::kHeaderSize - kHeapObjectTag), code); 3630 IntPtrConstant(SeqTwoByteString::kHeaderSize - kHeapObjectTag), code);
3514 var_result.Bind(result); 3631 var_result.Bind(result);
3515 Goto(&if_done); 3632 Goto(&if_done);
3516 } 3633 }
3517 3634
3518 BIND(&if_done); 3635 BIND(&if_done);
3636 CSA_ASSERT(this, IsString(var_result.value()));
3519 return var_result.value(); 3637 return var_result.value();
3520 } 3638 }
3521 3639
3522 // A wrapper around CopyStringCharacters which determines the correct string 3640 // A wrapper around CopyStringCharacters which determines the correct string
3523 // encoding, allocates a corresponding sequential string, and then copies the 3641 // encoding, allocates a corresponding sequential string, and then copies the
3524 // given character range using CopyStringCharacters. 3642 // given character range using CopyStringCharacters.
3525 // |from_string| must be a sequential string. |from_index| and 3643 // |from_string| must be a sequential string. |from_index| and
3526 // |character_count| must be Smis s.t. 3644 // |character_count| must be Smis s.t.
3527 // 0 <= |from_index| <= |from_index| + |character_count| < from_string.length. 3645 // 0 <= |from_index| <= |from_index| + |character_count| < from_string.length.
3528 Node* CodeStubAssembler::AllocAndCopyStringCharacters(Node* context, Node* from, 3646 Node* CodeStubAssembler::AllocAndCopyStringCharacters(Node* context, Node* from,
(...skipping 182 matching lines...)
3711 3829
3712 // Fall back to a runtime call. 3830 // Fall back to a runtime call.
3713 BIND(&runtime); 3831 BIND(&runtime);
3714 { 3832 {
3715 var_result.Bind( 3833 var_result.Bind(
3716 CallRuntime(Runtime::kSubString, context, string, from, to)); 3834 CallRuntime(Runtime::kSubString, context, string, from, to));
3717 Goto(&end); 3835 Goto(&end);
3718 } 3836 }
3719 3837
3720 BIND(&end); 3838 BIND(&end);
3839 CSA_ASSERT(this, IsString(var_result.value()));
3721 return var_result.value(); 3840 return var_result.value();
3722 } 3841 }
3723 3842
3724 ToDirectStringAssembler::ToDirectStringAssembler( 3843 ToDirectStringAssembler::ToDirectStringAssembler(
3725 compiler::CodeAssemblerState* state, Node* string, Flags flags) 3844 compiler::CodeAssemblerState* state, Node* string, Flags flags)
3726 : CodeStubAssembler(state), 3845 : CodeStubAssembler(state),
3727 var_string_(this, MachineRepresentation::kTagged, string), 3846 var_string_(this, MachineRepresentation::kTagged, string),
3728 var_instance_type_(this, MachineRepresentation::kWord32), 3847 var_instance_type_(this, MachineRepresentation::kWord32),
3729 var_offset_(this, MachineType::PointerRepresentation()), 3848 var_offset_(this, MachineType::PointerRepresentation()),
3730 var_is_external_(this, MachineRepresentation::kWord32), 3849 var_is_external_(this, MachineRepresentation::kWord32),
(...skipping 127 matching lines...)
3858 kHeapObjectTag)); 3977 kHeapObjectTag));
3859 } 3978 }
3860 var_result.Bind(result); 3979 var_result.Bind(result);
3861 Goto(&out); 3980 Goto(&out);
3862 } 3981 }
3863 3982
3864 BIND(&out); 3983 BIND(&out);
3865 return var_result.value(); 3984 return var_result.value();
3866 } 3985 }
3867 3986
3868 Node* CodeStubAssembler::TryDerefExternalString(Node* const string,
3869 Node* const instance_type,
3870 Label* if_bailout) {
3871 Label out(this);
3872
3873 CSA_ASSERT(this, IsExternalStringInstanceType(instance_type));
3874 GotoIf(IsShortExternalStringInstanceType(instance_type), if_bailout);
3875
3876 // Move the pointer so that offset-wise, it looks like a sequential string.
3877 STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
3878
3879 Node* resource_data = LoadObjectField(
3880 string, ExternalString::kResourceDataOffset, MachineType::Pointer());
3881 Node* const fake_sequential_string =
3882 IntPtrSub(resource_data,
3883 IntPtrConstant(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
3884
3885 return fake_sequential_string;
3886 }
3887
3888 void CodeStubAssembler::BranchIfCanDerefIndirectString(Node* string, 3987 void CodeStubAssembler::BranchIfCanDerefIndirectString(Node* string,
3889 Node* instance_type, 3988 Node* instance_type,
3890 Label* can_deref, 3989 Label* can_deref,
3891 Label* cannot_deref) { 3990 Label* cannot_deref) {
3892 CSA_ASSERT(this, IsString(string)); 3991 CSA_ASSERT(this, IsString(string));
3893 Node* representation = 3992 Node* representation =
3894 Word32And(instance_type, Int32Constant(kStringRepresentationMask)); 3993 Word32And(instance_type, Int32Constant(kStringRepresentationMask));
3895 GotoIf(Word32Equal(representation, Int32Constant(kThinStringTag)), can_deref); 3994 GotoIf(Word32Equal(representation, Int32Constant(kThinStringTag)), can_deref);
3896 GotoIf(Word32NotEqual(representation, Int32Constant(kConsStringTag)), 3995 GotoIf(Word32NotEqual(representation, Int32Constant(kConsStringTag)),
3897 cannot_deref); 3996 cannot_deref);
(...skipping 214 matching lines...)
4112 Node* value = AllocateSeqTwoByteString(2); 4211 Node* value = AllocateSeqTwoByteString(2);
4113 StoreNoWriteBarrier( 4212 StoreNoWriteBarrier(
4114 MachineRepresentation::kWord32, value, 4213 MachineRepresentation::kWord32, value,
4115 IntPtrConstant(SeqTwoByteString::kHeaderSize - kHeapObjectTag), 4214 IntPtrConstant(SeqTwoByteString::kHeaderSize - kHeapObjectTag),
4116 codepoint); 4215 codepoint);
4117 var_result.Bind(value); 4216 var_result.Bind(value);
4118 Goto(&return_result); 4217 Goto(&return_result);
4119 } 4218 }
4120 4219
4121 BIND(&return_result); 4220 BIND(&return_result);
4221 CSA_ASSERT(this, IsString(var_result.value()));
4122 return var_result.value(); 4222 return var_result.value();
4123 } 4223 }
4124 4224
4125 Node* CodeStubAssembler::StringToNumber(Node* context, Node* input) { 4225 Node* CodeStubAssembler::StringToNumber(Node* context, Node* input) {
4226 CSA_SLOW_ASSERT(this, IsString(input));
4126 Label runtime(this, Label::kDeferred); 4227 Label runtime(this, Label::kDeferred);
4127 Label end(this); 4228 Label end(this);
4128 4229
4129 VARIABLE(var_result, MachineRepresentation::kTagged); 4230 VARIABLE(var_result, MachineRepresentation::kTagged);
4130 4231
4131 // Check if string has a cached array index. 4232 // Check if string has a cached array index.
4132 Node* hash = LoadNameHashField(input); 4233 Node* hash = LoadNameHashField(input);
4133 Node* bit = 4234 Node* bit =
4134 Word32And(hash, Int32Constant(String::kContainsCachedArrayIndexMask)); 4235 Word32And(hash, Int32Constant(String::kContainsCachedArrayIndexMask));
4135 GotoIf(Word32NotEqual(bit, Int32Constant(0)), &runtime); 4236 GotoIf(Word32NotEqual(bit, Int32Constant(0)), &runtime);
(...skipping 34 matching lines...)
4170 GotoIfNot(IsHeapNumberMap(map), &runtime); 4271 GotoIfNot(IsHeapNumberMap(map), &runtime);
4171 4272
4172 // Make a hash from the two 32-bit values of the double. 4273 // Make a hash from the two 32-bit values of the double.
4173 Node* low = 4274 Node* low =
4174 LoadObjectField(argument, HeapNumber::kValueOffset, MachineType::Int32()); 4275 LoadObjectField(argument, HeapNumber::kValueOffset, MachineType::Int32());
4175 Node* high = LoadObjectField(argument, HeapNumber::kValueOffset + kIntSize, 4276 Node* high = LoadObjectField(argument, HeapNumber::kValueOffset + kIntSize,
4176 MachineType::Int32()); 4277 MachineType::Int32());
4177 Node* hash = Word32Xor(low, high); 4278 Node* hash = Word32Xor(low, high);
4178 hash = ChangeInt32ToIntPtr(hash); 4279 hash = ChangeInt32ToIntPtr(hash);
4179 hash = WordShl(hash, one); 4280 hash = WordShl(hash, one);
4180 Node* index = WordAnd(hash, SmiUntag(BitcastWordToTagged(mask))); 4281 Node* index = WordAnd(hash, WordSar(mask, SmiShiftBitsConstant()));
4181 4282
4182 // Cache entry's key must be a heap number 4283 // Cache entry's key must be a heap number
4183 Node* number_key = LoadFixedArrayElement(number_string_cache, index); 4284 Node* number_key = LoadFixedArrayElement(number_string_cache, index);
4184 GotoIf(TaggedIsSmi(number_key), &runtime); 4285 GotoIf(TaggedIsSmi(number_key), &runtime);
4185 map = LoadMap(number_key); 4286 map = LoadMap(number_key);
4186 GotoIfNot(IsHeapNumberMap(map), &runtime); 4287 GotoIfNot(IsHeapNumberMap(map), &runtime);
4187 4288
4188 // Cache entry's key must match the heap number value we're looking for. 4289 // Cache entry's key must match the heap number value we're looking for.
4189 Node* low_compare = LoadObjectField(number_key, HeapNumber::kValueOffset, 4290 Node* low_compare = LoadObjectField(number_key, HeapNumber::kValueOffset,
4190 MachineType::Int32()); 4291 MachineType::Int32());
(...skipping 24 matching lines...)
4215 GotoIf(WordNotEqual(smi_key, argument), &runtime); 4316 GotoIf(WordNotEqual(smi_key, argument), &runtime);
4216 4317
4217 // Smi match, return value from cache entry. 4318 // Smi match, return value from cache entry.
4218 IncrementCounter(isolate()->counters()->number_to_string_native(), 1); 4319 IncrementCounter(isolate()->counters()->number_to_string_native(), 1);
4219 result.Bind(LoadFixedArrayElement(number_string_cache, smi_index, 4320 result.Bind(LoadFixedArrayElement(number_string_cache, smi_index,
4220 kPointerSize, SMI_PARAMETERS)); 4321 kPointerSize, SMI_PARAMETERS));
4221 Goto(&done); 4322 Goto(&done);
4222 } 4323 }
4223 4324
4224 BIND(&done); 4325 BIND(&done);
4326 CSA_ASSERT(this, IsString(result.value()));
4225 return result.value(); 4327 return result.value();
4226 } 4328 }
4227 4329
4228 Node* CodeStubAssembler::ToName(Node* context, Node* value) { 4330 Node* CodeStubAssembler::ToName(Node* context, Node* value) {
4229 Label end(this); 4331 Label end(this);
4230 VARIABLE(var_result, MachineRepresentation::kTagged); 4332 VARIABLE(var_result, MachineRepresentation::kTagged);
4231 4333
4232 Label is_number(this); 4334 Label is_number(this);
4233 GotoIf(TaggedIsSmi(value), &is_number); 4335 GotoIf(TaggedIsSmi(value), &is_number);
4234 4336
(...skipping 26 matching lines...)
4261 Goto(&end); 4363 Goto(&end);
4262 4364
4263 BIND(&not_oddball); 4365 BIND(&not_oddball);
4264 { 4366 {
4265 var_result.Bind(CallRuntime(Runtime::kToName, context, value)); 4367 var_result.Bind(CallRuntime(Runtime::kToName, context, value));
4266 Goto(&end); 4368 Goto(&end);
4267 } 4369 }
4268 } 4370 }
4269 4371
4270 BIND(&end); 4372 BIND(&end);
4373 CSA_ASSERT(this, IsName(var_result.value()));
4271 return var_result.value(); 4374 return var_result.value();
4272 } 4375 }
4273 4376
4274 Node* CodeStubAssembler::NonNumberToNumber(Node* context, Node* input) { 4377 Node* CodeStubAssembler::NonNumberToNumber(Node* context, Node* input) {
4275 // Assert input is a HeapObject (not smi or heap number) 4378 // Assert input is a HeapObject (not smi or heap number)
4276 CSA_ASSERT(this, Word32BinaryNot(TaggedIsSmi(input))); 4379 CSA_ASSERT(this, Word32BinaryNot(TaggedIsSmi(input)));
4277 CSA_ASSERT(this, Word32BinaryNot(IsHeapNumberMap(LoadMap(input)))); 4380 CSA_ASSERT(this, Word32BinaryNot(IsHeapNumberMap(LoadMap(input))));
4278 4381
4279 // We might need to loop once here due to ToPrimitive conversions. 4382 // We might need to loop once here due to ToPrimitive conversions.
4280 VARIABLE(var_input, MachineRepresentation::kTagged, input); 4383 VARIABLE(var_input, MachineRepresentation::kTagged, input);
(...skipping 68 matching lines...)
4349 // Note: We cannot tail call to the runtime here, as js-to-wasm 4452 // Note: We cannot tail call to the runtime here, as js-to-wasm
4350 // trampolines also use this code currently, and they declare all 4453 // trampolines also use this code currently, and they declare all
4351 // outgoing parameters as untagged, while we would push a tagged 4454 // outgoing parameters as untagged, while we would push a tagged
4352 // object here. 4455 // object here.
4353 var_result.Bind(CallRuntime(Runtime::kToNumber, context, input)); 4456 var_result.Bind(CallRuntime(Runtime::kToNumber, context, input));
4354 Goto(&end); 4457 Goto(&end);
4355 } 4458 }
4356 } 4459 }
4357 4460
4358 BIND(&end); 4461 BIND(&end);
4462 CSA_ASSERT(this, IsNumber(var_result.value()));
4359 return var_result.value(); 4463 return var_result.value();
4360 } 4464 }
4361 4465
4362 Node* CodeStubAssembler::ToNumber(Node* context, Node* input) { 4466 Node* CodeStubAssembler::ToNumber(Node* context, Node* input) {
4363 VARIABLE(var_result, MachineRepresentation::kTagged); 4467 VARIABLE(var_result, MachineRepresentation::kTagged);
4364 Label end(this); 4468 Label end(this);
4365 4469
4366 Label not_smi(this, Label::kDeferred); 4470 Label not_smi(this, Label::kDeferred);
4367 GotoIfNot(TaggedIsSmi(input), &not_smi); 4471 GotoIfNot(TaggedIsSmi(input), &not_smi);
4368 var_result.Bind(input); 4472 var_result.Bind(input);
4369 Goto(&end); 4473 Goto(&end);
4370 4474
4371 BIND(&not_smi); 4475 BIND(&not_smi);
4372 { 4476 {
4373 Label not_heap_number(this, Label::kDeferred); 4477 Label not_heap_number(this, Label::kDeferred);
4374 Node* input_map = LoadMap(input); 4478 Node* input_map = LoadMap(input);
4375 GotoIfNot(IsHeapNumberMap(input_map), &not_heap_number); 4479 GotoIfNot(IsHeapNumberMap(input_map), &not_heap_number);
4376 4480
4377 var_result.Bind(input); 4481 var_result.Bind(input);
4378 Goto(&end); 4482 Goto(&end);
4379 4483
4380 BIND(&not_heap_number); 4484 BIND(&not_heap_number);
4381 { 4485 {
4382 var_result.Bind(NonNumberToNumber(context, input)); 4486 var_result.Bind(NonNumberToNumber(context, input));
4383 Goto(&end); 4487 Goto(&end);
4384 } 4488 }
4385 } 4489 }
4386 4490
4387 BIND(&end); 4491 BIND(&end);
4492 CSA_ASSERT(this, IsNumber(var_result.value()));
4388 return var_result.value(); 4493 return var_result.value();
4389 } 4494 }
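
The dispatch above reduces to three cases (editor's sketch; NonNumberToNumberSketch is a hypothetical stand-in for the CSA helper defined earlier):

// Editor's sketch, not part of this CL.
Object* ToNumberSketch(Context* context, Object* input) {
  if (input->IsSmi()) return input;                // Smis are already numbers
  if (input->IsHeapNumber()) return input;         // so are boxed doubles
  return NonNumberToNumberSketch(context, input);  // strings, oddballs, receivers
}
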
4390 4495
4391 // ES#sec-touint32 4496 // ES#sec-touint32
4392 Node* CodeStubAssembler::ToUint32(Node* context, Node* input) { 4497 Node* CodeStubAssembler::ToUint32(Node* context, Node* input) {
4393 Node* const float_zero = Float64Constant(0.0); 4498 Node* const float_zero = Float64Constant(0.0);
4394 Node* const float_two_32 = Float64Constant(static_cast<double>(1ULL << 32)); 4499 Node* const float_two_32 = Float64Constant(static_cast<double>(1ULL << 32));
4395 4500
4396 Label out(this); 4501 Label out(this);
4397 4502
(...skipping 82 matching lines...)
4480 } 4585 }
4481 4586
4482 BIND(&return_zero); 4587 BIND(&return_zero);
4483 { 4588 {
4484 var_result.Bind(SmiConstant(Smi::kZero)); 4589 var_result.Bind(SmiConstant(Smi::kZero));
4485 Goto(&out); 4590 Goto(&out);
4486 } 4591 }
4487 } 4592 }
4488 4593
4489 BIND(&out); 4594 BIND(&out);
4595 CSA_ASSERT(this, IsNumber(var_result.value()));
4490 return var_result.value(); 4596 return var_result.value();
4491 } 4597 }
4492 4598
4493 Node* CodeStubAssembler::ToString(Node* context, Node* input) { 4599 Node* CodeStubAssembler::ToString(Node* context, Node* input) {
4494 Label is_number(this); 4600 Label is_number(this);
4495 Label runtime(this, Label::kDeferred), done(this); 4601 Label runtime(this, Label::kDeferred), done(this);
4496 VARIABLE(result, MachineRepresentation::kTagged); 4602 VARIABLE(result, MachineRepresentation::kTagged);
4497 GotoIf(TaggedIsSmi(input), &is_number); 4603 GotoIf(TaggedIsSmi(input), &is_number);
4498 4604
4499 Node* input_map = LoadMap(input); 4605 Node* input_map = LoadMap(input);
(...skipping 86 matching lines...)
4586 Goto(&negative_check); 4692 Goto(&negative_check);
4587 4693
4588 BIND(&negative_check); 4694 BIND(&negative_check);
4589 Branch(SmiLessThan(result.value(), SmiConstant(0)), range_error, &done); 4695 Branch(SmiLessThan(result.value(), SmiConstant(0)), range_error, &done);
4590 4696
4591 BIND(&return_zero); 4697 BIND(&return_zero);
4592 result.Bind(SmiConstant(0)); 4698 result.Bind(SmiConstant(0));
4593 Goto(&done); 4699 Goto(&done);
4594 4700
4595 BIND(&done); 4701 BIND(&done);
4702 CSA_SLOW_ASSERT(this, TaggedIsSmi(result.value()));
4596 return result.value(); 4703 return result.value();
4597 } 4704 }
4598 4705
4599 Node* CodeStubAssembler::ToSmiLength(Node* input, Node* const context, 4706 Node* CodeStubAssembler::ToSmiLength(Node* input, Node* const context,
4600 Label* range_error) { 4707 Label* range_error) {
4601 VARIABLE(result, MachineRepresentation::kTagged, input); 4708 VARIABLE(result, MachineRepresentation::kTagged, input);
4602 Label to_integer(this), negative_check(this), return_zero(this), done(this); 4709 Label to_integer(this), negative_check(this), return_zero(this), done(this);
4603 Branch(TaggedIsSmi(result.value()), &negative_check, &to_integer); 4710 Branch(TaggedIsSmi(result.value()), &negative_check, &to_integer);
4604 4711
4605 BIND(&to_integer); 4712 BIND(&to_integer);
4606 result.Bind(ToInteger(context, result.value(), 4713 result.Bind(ToInteger(context, result.value(),
4607 CodeStubAssembler::kTruncateMinusZero)); 4714 CodeStubAssembler::kTruncateMinusZero));
4608 GotoIfNot(TaggedIsSmi(result.value()), range_error); 4715 GotoIfNot(TaggedIsSmi(result.value()), range_error);
4609 CSA_ASSERT(this, TaggedIsSmi(result.value())); 4716 CSA_ASSERT(this, TaggedIsSmi(result.value()));
4610 Goto(&negative_check); 4717 Goto(&negative_check);
4611 4718
4612 BIND(&negative_check); 4719 BIND(&negative_check);
4613 Branch(SmiLessThan(result.value(), SmiConstant(0)), &return_zero, &done); 4720 Branch(SmiLessThan(result.value(), SmiConstant(0)), &return_zero, &done);
4614 4721
4615 BIND(&return_zero); 4722 BIND(&return_zero);
4616 result.Bind(SmiConstant(0)); 4723 result.Bind(SmiConstant(0));
4617 Goto(&done); 4724 Goto(&done);
4618 4725
4619 BIND(&done); 4726 BIND(&done);
4727 CSA_SLOW_ASSERT(this, TaggedIsSmi(result.value()));
4620 return result.value(); 4728 return result.value();
4621 } 4729 }
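
Editor's sketch of the clamping above for an already-numeric input; the 31-bit Smi range used here is an assumption matching 32-bit tagging, and the out-parameter mirrors the |range_error| label:

#include <cmath>
#include <cstdint>

int32_t ToSmiLengthSketch(double input, bool* range_error) {
  double x = std::trunc(input);                 // ToInteger, with -0 truncated to +0
  if (x < -1073741824.0 || x > 1073741823.0) {  // assumed Smi range
    *range_error = true;
    return 0;
  }
  return x < 0 ? 0 : static_cast<int32_t>(x);   // negative lengths clamp to zero
}
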
4622 4730
4623 Node* CodeStubAssembler::ToLength_Inline(Node* const context, 4731 Node* CodeStubAssembler::ToLength_Inline(Node* const context,
4624 Node* const input) { 4732 Node* const input) {
4625 Node* const smi_zero = SmiConstant(0); 4733 Node* const smi_zero = SmiConstant(0);
4626 return Select( 4734 return Select(
4627 TaggedIsSmi(input), [=] { return SmiMax(input, smi_zero); }, 4735 TaggedIsSmi(input), [=] { return SmiMax(input, smi_zero); },
4628 [=] { return CallBuiltin(Builtins::kToLength, context, input); }, 4736 [=] { return CallBuiltin(Builtins::kToLength, context, input); },
4629 MachineRepresentation::kTagged); 4737 MachineRepresentation::kTagged);
(...skipping 49 matching lines...)
4679 var_arg.Bind(CallStub(callable, context, arg)); 4787 var_arg.Bind(CallStub(callable, context, arg));
4680 Goto(&loop); 4788 Goto(&loop);
4681 } 4789 }
4682 4790
4683 BIND(&return_zero); 4791 BIND(&return_zero);
4684 var_arg.Bind(SmiConstant(Smi::kZero)); 4792 var_arg.Bind(SmiConstant(Smi::kZero));
4685 Goto(&out); 4793 Goto(&out);
4686 } 4794 }
4687 4795
4688 BIND(&out); 4796 BIND(&out);
4797 CSA_SLOW_ASSERT(this, IsNumber(var_arg.value()));
4689 return var_arg.value(); 4798 return var_arg.value();
4690 } 4799 }
4691 4800
4692 Node* CodeStubAssembler::DecodeWord32(Node* word32, uint32_t shift, 4801 Node* CodeStubAssembler::DecodeWord32(Node* word32, uint32_t shift,
4693 uint32_t mask) { 4802 uint32_t mask) {
4694 return Word32Shr(Word32And(word32, Int32Constant(mask)), 4803 return Word32Shr(Word32And(word32, Int32Constant(mask)),
4695 static_cast<int>(shift)); 4804 static_cast<int>(shift));
4696 } 4805 }
4697 4806
4698 Node* CodeStubAssembler::DecodeWord(Node* word, uint32_t shift, uint32_t mask) { 4807 Node* CodeStubAssembler::DecodeWord(Node* word, uint32_t shift, uint32_t mask) {
(...skipping 96 matching lines...)
4795 BIND(&if_hascachedindex); 4904 BIND(&if_hascachedindex);
4796 var_index->Bind(DecodeWordFromWord32<Name::ArrayIndexValueBits>(hash)); 4905 var_index->Bind(DecodeWordFromWord32<Name::ArrayIndexValueBits>(hash));
4797 Goto(if_keyisindex); 4906 Goto(if_keyisindex);
4798 } 4907 }
4799 4908
4800 void CodeStubAssembler::TryInternalizeString( 4909 void CodeStubAssembler::TryInternalizeString(
4801 Node* string, Label* if_index, Variable* var_index, Label* if_internalized, 4910 Node* string, Label* if_index, Variable* var_index, Label* if_internalized,
4802 Variable* var_internalized, Label* if_not_internalized, Label* if_bailout) { 4911 Variable* var_internalized, Label* if_not_internalized, Label* if_bailout) {
4803 DCHECK(var_index->rep() == MachineType::PointerRepresentation()); 4912 DCHECK(var_index->rep() == MachineType::PointerRepresentation());
4804 DCHECK(var_internalized->rep() == MachineRepresentation::kTagged); 4913 DCHECK(var_internalized->rep() == MachineRepresentation::kTagged);
4914 CSA_SLOW_ASSERT(this, IsString(string));
4805 Node* function = ExternalConstant( 4915 Node* function = ExternalConstant(
4806 ExternalReference::try_internalize_string_function(isolate())); 4916 ExternalReference::try_internalize_string_function(isolate()));
4807 Node* result = CallCFunction1(MachineType::AnyTagged(), 4917 Node* result = CallCFunction1(MachineType::AnyTagged(),
4808 MachineType::AnyTagged(), function, string); 4918 MachineType::AnyTagged(), function, string);
4809 Label internalized(this); 4919 Label internalized(this);
4810 GotoIf(TaggedIsNotSmi(result), &internalized); 4920 GotoIf(TaggedIsNotSmi(result), &internalized);
4811 Node* word_result = SmiUntag(result); 4921 Node* word_result = SmiUntag(result);
4812 GotoIf(WordEqual(word_result, IntPtrConstant(ResultSentinel::kNotFound)), 4922 GotoIf(WordEqual(word_result, IntPtrConstant(ResultSentinel::kNotFound)),
4813 if_not_internalized); 4923 if_not_internalized);
4814 GotoIf(WordEqual(word_result, IntPtrConstant(ResultSentinel::kUnsupported)), 4924 GotoIf(WordEqual(word_result, IntPtrConstant(ResultSentinel::kUnsupported)),
(...skipping 227 matching lines...)
5042 void CodeStubAssembler::InsertEntry(Node* dictionary, Node* key, Node* value, 5152 void CodeStubAssembler::InsertEntry(Node* dictionary, Node* key, Node* value,
5043 Node* index, Node* enum_index) { 5153 Node* index, Node* enum_index) {
5044 UNREACHABLE(); // Use specializations instead. 5154 UNREACHABLE(); // Use specializations instead.
5045 } 5155 }
5046 5156
5047 template <> 5157 template <>
5048 void CodeStubAssembler::InsertEntry<NameDictionary>(Node* dictionary, 5158 void CodeStubAssembler::InsertEntry<NameDictionary>(Node* dictionary,
5049 Node* name, Node* value, 5159 Node* name, Node* value,
5050 Node* index, 5160 Node* index,
5051 Node* enum_index) { 5161 Node* enum_index) {
5162 CSA_SLOW_ASSERT(this, IsDictionary(dictionary));
5163
5052 // Store name and value. 5164 // Store name and value.
5053 StoreFixedArrayElement(dictionary, index, name); 5165 StoreFixedArrayElement(dictionary, index, name);
5054 StoreValueByKeyIndex<NameDictionary>(dictionary, index, value); 5166 StoreValueByKeyIndex<NameDictionary>(dictionary, index, value);
5055 5167
5056 // Prepare details of the new property. 5168 // Prepare details of the new property.
5057 const int kInitialIndex = 0; 5169 const int kInitialIndex = 0;
5058 PropertyDetails d(kData, NONE, kInitialIndex, PropertyCellType::kNoCell); 5170 PropertyDetails d(kData, NONE, kInitialIndex, PropertyCellType::kNoCell);
5059 enum_index = 5171 enum_index =
5060 SmiShl(enum_index, PropertyDetails::DictionaryStorageField::kShift); 5172 SmiShl(enum_index, PropertyDetails::DictionaryStorageField::kShift);
5061 STATIC_ASSERT(kInitialIndex == 0); 5173 STATIC_ASSERT(kInitialIndex == 0);
(...skipping 21 matching lines...)
5083 void CodeStubAssembler::InsertEntry<GlobalDictionary>(Node* dictionary, 5195 void CodeStubAssembler::InsertEntry<GlobalDictionary>(Node* dictionary,
5084 Node* key, Node* value, 5196 Node* key, Node* value,
5085 Node* index, 5197 Node* index,
5086 Node* enum_index) { 5198 Node* enum_index) {
5087 UNIMPLEMENTED(); 5199 UNIMPLEMENTED();
5088 } 5200 }
5089 5201
5090 template <class Dictionary> 5202 template <class Dictionary>
5091 void CodeStubAssembler::Add(Node* dictionary, Node* key, Node* value, 5203 void CodeStubAssembler::Add(Node* dictionary, Node* key, Node* value,
5092 Label* bailout) { 5204 Label* bailout) {
5205 CSA_SLOW_ASSERT(this, IsDictionary(dictionary));
5093 Node* capacity = GetCapacity<Dictionary>(dictionary); 5206 Node* capacity = GetCapacity<Dictionary>(dictionary);
5094 Node* nof = GetNumberOfElements<Dictionary>(dictionary); 5207 Node* nof = GetNumberOfElements<Dictionary>(dictionary);
5095 Node* new_nof = SmiAdd(nof, SmiConstant(1)); 5208 Node* new_nof = SmiAdd(nof, SmiConstant(1));
5096 // Require 33% to still be free after adding additional_elements. 5209 // Require 33% to still be free after adding additional_elements.
5097 // Computing "x + (x >> 1)" on a Smi x does not return a valid Smi! 5210 // Computing "x + (x >> 1)" on a Smi x does not return a valid Smi!
5098 // But that's OK here because it's only used for a comparison. 5211 // But that's OK here because it's only used for a comparison.
5099 Node* required_capacity_pseudo_smi = SmiAdd(new_nof, SmiShr(new_nof, 1)); 5212 Node* required_capacity_pseudo_smi = SmiAdd(new_nof, SmiShr(new_nof, 1));
5100 GotoIf(SmiBelow(capacity, required_capacity_pseudo_smi), bailout); 5213 GotoIf(SmiBelow(capacity, required_capacity_pseudo_smi), bailout);
5101 // Require rehashing if more than 50% of free elements are deleted elements. 5214 // Require rehashing if more than 50% of free elements are deleted elements.
5102 Node* deleted = GetNumberOfDeletedElements<Dictionary>(dictionary); 5215 Node* deleted = GetNumberOfDeletedElements<Dictionary>(dictionary);
(...skipping 1692 matching lines...)
6795 Goto(&end); 6908 Goto(&end);
6796 } 6909 }
6797 6910
6798 BIND(&end); 6911 BIND(&end);
6799 } 6912 }
6800 6913
6801 Node* CodeStubAssembler::BuildFastLoop( 6914 Node* CodeStubAssembler::BuildFastLoop(
6802 const CodeStubAssembler::VariableList& vars, Node* start_index, 6915 const CodeStubAssembler::VariableList& vars, Node* start_index,
6803 Node* end_index, const FastLoopBody& body, int increment, 6916 Node* end_index, const FastLoopBody& body, int increment,
6804 ParameterMode parameter_mode, IndexAdvanceMode advance_mode) { 6917 ParameterMode parameter_mode, IndexAdvanceMode advance_mode) {
6918 CSA_SLOW_ASSERT(this, MatchesParameterMode(start_index, parameter_mode));
6919 CSA_SLOW_ASSERT(this, MatchesParameterMode(end_index, parameter_mode));
6805 MachineRepresentation index_rep = (parameter_mode == INTPTR_PARAMETERS) 6920 MachineRepresentation index_rep = (parameter_mode == INTPTR_PARAMETERS)
6806 ? MachineType::PointerRepresentation() 6921 ? MachineType::PointerRepresentation()
6807 : MachineRepresentation::kTaggedSigned; 6922 : MachineRepresentation::kTaggedSigned;
6808 VARIABLE(var, index_rep, start_index); 6923 VARIABLE(var, index_rep, start_index);
6809 VariableList vars_copy(vars, zone()); 6924 VariableList vars_copy(vars, zone());
6810 vars_copy.Add(&var, zone()); 6925 vars_copy.Add(&var, zone());
6811 Label loop(this, vars_copy); 6926 Label loop(this, vars_copy);
6812 Label after_loop(this); 6927 Label after_loop(this);
6813 // Introduce an explicit second check of the termination condition before the 6928 // Introduce an explicit second check of the termination condition before the
6814 // loop that helps turbofan generate better code. If there's only a single 6929 // loop that helps turbofan generate better code. If there's only a single
(...skipping 17 matching lines...)
6832 BIND(&after_loop); 6947 BIND(&after_loop);
6833 return var.value(); 6948 return var.value();
6834 } 6949 }
6835 6950
6836 void CodeStubAssembler::BuildFastFixedArrayForEach( 6951 void CodeStubAssembler::BuildFastFixedArrayForEach(
6837 const CodeStubAssembler::VariableList& vars, Node* fixed_array, 6952 const CodeStubAssembler::VariableList& vars, Node* fixed_array,
6838 ElementsKind kind, Node* first_element_inclusive, 6953 ElementsKind kind, Node* first_element_inclusive,
6839 Node* last_element_exclusive, const FastFixedArrayForEachBody& body, 6954 Node* last_element_exclusive, const FastFixedArrayForEachBody& body,
6840 ParameterMode mode, ForEachDirection direction) { 6955 ParameterMode mode, ForEachDirection direction) {
6841 STATIC_ASSERT(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize); 6956 STATIC_ASSERT(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize);
6957 CSA_SLOW_ASSERT(this, MatchesParameterMode(first_element_inclusive, mode));
6958 CSA_SLOW_ASSERT(this, MatchesParameterMode(last_element_exclusive, mode));
6959 CSA_SLOW_ASSERT(this, IsFixedArrayWithKind(fixed_array, kind));
6842 int32_t first_val; 6960 int32_t first_val;
6843 bool constant_first = ToInt32Constant(first_element_inclusive, first_val); 6961 bool constant_first = ToInt32Constant(first_element_inclusive, first_val);
6844 int32_t last_val; 6962 int32_t last_val;
6845 bool constent_last = ToInt32Constant(last_element_exclusive, last_val); 6963 bool constent_last = ToInt32Constant(last_element_exclusive, last_val);
6846 if (constant_first && constent_last) { 6964 if (constant_first && constent_last) {
6847 int delta = last_val - first_val; 6965 int delta = last_val - first_val;
6848 DCHECK(delta >= 0); 6966 DCHECK(delta >= 0);
6849 if (delta <= kElementLoopUnrollThreshold) { 6967 if (delta <= kElementLoopUnrollThreshold) {
6850 if (direction == ForEachDirection::kForward) { 6968 if (direction == ForEachDirection::kForward) {
6851 for (int i = first_val; i < last_val; ++i) { 6969 for (int i = first_val; i < last_val; ++i) {
(...skipping 40 matching lines...)
6892 (kMaxRegularHeapObjectSize - base_size) / kPointerSize; 7010 (kMaxRegularHeapObjectSize - base_size) / kPointerSize;
6893 GotoIf(IntPtrOrSmiGreaterThan( 7011 GotoIf(IntPtrOrSmiGreaterThan(
6894 element_count, IntPtrOrSmiConstant(max_newspace_parameters, mode), 7012 element_count, IntPtrOrSmiConstant(max_newspace_parameters, mode),
6895 mode), 7013 mode),
6896 doesnt_fit); 7014 doesnt_fit);
6897 } 7015 }
6898 7016
6899 void CodeStubAssembler::InitializeFieldsWithRoot( 7017 void CodeStubAssembler::InitializeFieldsWithRoot(
6900 Node* object, Node* start_offset, Node* end_offset, 7018 Node* object, Node* start_offset, Node* end_offset,
6901 Heap::RootListIndex root_index) { 7019 Heap::RootListIndex root_index) {
7020 CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object));
6902 start_offset = IntPtrAdd(start_offset, IntPtrConstant(-kHeapObjectTag)); 7021 start_offset = IntPtrAdd(start_offset, IntPtrConstant(-kHeapObjectTag));
6903 end_offset = IntPtrAdd(end_offset, IntPtrConstant(-kHeapObjectTag)); 7022 end_offset = IntPtrAdd(end_offset, IntPtrConstant(-kHeapObjectTag));
6904 Node* root_value = LoadRoot(root_index); 7023 Node* root_value = LoadRoot(root_index);
6905 BuildFastLoop(end_offset, start_offset, 7024 BuildFastLoop(end_offset, start_offset,
6906 [this, object, root_value](Node* current) { 7025 [this, object, root_value](Node* current) {
6907 StoreNoWriteBarrier(MachineRepresentation::kTagged, object, 7026 StoreNoWriteBarrier(MachineRepresentation::kTagged, object,
6908 current, root_value); 7027 current, root_value);
6909 }, 7028 },
6910 -kPointerSize, INTPTR_PARAMETERS, 7029 -kPointerSize, INTPTR_PARAMETERS,
6911 CodeStubAssembler::IndexAdvanceMode::kPre); 7030 CodeStubAssembler::IndexAdvanceMode::kPre);
6912 } 7031 }
6913 7032
6914 void CodeStubAssembler::BranchIfNumericRelationalComparison( 7033 void CodeStubAssembler::BranchIfNumericRelationalComparison(
6915 RelationalComparisonMode mode, Node* lhs, Node* rhs, Label* if_true, 7034 RelationalComparisonMode mode, Node* lhs, Node* rhs, Label* if_true,
6916 Label* if_false) { 7035 Label* if_false) {
7036 CSA_SLOW_ASSERT(this, IsNumber(lhs));
7037 CSA_SLOW_ASSERT(this, IsNumber(rhs));
7038
6917 Label end(this); 7039 Label end(this);
6918 VARIABLE(result, MachineRepresentation::kTagged); 7040 VARIABLE(result, MachineRepresentation::kTagged);
6919 7041
6920 // Shared entry for floating point comparison. 7042 // Shared entry for floating point comparison.
6921 Label do_fcmp(this); 7043 Label do_fcmp(this);
6922 VARIABLE(var_fcmp_lhs, MachineRepresentation::kFloat64); 7044 VARIABLE(var_fcmp_lhs, MachineRepresentation::kFloat64);
6923 VARIABLE(var_fcmp_rhs, MachineRepresentation::kFloat64); 7045 VARIABLE(var_fcmp_rhs, MachineRepresentation::kFloat64);
6924 7046
6925 // Check if the {lhs} is a Smi or a HeapObject. 7047 // Check if the {lhs} is a Smi or a HeapObject.
6926 Label if_lhsissmi(this), if_lhsisnotsmi(this); 7048 Label if_lhsissmi(this), if_lhsisnotsmi(this);
(...skipping 2260 matching lines...)
9187 Load(MachineType::Uint8(), 9309 Load(MachineType::Uint8(),
9188 ExternalConstant( 9310 ExternalConstant(
9189 ExternalReference::promise_hook_or_debug_is_active_address( 9311 ExternalReference::promise_hook_or_debug_is_active_address(
9190 isolate()))); 9312 isolate())));
9191 return Word32NotEqual(promise_hook_or_debug_is_active, Int32Constant(0)); 9313 return Word32NotEqual(promise_hook_or_debug_is_active, Int32Constant(0));
9192 } 9314 }
9193 9315
9194 Node* CodeStubAssembler::AllocateFunctionWithMapAndContext(Node* map, 9316 Node* CodeStubAssembler::AllocateFunctionWithMapAndContext(Node* map,
9195 Node* shared_info, 9317 Node* shared_info,
9196 Node* context) { 9318 Node* context) {
9319 CSA_SLOW_ASSERT(this, IsMap(map));
9320
9197 Node* const code = BitcastTaggedToWord( 9321 Node* const code = BitcastTaggedToWord(
9198 LoadObjectField(shared_info, SharedFunctionInfo::kCodeOffset)); 9322 LoadObjectField(shared_info, SharedFunctionInfo::kCodeOffset));
9199 Node* const code_entry = 9323 Node* const code_entry =
9200 IntPtrAdd(code, IntPtrConstant(Code::kHeaderSize - kHeapObjectTag)); 9324 IntPtrAdd(code, IntPtrConstant(Code::kHeaderSize - kHeapObjectTag));
9201 9325
9202 Node* const fun = Allocate(JSFunction::kSize); 9326 Node* const fun = Allocate(JSFunction::kSize);
9203 StoreMapNoWriteBarrier(fun, map); 9327 StoreMapNoWriteBarrier(fun, map);
9204 StoreObjectFieldRoot(fun, JSObject::kPropertiesOffset, 9328 StoreObjectFieldRoot(fun, JSObject::kPropertiesOffset,
9205 Heap::kEmptyFixedArrayRootIndex); 9329 Heap::kEmptyFixedArrayRootIndex);
9206 StoreObjectFieldRoot(fun, JSObject::kElementsOffset, 9330 StoreObjectFieldRoot(fun, JSObject::kElementsOffset,
(...skipping 66 matching lines...)
9273 formatted.c_str(), TENURED); 9397 formatted.c_str(), TENURED);
9274 CallRuntime(Runtime::kGlobalPrint, NoContextConstant(), 9398 CallRuntime(Runtime::kGlobalPrint, NoContextConstant(),
9275 HeapConstant(string)); 9399 HeapConstant(string));
9276 } 9400 }
9277 CallRuntime(Runtime::kDebugPrint, NoContextConstant(), tagged_value); 9401 CallRuntime(Runtime::kDebugPrint, NoContextConstant(), tagged_value);
9278 #endif 9402 #endif
9279 } 9403 }
9280 9404
9281 } // namespace internal 9405 } // namespace internal
9282 } // namespace v8 9406 } // namespace v8