Chromium Code Reviews

Unified Diff: src/code-stub-assembler.cc

Issue 2544793002: [stubs] Cleanup storing of maps to objects. (Closed)
Patch Set: Created 4 years ago
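
This patch routes every store to an object's map slot through dedicated helpers: StoreMap(object, map) for arbitrary maps, which emits the map-specific write barrier, and StoreMapNoWriteBarrier for maps that are immortal immovable roots, now also callable with a Heap::RootListIndex directly. A new DCHECK makes StoreObjectField reject HeapObject::kMapOffset, and the offset constant in StoreMapNoWriteBarrier is corrected from HeapNumber::kMapOffset to HeapObject::kMapOffset (the value is identical, since HeapNumber inherits the offset, so this is cosmetic). The patch is rendered below as a unified diff. As a rough sketch of the call-site pattern it converges on (not compilable on its own; it assumes the CodeStubAssembler context, and obj/new_map are hypothetical nodes):

    // Before: ad-hoc stores into the map slot.
    StoreObjectFieldRoot(obj, FixedArray::kMapOffset, Heap::kHashTableMapRootIndex);
    StoreMapNoWriteBarrier(obj, LoadRoot(Heap::kOneByteStringMapRootIndex));

    // After: the helpers own the map slot and the barrier decision.
    StoreMapNoWriteBarrier(obj, Heap::kHashTableMapRootIndex);  // immortal root map
    StoreMap(obj, new_map);  // arbitrary map; emits the map write barrier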
 // Copyright 2016 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 #include "src/code-stub-assembler.h"
 #include "src/code-factory.h"
 #include "src/frames-inl.h"
 #include "src/frames.h"

 namespace v8 {
 namespace internal {
(...skipping 1306 matching lines...)
   return LoadContextElement(native_context, Context::ArrayMapIndex(kind));
 }

 Node* CodeStubAssembler::StoreHeapNumberValue(Node* object, Node* value) {
   return StoreObjectFieldNoWriteBarrier(object, HeapNumber::kValueOffset, value,
                                         MachineRepresentation::kFloat64);
 }

 Node* CodeStubAssembler::StoreObjectField(
     Node* object, int offset, Node* value) {
+  DCHECK_NE(HeapObject::kMapOffset, offset);  // Use StoreMap instead.
   return Store(object, IntPtrConstant(offset - kHeapObjectTag), value);
 }

 Node* CodeStubAssembler::StoreObjectField(Node* object, Node* offset,
                                           Node* value) {
   int const_offset;
   if (ToInt32Constant(offset, const_offset)) {
     return StoreObjectField(object, const_offset, value);
   }
   return Store(object, IntPtrSub(offset, IntPtrConstant(kHeapObjectTag)),
                value);
 }

 Node* CodeStubAssembler::StoreObjectFieldNoWriteBarrier(
     Node* object, int offset, Node* value, MachineRepresentation rep) {
   return StoreNoWriteBarrier(rep, object,
                              IntPtrConstant(offset - kHeapObjectTag), value);
 }

 Node* CodeStubAssembler::StoreObjectFieldNoWriteBarrier(
     Node* object, Node* offset, Node* value, MachineRepresentation rep) {
   int const_offset;
   if (ToInt32Constant(offset, const_offset)) {
     return StoreObjectFieldNoWriteBarrier(object, const_offset, value, rep);
   }
   return StoreNoWriteBarrier(
       rep, object, IntPtrSub(offset, IntPtrConstant(kHeapObjectTag)), value);
 }

+Node* CodeStubAssembler::StoreMap(Node* object, Node* map) {
+  CSA_SLOW_ASSERT(this, IsMap(map));
+  return StoreWithMapWriteBarrier(
+      object, IntPtrConstant(HeapObject::kMapOffset - kHeapObjectTag), map);
+}
+
+Node* CodeStubAssembler::StoreMapNoWriteBarrier(
+    Node* object, Heap::RootListIndex map_root_index) {
+  return StoreMapNoWriteBarrier(object, LoadRoot(map_root_index));
+}
+
 Node* CodeStubAssembler::StoreMapNoWriteBarrier(Node* object, Node* map) {
+  CSA_SLOW_ASSERT(this, IsMap(map));
   return StoreNoWriteBarrier(
       MachineRepresentation::kTagged, object,
-      IntPtrConstant(HeapNumber::kMapOffset - kHeapObjectTag), map);
+      IntPtrConstant(HeapObject::kMapOffset - kHeapObjectTag), map);
 }

 Node* CodeStubAssembler::StoreObjectFieldRoot(Node* object, int offset,
                                               Heap::RootListIndex root_index) {
   if (Heap::RootIsImmortalImmovable(root_index)) {
     return StoreObjectFieldNoWriteBarrier(object, offset, LoadRoot(root_index));
   } else {
     return StoreObjectField(object, offset, LoadRoot(root_index));
   }
 }
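
A note on the barrier choice above: skipping the write barrier on a map store is sound only when the map can never move or be collected, i.e. it is an immortal immovable root, so the GC never needs a remembered-set entry for the reference. That is why every StoreMapNoWriteBarrier call site in this patch is guarded by DCHECK(Heap::RootIsImmortalImmovable(...)), while StoreObjectFieldRoot makes the same decision once at stub-generation time. A minimal sketch of a combined helper for the map slot, mirroring StoreObjectFieldRoot (hypothetical; not part of this patch):

    // Hypothetical helper in the CodeStubAssembler context: pick the barrier
    // for a root map the way StoreObjectFieldRoot does for ordinary fields.
    Node* CodeStubAssembler::StoreMapRoot(Node* object,
                                          Heap::RootListIndex map_root_index) {
      if (Heap::RootIsImmortalImmovable(map_root_index)) {
        // The map never moves or dies; no remembered-set entry is needed.
        return StoreMapNoWriteBarrier(object, map_root_index);
      }
      // An arbitrary root map: emit the map-specific write barrier.
      return StoreMap(object, LoadRoot(map_root_index));
    }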
(...skipping 98 matching lines...)

   Bind(&success);
   return length.value();
 }

 Node* CodeStubAssembler::AllocateHeapNumber(MutableMode mode) {
   Node* result = Allocate(HeapNumber::kSize, kNone);
   Heap::RootListIndex heap_map_index =
       mode == IMMUTABLE ? Heap::kHeapNumberMapRootIndex
                         : Heap::kMutableHeapNumberMapRootIndex;
-  Node* map = LoadRoot(heap_map_index);
-  StoreMapNoWriteBarrier(result, map);
+  StoreMapNoWriteBarrier(result, heap_map_index);
   return result;
 }

 Node* CodeStubAssembler::AllocateHeapNumberWithValue(Node* value,
                                                      MutableMode mode) {
   Node* result = AllocateHeapNumber(mode);
   StoreHeapNumberValue(result, value);
   return result;
 }

 Node* CodeStubAssembler::AllocateSeqOneByteString(int length,
                                                   AllocationFlags flags) {
   Comment("AllocateSeqOneByteString");
   Node* result = Allocate(SeqOneByteString::SizeFor(length), flags);
   DCHECK(Heap::RootIsImmortalImmovable(Heap::kOneByteStringMapRootIndex));
-  StoreMapNoWriteBarrier(result, LoadRoot(Heap::kOneByteStringMapRootIndex));
+  StoreMapNoWriteBarrier(result, Heap::kOneByteStringMapRootIndex);
   StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kLengthOffset,
                                  SmiConstant(Smi::FromInt(length)));
   StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kHashFieldOffset,
                                  IntPtrConstant(String::kEmptyHashField),
                                  MachineRepresentation::kWord32);
   return result;
 }

 Node* CodeStubAssembler::AllocateSeqOneByteString(Node* context, Node* length,
                                                   ParameterMode mode,
                                                   AllocationFlags flags) {
   Comment("AllocateSeqOneByteString");
   Variable var_result(this, MachineRepresentation::kTagged);

   // Compute the SeqOneByteString size and check if it fits into new space.
   Label if_sizeissmall(this), if_notsizeissmall(this, Label::kDeferred),
       if_join(this);
   Node* raw_size = GetArrayAllocationSize(
       length, UINT8_ELEMENTS, mode,
       SeqOneByteString::kHeaderSize + kObjectAlignmentMask);
   Node* size = WordAnd(raw_size, IntPtrConstant(~kObjectAlignmentMask));
   Branch(IntPtrLessThanOrEqual(size, IntPtrConstant(kMaxRegularHeapObjectSize)),
          &if_sizeissmall, &if_notsizeissmall);

   Bind(&if_sizeissmall);
   {
     // Just allocate the SeqOneByteString in new space.
     Node* result = Allocate(size, flags);
     DCHECK(Heap::RootIsImmortalImmovable(Heap::kOneByteStringMapRootIndex));
-    StoreMapNoWriteBarrier(result, LoadRoot(Heap::kOneByteStringMapRootIndex));
+    StoreMapNoWriteBarrier(result, Heap::kOneByteStringMapRootIndex);
     StoreObjectFieldNoWriteBarrier(
         result, SeqOneByteString::kLengthOffset,
         mode == SMI_PARAMETERS ? length : SmiFromWord(length));
     StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kHashFieldOffset,
                                    IntPtrConstant(String::kEmptyHashField),
                                    MachineRepresentation::kWord32);
     var_result.Bind(result);
     Goto(&if_join);
   }

   Bind(&if_notsizeissmall);
   {
     // We might need to allocate in large object space, go to the runtime.
     Node* result =
         CallRuntime(Runtime::kAllocateSeqOneByteString, context,
                     mode == SMI_PARAMETERS ? length : SmiFromWord(length));
     var_result.Bind(result);
     Goto(&if_join);
   }

   Bind(&if_join);
   return var_result.value();
 }

 Node* CodeStubAssembler::AllocateSeqTwoByteString(int length,
                                                   AllocationFlags flags) {
   Comment("AllocateSeqTwoByteString");
   Node* result = Allocate(SeqTwoByteString::SizeFor(length), flags);
   DCHECK(Heap::RootIsImmortalImmovable(Heap::kStringMapRootIndex));
-  StoreMapNoWriteBarrier(result, LoadRoot(Heap::kStringMapRootIndex));
+  StoreMapNoWriteBarrier(result, Heap::kStringMapRootIndex);
   StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kLengthOffset,
                                  SmiConstant(Smi::FromInt(length)));
   StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kHashFieldOffset,
                                  IntPtrConstant(String::kEmptyHashField),
                                  MachineRepresentation::kWord32);
   return result;
 }

 Node* CodeStubAssembler::AllocateSeqTwoByteString(Node* context, Node* length,
                                                   ParameterMode mode,
                                                   AllocationFlags flags) {
   Comment("AllocateSeqTwoByteString");
   Variable var_result(this, MachineRepresentation::kTagged);

   // Compute the SeqTwoByteString size and check if it fits into new space.
   Label if_sizeissmall(this), if_notsizeissmall(this, Label::kDeferred),
       if_join(this);
   Node* raw_size = GetArrayAllocationSize(
       length, UINT16_ELEMENTS, mode,
       SeqOneByteString::kHeaderSize + kObjectAlignmentMask);
   Node* size = WordAnd(raw_size, IntPtrConstant(~kObjectAlignmentMask));
   Branch(IntPtrLessThanOrEqual(size, IntPtrConstant(kMaxRegularHeapObjectSize)),
          &if_sizeissmall, &if_notsizeissmall);

   Bind(&if_sizeissmall);
   {
     // Just allocate the SeqTwoByteString in new space.
     Node* result = Allocate(size, flags);
     DCHECK(Heap::RootIsImmortalImmovable(Heap::kStringMapRootIndex));
-    StoreMapNoWriteBarrier(result, LoadRoot(Heap::kStringMapRootIndex));
+    StoreMapNoWriteBarrier(result, Heap::kStringMapRootIndex);
     StoreObjectFieldNoWriteBarrier(
         result, SeqTwoByteString::kLengthOffset,
         mode == SMI_PARAMETERS ? length : SmiFromWord(length));
     StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kHashFieldOffset,
                                    IntPtrConstant(String::kEmptyHashField),
                                    MachineRepresentation::kWord32);
     var_result.Bind(result);
     Goto(&if_join);
   }

   Bind(&if_notsizeissmall);
   {
     // We might need to allocate in large object space, go to the runtime.
     Node* result =
         CallRuntime(Runtime::kAllocateSeqTwoByteString, context,
                     mode == SMI_PARAMETERS ? length : SmiFromWord(length));
     var_result.Bind(result);
     Goto(&if_join);
   }

   Bind(&if_join);
   return var_result.value();
 }

 Node* CodeStubAssembler::AllocateSlicedString(
     Heap::RootListIndex map_root_index, Node* length, Node* parent,
     Node* offset) {
   CSA_ASSERT(this, TaggedIsSmi(length));
   Node* result = Allocate(SlicedString::kSize);
-  Node* map = LoadRoot(map_root_index);
   DCHECK(Heap::RootIsImmortalImmovable(map_root_index));
-  StoreMapNoWriteBarrier(result, map);
+  StoreMapNoWriteBarrier(result, map_root_index);
   StoreObjectFieldNoWriteBarrier(result, SlicedString::kLengthOffset, length,
                                  MachineRepresentation::kTagged);
   StoreObjectFieldNoWriteBarrier(result, SlicedString::kHashFieldOffset,
                                  Int32Constant(String::kEmptyHashField),
                                  MachineRepresentation::kWord32);
   StoreObjectFieldNoWriteBarrier(result, SlicedString::kParentOffset, parent,
                                  MachineRepresentation::kTagged);
   StoreObjectFieldNoWriteBarrier(result, SlicedString::kOffsetOffset, offset,
                                  MachineRepresentation::kTagged);
   return result;
(...skipping 10 matching lines...)
   return AllocateSlicedString(Heap::kSlicedStringMapRootIndex, length, parent,
                               offset);
 }

 Node* CodeStubAssembler::AllocateConsString(Heap::RootListIndex map_root_index,
                                             Node* length, Node* first,
                                             Node* second,
                                             AllocationFlags flags) {
   CSA_ASSERT(this, TaggedIsSmi(length));
   Node* result = Allocate(ConsString::kSize, flags);
-  Node* map = LoadRoot(map_root_index);
   DCHECK(Heap::RootIsImmortalImmovable(map_root_index));
-  StoreMapNoWriteBarrier(result, map);
+  StoreMapNoWriteBarrier(result, map_root_index);
   StoreObjectFieldNoWriteBarrier(result, ConsString::kLengthOffset, length,
                                  MachineRepresentation::kTagged);
   StoreObjectFieldNoWriteBarrier(result, ConsString::kHashFieldOffset,
                                  Int32Constant(String::kEmptyHashField),
                                  MachineRepresentation::kWord32);
   bool const new_space = !(flags & kPretenured);
   if (new_space) {
     StoreObjectFieldNoWriteBarrier(result, ConsString::kFirstOffset, first,
                                    MachineRepresentation::kTagged);
     StoreObjectFieldNoWriteBarrier(result, ConsString::kSecondOffset, second,
(...skipping 130 matching lines...)
   CSA_ASSERT(this, WordIsPowerOfTwo(capacity));

   Node* length = EntryToIndex<NameDictionary>(capacity);
   Node* store_size =
       IntPtrAddFoldConstants(WordShl(length, IntPtrConstant(kPointerSizeLog2)),
                              IntPtrConstant(NameDictionary::kHeaderSize));

   Node* result = Allocate(store_size);
   Comment("Initialize NameDictionary");
   // Initialize FixedArray fields.
-  StoreObjectFieldRoot(result, FixedArray::kMapOffset,
-                       Heap::kHashTableMapRootIndex);
+  DCHECK(Heap::RootIsImmortalImmovable(Heap::kHashTableMapRootIndex));
+  StoreMapNoWriteBarrier(result, Heap::kHashTableMapRootIndex);
   StoreObjectFieldNoWriteBarrier(result, FixedArray::kLengthOffset,
                                  SmiFromWord(length));
   // Initialized HashTable fields.
   Node* zero = SmiConstant(0);
   StoreFixedArrayElement(result, NameDictionary::kNumberOfElementsIndex, zero,
                          SKIP_WRITE_BARRIER);
   StoreFixedArrayElement(result, NameDictionary::kNumberOfDeletedElementsIndex,
                          zero, SKIP_WRITE_BARRIER);
   StoreFixedArrayElement(result, NameDictionary::kCapacityIndex,
                          SmiTag(capacity), SKIP_WRITE_BARRIER);
(...skipping 138 matching lines...)
   if (allocation_site != nullptr) {
     InitializeAllocationMemento(array, JSArray::kSize, allocation_site);
   }
   return array;
 }

 Node* CodeStubAssembler::AllocateJSArray(ElementsKind kind, Node* array_map,
                                          Node* capacity, Node* length,
                                          Node* allocation_site,
                                          ParameterMode capacity_mode) {
-  bool is_double = IsFastDoubleElementsKind(kind);
-
   // Allocate both array and elements object, and initialize the JSArray.
   Node *array, *elements;
   std::tie(array, elements) = AllocateUninitializedJSArrayWithElements(
       kind, array_map, length, allocation_site, capacity, capacity_mode);
   // Setup elements object.
-  Heap* heap = isolate()->heap();
-  Handle<Map> elements_map(is_double ? heap->fixed_double_array_map()
-                                     : heap->fixed_array_map());
-  StoreMapNoWriteBarrier(elements, HeapConstant(elements_map));
+  Heap::RootListIndex elements_map_index =
+      IsFastDoubleElementsKind(kind) ? Heap::kFixedDoubleArrayMapRootIndex
+                                     : Heap::kFixedArrayMapRootIndex;
+  DCHECK(Heap::RootIsImmortalImmovable(elements_map_index));
+  StoreMapNoWriteBarrier(elements, elements_map_index);
   StoreObjectFieldNoWriteBarrier(elements, FixedArray::kLengthOffset,
                                  TagParameter(capacity, capacity_mode));

   // Fill in the elements with holes.
   FillFixedArrayWithValue(
       kind, elements, capacity_mode == SMI_PARAMETERS ? SmiConstant(Smi::kZero)
                                                       : IntPtrConstant(0),
       capacity, Heap::kTheHoleValueRootIndex, capacity_mode);

   return array;
 }

 Node* CodeStubAssembler::AllocateFixedArray(ElementsKind kind,
                                             Node* capacity_node,
                                             ParameterMode mode,
                                             AllocationFlags flags) {
   CSA_ASSERT(this,
              IntPtrGreaterThan(capacity_node, IntPtrOrSmiConstant(0, mode)));
   Node* total_size = GetFixedArrayAllocationSize(capacity_node, kind, mode);

   // Allocate both array and elements object, and initialize the JSArray.
   Node* array = Allocate(total_size, flags);
-  Heap* heap = isolate()->heap();
-  Handle<Map> map(IsFastDoubleElementsKind(kind)
-                      ? heap->fixed_double_array_map()
-                      : heap->fixed_array_map());
-  if (flags & kPretenured) {
-    StoreObjectField(array, JSObject::kMapOffset, HeapConstant(map));
-  } else {
-    StoreMapNoWriteBarrier(array, HeapConstant(map));
-  }
+  Heap::RootListIndex map_index = IsFastDoubleElementsKind(kind)
+                                      ? Heap::kFixedDoubleArrayMapRootIndex
+                                      : Heap::kFixedArrayMapRootIndex;
+  DCHECK(Heap::RootIsImmortalImmovable(map_index));
+  StoreMapNoWriteBarrier(array, map_index);
   StoreObjectFieldNoWriteBarrier(array, FixedArray::kLengthOffset,
                                  TagParameter(capacity_node, mode));
   return array;
 }
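
The AllocateFixedArray chunk above also drops the kPretenured special case: the old code fell back to a barriered StoreObjectField for pretenured (old-space) arrays, but both candidate maps are immortal immovable roots, so the barrier-free store is safe in either space. Hypothetical call sites, assuming the signatures shown in this file:

    // New-space and pretenured allocations now take the same store path;
    // the FixedArray map is an immortal immovable root either way.
    Node* young = AllocateFixedArray(FAST_ELEMENTS, IntPtrConstant(16),
                                     INTPTR_PARAMETERS, kNone);
    Node* tenured = AllocateFixedArray(FAST_ELEMENTS, IntPtrConstant(16),
                                       INTPTR_PARAMETERS, kPretenured);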

 void CodeStubAssembler::FillFixedArrayWithValue(
     ElementsKind kind, Node* array, Node* from_node, Node* to_node,
     Heap::RootListIndex value_root_index, ParameterMode mode) {
   bool is_double = IsFastDoubleElementsKind(kind);
   DCHECK(value_root_index == Heap::kTheHoleValueRootIndex ||
(...skipping 3962 matching lines...)
     Node* array_length =
         is_jsarray ? SmiUntag(LoadObjectField(object, JSArray::kLengthOffset))
                    : elements_length;

     GrowElementsCapacity(object, elements, from_kind, to_kind, array_length,
                          elements_length, mode, bailout);
     Goto(&done);
     Bind(&done);
   }

-  StoreObjectField(object, JSObject::kMapOffset, map);
+  StoreMap(object, map);
 }
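
By contrast, the elements-kind transition above stores a map that is not a root: the target map is an ordinary heap object, and the receiver may sit in old space while the map sits in new space, so the full map write barrier of StoreMap is required. A sketch of such a site (hypothetical; assumes the CSA context with native_context and a compile-time to_kind in scope):

    // Transitioning `object` to a new array map: the map is loaded from the
    // native context, not from a root, so StoreMap's write barrier must run.
    Node* transitioned_map =
        LoadContextElement(native_context, Context::ArrayMapIndex(to_kind));
    StoreMap(object, transitioned_map);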

 void CodeStubAssembler::TrapAllocationMemento(Node* object,
                                               Label* memento_found) {
   Comment("[ TrapAllocationMemento");
   Label no_memento_found(this);
   Label top_check(this), map_check(this);

   Node* new_space_top_address = ExternalConstant(
       ExternalReference::new_space_allocation_top_address(isolate()));
(...skipping 119 matching lines...)
     Node* zero_constant = SmiConstant(Smi::kZero);
     Branch(WordEqual(enum_length, zero_constant), &loop, use_runtime);
   }
 }

 Node* CodeStubAssembler::CreateAllocationSiteInFeedbackVector(
     Node* feedback_vector, Node* slot) {
   Node* size = IntPtrConstant(AllocationSite::kSize);
   Node* site = Allocate(size, CodeStubAssembler::kPretenured);

-  // Store the map
-  StoreObjectFieldRoot(site, AllocationSite::kMapOffset,
-                       Heap::kAllocationSiteMapRootIndex);
+  StoreMap(site, LoadRoot(Heap::kAllocationSiteMapRootIndex));
   Node* kind = SmiConstant(Smi::FromInt(GetInitialFastElementsKind()));
   StoreObjectFieldNoWriteBarrier(site, AllocationSite::kTransitionInfoOffset,
                                  kind);

   // Unlike literals, constructed arrays don't have nested sites
   Node* zero = IntPtrConstant(0);
   StoreObjectFieldNoWriteBarrier(site, AllocationSite::kNestedSiteOffset, zero);

   // Pretenuring calculation field.
   StoreObjectFieldNoWriteBarrier(site, AllocationSite::kPretenureDataOffset,
(...skipping 25 matching lines...)
   return site;
 }

 Node* CodeStubAssembler::CreateWeakCellInFeedbackVector(Node* feedback_vector,
                                                         Node* slot,
                                                         Node* value) {
   Node* size = IntPtrConstant(WeakCell::kSize);
   Node* cell = Allocate(size, CodeStubAssembler::kPretenured);

   // Initialize the WeakCell.
-  StoreObjectFieldRoot(cell, WeakCell::kMapOffset, Heap::kWeakCellMapRootIndex);
+  DCHECK(Heap::RootIsImmortalImmovable(Heap::kWeakCellMapRootIndex));
+  StoreMapNoWriteBarrier(cell, Heap::kWeakCellMapRootIndex);
   StoreObjectField(cell, WeakCell::kValueOffset, value);
   StoreObjectFieldRoot(cell, WeakCell::kNextOffset,
                        Heap::kTheHoleValueRootIndex);

   // Store the WeakCell in the feedback vector.
   StoreFixedArrayElement(feedback_vector, slot, cell, UPDATE_WRITE_BARRIER, 0,
                          CodeStubAssembler::SMI_PARAMETERS);
   return cell;
 }
(...skipping 1972 matching lines...)

 Node* CodeStubAssembler::IsDebugActive() {
   Node* is_debug_active = Load(
       MachineType::Uint8(),
       ExternalConstant(ExternalReference::debug_is_active_address(isolate())));
   return WordNotEqual(is_debug_active, Int32Constant(0));
 }

 }  // namespace internal
 }  // namespace v8