Chromium Code Reviews

Unified Diff: src/code-stub-assembler.cc

Issue 2380953002: [stubs] Generalize loop handling in CodeStubAssembler and improve common loop performance (Closed)
Patch Set: Working version (created 4 years, 2 months ago)
 // Copyright 2016 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
 #include "src/code-stub-assembler.h"
 #include "src/code-factory.h"
 #include "src/frames-inl.h"
 #include "src/frames.h"
 #include "src/ic/handler-configuration.h"
 #include "src/ic/stub-cache.h"
(...skipping 1483 matching lines...)
     Heap::RootListIndex value_root_index, ParameterMode mode) {
   bool is_double = IsFastDoubleElementsKind(kind);
   DCHECK(value_root_index == Heap::kTheHoleValueRootIndex ||
          value_root_index == Heap::kUndefinedValueRootIndex);
   DCHECK_IMPLIES(is_double, value_root_index == Heap::kTheHoleValueRootIndex);
   STATIC_ASSERT(kHoleNanLower32 == kHoleNanUpper32);
   Node* double_hole =
       Is64() ? Int64Constant(kHoleNanInt64) : Int32Constant(kHoleNanLower32);
   Node* value = LoadRoot(value_root_index);
 
-  const int first_element_offset = FixedArray::kHeaderSize - kHeapObjectTag;
-  int32_t to;
-  bool constant_to = ToInt32Constant(to_node, to);
-  int32_t from;
-  bool constant_from = ToInt32Constant(from_node, from);
-  if (constant_to && constant_from &&
-      (to - from) <= kElementLoopUnrollThreshold) {
-    for (int i = from; i < to; ++i) {
-      Node* index = IntPtrConstant(i);
-      if (is_double) {
-        Node* offset = ElementOffsetFromIndex(index, kind, INTPTR_PARAMETERS,
-                                              first_element_offset);
-        // Don't use doubles to store the hole double, since manipulating the
-        // signaling NaN used for the hole in C++, e.g. with bit_cast, will
-        // change its value on ia32 (the x87 stack is used to return values
-        // and stores to the stack silently clear the signalling bit).
-        //
-        // TODO(danno): When we have a Float32/Float64 wrapper class that
-        // preserves double bits during manipulation, remove this code/change
-        // this to an indexed Float64 store.
-        if (Is64()) {
-          StoreNoWriteBarrier(MachineRepresentation::kWord64, array, offset,
-                              double_hole);
-        } else {
-          StoreNoWriteBarrier(MachineRepresentation::kWord32, array, offset,
-                              double_hole);
-          offset = ElementOffsetFromIndex(index, kind, INTPTR_PARAMETERS,
-                                          first_element_offset + kPointerSize);
-          StoreNoWriteBarrier(MachineRepresentation::kWord32, array, offset,
-                              double_hole);
-        }
-      } else {
-        StoreFixedArrayElement(array, index, value, SKIP_WRITE_BARRIER,
-                               INTPTR_PARAMETERS);
-      }
-    }
-  } else {
-    Variable current(this, MachineRepresentation::kTagged);
-    Label test(this);
-    Label decrement(this, &current);
-    Label done(this);
-    Node* limit =
-        IntPtrAdd(array, ElementOffsetFromIndex(from_node, kind, mode));
-    current.Bind(IntPtrAdd(array, ElementOffsetFromIndex(to_node, kind, mode)));
-
-    Branch(WordEqual(current.value(), limit), &done, &decrement);
-
-    Bind(&decrement);
-    current.Bind(IntPtrSub(
-        current.value(),
-        IntPtrConstant(IsFastDoubleElementsKind(kind) ? kDoubleSize
-                                                      : kPointerSize)));
-    if (is_double) {
-      // Don't use doubles to store the hole double, since manipulating the
-      // signaling NaN used for the hole in C++, e.g. with bit_cast, will
-      // change its value on ia32 (the x87 stack is used to return values
-      // and stores to the stack silently clear the signalling bit).
-      //
-      // TODO(danno): When we have a Float32/Float64 wrapper class that
-      // preserves double bits during manipulation, remove this code/change
-      // this to an indexed Float64 store.
-      if (Is64()) {
-        StoreNoWriteBarrier(MachineRepresentation::kWord64, current.value(),
-                            Int64Constant(first_element_offset), double_hole);
-      } else {
-        StoreNoWriteBarrier(MachineRepresentation::kWord32, current.value(),
-                            Int32Constant(first_element_offset), double_hole);
-        StoreNoWriteBarrier(MachineRepresentation::kWord32, current.value(),
-                            Int32Constant(kPointerSize + first_element_offset),
-                            double_hole);
-      }
-    } else {
-      StoreNoWriteBarrier(MachineType::PointerRepresentation(), current.value(),
-                          IntPtrConstant(first_element_offset), value);
-    }
-    Node* compare = WordNotEqual(current.value(), limit);
-    Branch(compare, &decrement, &done);
-
-    Bind(&done);
-  }
+  BuildFastFixedArrayForEach(
+      array, kind, from_node, to_node,
+      [value, is_double, double_hole](CodeStubAssembler* assembler, Node* array,
+                                      Node* offset) {
+        if (is_double) {
+          // Don't use doubles to store the hole double, since manipulating the
+          // signaling NaN used for the hole in C++, e.g. with bit_cast, will
+          // change its value on ia32 (the x87 stack is used to return values
+          // and stores to the stack silently clear the signalling bit).
+          //
+          // TODO(danno): When we have a Float32/Float64 wrapper class that
+          // preserves double bits during manipulation, remove this code/change
+          // this to an indexed Float64 store.
+          if (assembler->Is64()) {
+            assembler->StoreNoWriteBarrier(MachineRepresentation::kWord64,
+                                           array, offset, double_hole);
+          } else {
+            assembler->StoreNoWriteBarrier(MachineRepresentation::kWord32,
+                                           array, offset, double_hole);
+            assembler->StoreNoWriteBarrier(
+                MachineRepresentation::kWord32, array,
+                assembler->IntPtrAdd(offset,
+                                     assembler->IntPtrConstant(kPointerSize)),
+                double_hole);
+          }
+        } else {
+          assembler->StoreNoWriteBarrier(MachineRepresentation::kTagged, array,
+                                         offset, value);
+        }
+      },
+      mode);
 }
 
 void CodeStubAssembler::CopyFixedArrayElements(
     ElementsKind from_kind, Node* from_array, ElementsKind to_kind,
     Node* to_array, Node* element_count, Node* capacity,
     WriteBarrierMode barrier_mode, ParameterMode mode) {
   STATIC_ASSERT(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize);
   const int first_element_offset = FixedArray::kHeaderSize - kHeapObjectTag;
   Comment("[ CopyFixedArrayElements");
 
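
Note on the FillFixedArrayWithValue change above: the lambda writes the hole NaN as raw machine words rather than as a Float64 value. A minimal standalone C++ sketch of the idea (the bit pattern below is illustrative, not necessarily V8's exact kHoleNanInt64; the STATIC_ASSERT in the function guarantees only that the upper and lower 32-bit words match):

    #include <cstdint>
    #include <cstring>

    // Store the hole's bit pattern without ever materializing it as a double.
    // Round-tripping a signaling NaN through a C++ double (e.g. via bit_cast,
    // or an x87 floating-point store on ia32) can silently set the quiet bit
    // and change the value.
    void WriteHoleNan(void* payload_slot, bool is_64_bit) {
      const uint32_t kHoleWord = 0xFFF7FFFF;  // illustrative; lower == upper
      const uint64_t hole_bits = (uint64_t{kHoleWord} << 32) | kHoleWord;
      if (is_64_bit) {
        std::memcpy(payload_slot, &hole_bits, sizeof(hole_bits));  // one store
      } else {
        // Two 32-bit stores, mirroring the kWord32 store pair in the lambda.
        std::memcpy(payload_slot, &kHoleWord, sizeof(kHoleWord));
        std::memcpy(static_cast<char*>(payload_slot) + sizeof(kHoleWord),
                    &kHoleWord, sizeof(kHoleWord));
      }
    }
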
(...skipping 1761 matching lines...)
     var_entry->Bind(entry);
     Goto(&loop);
   }
 }
 
 void CodeStubAssembler::DescriptorLookupLinear(Node* unique_name,
                                                Node* descriptors, Node* nof,
                                                Label* if_found,
                                                Variable* var_name_index,
                                                Label* if_not_found) {
-  Variable var_descriptor(this, MachineType::PointerRepresentation());
-  Label loop(this, &var_descriptor);
-  var_descriptor.Bind(IntPtrConstant(0));
-  Goto(&loop);
-
-  Bind(&loop);
-  {
-    Node* index = var_descriptor.value();
-    Node* name_offset = IntPtrConstant(DescriptorArray::ToKeyIndex(0));
-    Node* factor = IntPtrConstant(DescriptorArray::kDescriptorSize);
-    GotoIf(WordEqual(index, nof), if_not_found);
-    Node* name_index = IntPtrAdd(name_offset, IntPtrMul(index, factor));
-    Node* candidate_name =
-        LoadFixedArrayElement(descriptors, name_index, 0, INTPTR_PARAMETERS);
-    var_name_index->Bind(name_index);
-    GotoIf(WordEqual(candidate_name, unique_name), if_found);
-    var_descriptor.Bind(IntPtrAdd(index, IntPtrConstant(1)));
-    Goto(&loop);
-  }
+  Node* first_inclusive = IntPtrConstant(DescriptorArray::ToKeyIndex(0));
+  Node* factor = IntPtrConstant(DescriptorArray::kDescriptorSize);
+  Node* last_exclusive = IntPtrAdd(first_inclusive, IntPtrMul(nof, factor));
+
+  BuildFastLoop(
+      MachineType::PointerRepresentation(), last_exclusive, first_inclusive,
+      [descriptors, unique_name, if_found, var_name_index](
+          CodeStubAssembler* assembler, Node* name_index) {
+        Node* candidate_name = assembler->LoadFixedArrayElement(
+            descriptors, name_index, 0, INTPTR_PARAMETERS);
+        var_name_index->Bind(name_index);
+        assembler->GotoIf(assembler->WordEqual(candidate_name, unique_name),
+                          if_found);
+      },
+      -DescriptorArray::kDescriptorSize, IndexAdvanceMode::kPre);
+  Goto(if_not_found);
 }
 
 void CodeStubAssembler::TryLookupProperty(
     Node* object, Node* map, Node* instance_type, Node* unique_name,
     Label* if_found_fast, Label* if_found_dict, Label* if_found_global,
     Variable* var_meta_storage, Variable* var_name_index, Label* if_not_found,
     Label* if_bailout) {
   DCHECK_EQ(MachineRepresentation::kTagged, var_meta_storage->rep());
   DCHECK_EQ(MachineType::PointerRepresentation(), var_name_index->rep());
 
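
The rewritten DescriptorLookupLinear above walks the descriptor keys backwards, from last_exclusive down to ToKeyIndex(0), via BuildFastLoop with a negative increment and IndexAdvanceMode::kPre. A plain C++ sketch of the index sequence that call produces (kFirst and kSize are stand-ins for DescriptorArray::ToKeyIndex(0) and DescriptorArray::kDescriptorSize, whose actual values are not shown in this diff):

    #include <cstdint>

    // Index sequence of BuildFastLoop(last_exclusive, first_inclusive, body,
    // -kSize, kPre): the body sees kFirst + (nof-1)*kSize, ..., kFirst, and
    // never runs when nof == 0.
    void DescriptorLookupLinearSketch(std::intptr_t nof) {
      const std::intptr_t kFirst = 3;  // stand-in for ToKeyIndex(0)
      const std::intptr_t kSize = 3;   // stand-in for kDescriptorSize
      std::intptr_t name_index = kFirst + nof * kSize;  // last_exclusive
      while (name_index != kFirst) {  // BuildFastLoop's pre-loop equality check
        name_index -= kSize;          // kPre: advance before the body runs
        // body(name_index): compare the key at name_index with unique_name.
      }
    }
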
(...skipping 2367 matching lines...)
   StoreObjectField(cell, WeakCell::kValueOffset, value);
   StoreObjectFieldRoot(cell, WeakCell::kNextOffset,
                        Heap::kTheHoleValueRootIndex);
 
   // Store the WeakCell in the feedback vector.
   StoreFixedArrayElement(feedback_vector, slot, cell, UPDATE_WRITE_BARRIER,
                          CodeStubAssembler::SMI_PARAMETERS);
   return cell;
 }
 
+void CodeStubAssembler::BuildFastLoop(
+    MachineRepresentation index_rep, Node* start_index, Node* end_index,
+    std::function<void(CodeStubAssembler* assembler, Node* index)> body,
+    int increment, IndexAdvanceMode mode) {
+  Variable var(this, index_rep);
+  var.Bind(start_index);
+  Label loop(this, &var);
+  Label after_loop(this);
+  // Introduce an explicit second check of the termination condition before the
+  // loop that helps turbofan generate better code. If there's only a single
+  // check, then the CodeStubAssembler forces it to be at the beginning of the
+  // loop requiring a backwards branch at the end of the loop (it's not possible
+  // to force the loop header check at the end of the loop and branch forward to
+  // it from the pre-header). The extra branch is slower in the case that the
+  // loop actually iterates.
+  BranchIf(WordEqual(var.value(), end_index), &after_loop, &loop);
+  Bind(&loop);
+  {
+    if (mode == IndexAdvanceMode::kPre) {
+      var.Bind(IntPtrAdd(var.value(), IntPtrConstant(increment)));
+    }
+    body(this, var.value());
+    if (mode == IndexAdvanceMode::kPost) {
+      var.Bind(IntPtrAdd(var.value(), IntPtrConstant(increment)));
+    }
+    BranchIf(WordNotEqual(var.value(), end_index), &loop, &after_loop);
+  }
+  Bind(&after_loop);
+}
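
A plain C++ sketch of the control-flow shape BuildFastLoop emits (an assumed equivalent for illustration, not generated code), showing why the duplicated termination check turns the loop into a guarded do-while with the hot test on the back edge:

    #include <cstdint>

    // kPost variant shown: advance after the body; kPre would advance first.
    void FastLoopShape(std::intptr_t start, std::intptr_t end,
                       std::intptr_t increment) {
      std::intptr_t index = start;
      if (index != end) {      // explicit pre-loop check: skip empty loops
        do {
          // body(index);      // loop body runs here
          index += increment;  // IndexAdvanceMode::kPost
        } while (index != end);  // single bottom-of-loop test on the hot path
      }
    }
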
+
+void CodeStubAssembler::BuildFastFixedArrayForEach(
+    compiler::Node* fixed_array, ElementsKind kind,
+    compiler::Node* first_element_inclusive,
+    compiler::Node* last_element_exclusive,
+    std::function<void(CodeStubAssembler* assembler,
+                       compiler::Node* fixed_array, compiler::Node* offset)>
+        body,
+    ParameterMode mode, ForEachDirection direction) {
+  STATIC_ASSERT(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize);
+  int32_t first_val;
+  bool constant_first = ToInt32Constant(first_element_inclusive, first_val);
+  int32_t last_val;
+  bool constant_last = ToInt32Constant(last_element_exclusive, last_val);
+  if (constant_first && constant_last) {
+    int delta = last_val - first_val;
+    DCHECK(delta >= 0);
+    if (delta <= kElementLoopUnrollThreshold) {
+      if (direction == ForEachDirection::kForward) {
+        for (int i = first_val; i < last_val; ++i) {
+          Node* index = IntPtrConstant(i);
+          Node* offset =
+              ElementOffsetFromIndex(index, kind, INTPTR_PARAMETERS,
+                                     FixedArray::kHeaderSize - kHeapObjectTag);
+          body(this, fixed_array, offset);
+        }
+      } else {
+        for (int i = last_val - 1; i >= first_val; --i) {
+          Node* index = IntPtrConstant(i);
+          Node* offset =
+              ElementOffsetFromIndex(index, kind, INTPTR_PARAMETERS,
+                                     FixedArray::kHeaderSize - kHeapObjectTag);
+          body(this, fixed_array, offset);
+        }
+      }
+      return;
+    }
+  }
+
+  Node* start =
+      ElementOffsetFromIndex(first_element_inclusive, kind, mode,
+                             FixedArray::kHeaderSize - kHeapObjectTag);
+  Node* limit =
+      ElementOffsetFromIndex(last_element_exclusive, kind, mode,
+                             FixedArray::kHeaderSize - kHeapObjectTag);
+  if (direction == ForEachDirection::kReverse) std::swap(start, limit);
+
+  int increment = IsFastDoubleElementsKind(kind) ? kDoubleSize : kPointerSize;
+  BuildFastLoop(
+      MachineType::PointerRepresentation(), start, limit,
+      [fixed_array, body](CodeStubAssembler* assembler, Node* offset) {
+        body(assembler, fixed_array, offset);
+      },
+      direction == ForEachDirection::kReverse ? -increment : increment,
+      direction == ForEachDirection::kReverse ? IndexAdvanceMode::kPre
+                                              : IndexAdvanceMode::kPost);
+}
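
For review context, a hypothetical call site for the new helper; csa, array, length, and marker are assumed names, and the enum qualifications assume ParameterMode and ForEachDirection are nested in CodeStubAssembler as the code above suggests:

    // Sketch: overwrite every element of a FixedArray with a tagged marker.
    // The callback receives the array plus a byte offset from the tagged
    // pointer, so it can issue StoreNoWriteBarrier directly.
    csa->BuildFastFixedArrayForEach(
        array, FAST_ELEMENTS, csa->IntPtrConstant(0), length,
        [marker](CodeStubAssembler* assembler, compiler::Node* array,
                 compiler::Node* offset) {
          assembler->StoreNoWriteBarrier(MachineRepresentation::kTagged, array,
                                         offset, marker);
        },
        CodeStubAssembler::INTPTR_PARAMETERS,
        CodeStubAssembler::ForEachDirection::kForward);
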
+
 }  // namespace internal
 }  // namespace v8