OLD | NEW |
---|---|
1 // Copyright 2016 the V8 project authors. All rights reserved. | 1 // Copyright 2016 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/code-stub-assembler.h" | 5 #include "src/code-stub-assembler.h" |
6 #include "src/code-factory.h" | 6 #include "src/code-factory.h" |
7 #include "src/frames-inl.h" | 7 #include "src/frames-inl.h" |
8 #include "src/frames.h" | 8 #include "src/frames.h" |
9 #include "src/ic/handler-configuration.h" | 9 #include "src/ic/handler-configuration.h" |
10 #include "src/ic/stub-cache.h" | 10 #include "src/ic/stub-cache.h" |
(...skipping 1464 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
1475 Heap::RootListIndex value_root_index, ParameterMode mode) { | 1475 Heap::RootListIndex value_root_index, ParameterMode mode) { |
1476 bool is_double = IsFastDoubleElementsKind(kind); | 1476 bool is_double = IsFastDoubleElementsKind(kind); |
1477 DCHECK(value_root_index == Heap::kTheHoleValueRootIndex || | 1477 DCHECK(value_root_index == Heap::kTheHoleValueRootIndex || |
1478 value_root_index == Heap::kUndefinedValueRootIndex); | 1478 value_root_index == Heap::kUndefinedValueRootIndex); |
1479 DCHECK_IMPLIES(is_double, value_root_index == Heap::kTheHoleValueRootIndex); | 1479 DCHECK_IMPLIES(is_double, value_root_index == Heap::kTheHoleValueRootIndex); |
1480 STATIC_ASSERT(kHoleNanLower32 == kHoleNanUpper32); | 1480 STATIC_ASSERT(kHoleNanLower32 == kHoleNanUpper32); |
1481 Node* double_hole = | 1481 Node* double_hole = |
1482 Is64() ? Int64Constant(kHoleNanInt64) : Int32Constant(kHoleNanLower32); | 1482 Is64() ? Int64Constant(kHoleNanInt64) : Int32Constant(kHoleNanLower32); |
1483 Node* value = LoadRoot(value_root_index); | 1483 Node* value = LoadRoot(value_root_index); |
1484 | 1484 |
1485 const int first_element_offset = FixedArray::kHeaderSize - kHeapObjectTag; | 1485 BuildFastFixedArrayForEach( |
1486 int32_t to; | 1486 array, kind, from_node, to_node, |
1487 bool constant_to = ToInt32Constant(to_node, to); | 1487 [value, is_double, double_hole](CodeStubAssembler* assembler, Node* array, |
1488 int32_t from; | 1488 Node* offset) { |
1489 bool constant_from = ToInt32Constant(from_node, from); | 1489 if (is_double) { |
1490 if (constant_to && constant_from && | 1490 // Don't use doubles to store the hole double, since manipulating the |
1491 (to - from) <= kElementLoopUnrollThreshold) { | 1491 // signaling NaN used for the hole in C++, e.g. with bit_cast, will |
1492 for (int i = from; i < to; ++i) { | 1492 // change its value on ia32 (the x87 stack is used to return values |
1493 Node* index = IntPtrConstant(i); | 1493 // and stores to the stack silently clear the signalling bit). |
1494 if (is_double) { | 1494 // |
1495 Node* offset = ElementOffsetFromIndex(index, kind, INTPTR_PARAMETERS, | 1495 // TODO(danno): When we have a Float32/Float64 wrapper class that |
1496 first_element_offset); | 1496 // preserves double bits during manipulation, remove this code/change |
1497 // Don't use doubles to store the hole double, since manipulating the | 1497 // this to an indexed Float64 store. |
1498 // signaling NaN used for the hole in C++, e.g. with bit_cast, will | 1498 if (assembler->Is64()) { |
1499 // change its value on ia32 (the x87 stack is used to return values | 1499 assembler->StoreNoWriteBarrier(MachineRepresentation::kWord64, |
1500 // and stores to the stack silently clear the signalling bit). | 1500 array, offset, double_hole); |
1501 // | 1501 } else { |
1502 // TODO(danno): When we have a Float32/Float64 wrapper class that | 1502 assembler->StoreNoWriteBarrier(MachineRepresentation::kWord32, |
1503 // preserves double bits during manipulation, remove this code/change | 1503 array, offset, double_hole); |
1504 // this to an indexed Float64 store. | 1504 assembler->StoreNoWriteBarrier( |
1505 if (Is64()) { | 1505 MachineRepresentation::kWord32, array, |
1506 StoreNoWriteBarrier(MachineRepresentation::kWord64, array, offset, | 1506 assembler->IntPtrAdd(offset, |
1507 double_hole); | 1507 assembler->IntPtrConstant(kPointerSize)), |
1508 double_hole); | |
1509 } | |
1508 } else { | 1510 } else { |
1509 StoreNoWriteBarrier(MachineRepresentation::kWord32, array, offset, | 1511 assembler->StoreNoWriteBarrier(MachineRepresentation::kTagged, array, |
1510 double_hole); | 1512 offset, value); |
1511 offset = ElementOffsetFromIndex(index, kind, INTPTR_PARAMETERS, | |
1512 first_element_offset + kPointerSize); | |
1513 StoreNoWriteBarrier(MachineRepresentation::kWord32, array, offset, | |
1514 double_hole); | |
1515 } | 1513 } |
1516 } else { | 1514 }, |
1517 StoreFixedArrayElement(array, index, value, SKIP_WRITE_BARRIER, | 1515 mode); |
1518 INTPTR_PARAMETERS); | |
1519 } | |
1520 } | |
1521 } else { | |
1522 Variable current(this, MachineRepresentation::kTagged); | |
1523 Label test(this); | |
1524 Label decrement(this, ¤t); | |
1525 Label done(this); | |
1526 Node* limit = | |
1527 IntPtrAdd(array, ElementOffsetFromIndex(from_node, kind, mode)); | |
1528 current.Bind(IntPtrAdd(array, ElementOffsetFromIndex(to_node, kind, mode))); | |
1529 | |
1530 Branch(WordEqual(current.value(), limit), &done, &decrement); | |
1531 | |
1532 Bind(&decrement); | |
1533 current.Bind(IntPtrSub( | |
1534 current.value(), | |
1535 IntPtrConstant(IsFastDoubleElementsKind(kind) ? kDoubleSize | |
1536 : kPointerSize))); | |
1537 if (is_double) { | |
1538 // Don't use doubles to store the hole double, since manipulating the | |
1539 // signaling NaN used for the hole in C++, e.g. with bit_cast, will | |
1540 // change its value on ia32 (the x87 stack is used to return values | |
1541 // and stores to the stack silently clear the signalling bit). | |
1542 // | |
1543 // TODO(danno): When we have a Float32/Float64 wrapper class that | |
1544 // preserves double bits during manipulation, remove this code/change | |
1545 // this to an indexed Float64 store. | |
1546 if (Is64()) { | |
1547 StoreNoWriteBarrier(MachineRepresentation::kWord64, current.value(), | |
1548 Int64Constant(first_element_offset), double_hole); | |
1549 } else { | |
1550 StoreNoWriteBarrier(MachineRepresentation::kWord32, current.value(), | |
1551 Int32Constant(first_element_offset), double_hole); | |
1552 StoreNoWriteBarrier(MachineRepresentation::kWord32, current.value(), | |
1553 Int32Constant(kPointerSize + first_element_offset), | |
1554 double_hole); | |
1555 } | |
1556 } else { | |
1557 StoreNoWriteBarrier(MachineType::PointerRepresentation(), current.value(), | |
1558 IntPtrConstant(first_element_offset), value); | |
1559 } | |
1560 Node* compare = WordNotEqual(current.value(), limit); | |
1561 Branch(compare, &decrement, &done); | |
1562 | |
1563 Bind(&done); | |
1564 } | |
1565 } | 1516 } |
1566 | 1517 |
1567 void CodeStubAssembler::CopyFixedArrayElements( | 1518 void CodeStubAssembler::CopyFixedArrayElements( |
1568 ElementsKind from_kind, Node* from_array, ElementsKind to_kind, | 1519 ElementsKind from_kind, Node* from_array, ElementsKind to_kind, |
1569 Node* to_array, Node* element_count, Node* capacity, | 1520 Node* to_array, Node* element_count, Node* capacity, |
1570 WriteBarrierMode barrier_mode, ParameterMode mode) { | 1521 WriteBarrierMode barrier_mode, ParameterMode mode) { |
1571 STATIC_ASSERT(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize); | 1522 STATIC_ASSERT(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize); |
1572 const int first_element_offset = FixedArray::kHeaderSize - kHeapObjectTag; | 1523 const int first_element_offset = FixedArray::kHeaderSize - kHeapObjectTag; |
1573 Comment("[ CopyFixedArrayElements"); | 1524 Comment("[ CopyFixedArrayElements"); |
1574 | 1525 |
(...skipping 1761 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
3336 var_entry->Bind(entry); | 3287 var_entry->Bind(entry); |
3337 Goto(&loop); | 3288 Goto(&loop); |
3338 } | 3289 } |
3339 } | 3290 } |
3340 | 3291 |
3341 void CodeStubAssembler::DescriptorLookupLinear(Node* unique_name, | 3292 void CodeStubAssembler::DescriptorLookupLinear(Node* unique_name, |
3342 Node* descriptors, Node* nof, | 3293 Node* descriptors, Node* nof, |
3343 Label* if_found, | 3294 Label* if_found, |
3344 Variable* var_name_index, | 3295 Variable* var_name_index, |
3345 Label* if_not_found) { | 3296 Label* if_not_found) { |
3346 Variable var_descriptor(this, MachineType::PointerRepresentation()); | 3297 Node* first_inclusive = IntPtrConstant(DescriptorArray::ToKeyIndex(0)); |
3347 Label loop(this, &var_descriptor); | 3298 Node* factor = IntPtrConstant(DescriptorArray::kDescriptorSize); |
3348 var_descriptor.Bind(IntPtrConstant(0)); | 3299 Node* last_exclusive = IntPtrAdd(first_inclusive, IntPtrMul(nof, factor)); |
3349 Goto(&loop); | |
3350 | 3300 |
3351 Bind(&loop); | 3301 BuildFastLoop( |
3352 { | 3302 MachineType::PointerRepresentation(), last_exclusive, first_inclusive, |
3353 Node* index = var_descriptor.value(); | 3303 [descriptors, unique_name, if_found, var_name_index]( |
3354 Node* name_offset = IntPtrConstant(DescriptorArray::ToKeyIndex(0)); | 3304 CodeStubAssembler* assembler, Node* name_index) { |
3355 Node* factor = IntPtrConstant(DescriptorArray::kDescriptorSize); | 3305 Node* candidate_name = assembler->LoadFixedArrayElement( |
3356 GotoIf(WordEqual(index, nof), if_not_found); | 3306 descriptors, name_index, 0, INTPTR_PARAMETERS); |
3357 Node* name_index = IntPtrAdd(name_offset, IntPtrMul(index, factor)); | 3307 var_name_index->Bind(name_index); |
3358 Node* candidate_name = | 3308 assembler->GotoIf(assembler->WordEqual(candidate_name, unique_name), |
3359 LoadFixedArrayElement(descriptors, name_index, 0, INTPTR_PARAMETERS); | 3309 if_found); |
3360 var_name_index->Bind(name_index); | 3310 }, |
3361 GotoIf(WordEqual(candidate_name, unique_name), if_found); | 3311 -DescriptorArray::kDescriptorSize, IndexAdvanceMode::kPre); |
3362 var_descriptor.Bind(IntPtrAdd(index, IntPtrConstant(1))); | 3312 Goto(if_not_found); |
3363 Goto(&loop); | |
3364 } | |
3365 } | 3313 } |
3366 | 3314 |
3367 void CodeStubAssembler::TryLookupProperty( | 3315 void CodeStubAssembler::TryLookupProperty( |
3368 Node* object, Node* map, Node* instance_type, Node* unique_name, | 3316 Node* object, Node* map, Node* instance_type, Node* unique_name, |
3369 Label* if_found_fast, Label* if_found_dict, Label* if_found_global, | 3317 Label* if_found_fast, Label* if_found_dict, Label* if_found_global, |
3370 Variable* var_meta_storage, Variable* var_name_index, Label* if_not_found, | 3318 Variable* var_meta_storage, Variable* var_name_index, Label* if_not_found, |
3371 Label* if_bailout) { | 3319 Label* if_bailout) { |
3372 DCHECK_EQ(MachineRepresentation::kTagged, var_meta_storage->rep()); | 3320 DCHECK_EQ(MachineRepresentation::kTagged, var_meta_storage->rep()); |
3373 DCHECK_EQ(MachineType::PointerRepresentation(), var_name_index->rep()); | 3321 DCHECK_EQ(MachineType::PointerRepresentation(), var_name_index->rep()); |
3374 | 3322 |
(...skipping 2362 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
5737 StoreObjectField(cell, WeakCell::kValueOffset, value); | 5685 StoreObjectField(cell, WeakCell::kValueOffset, value); |
5738 StoreObjectFieldRoot(cell, WeakCell::kNextOffset, | 5686 StoreObjectFieldRoot(cell, WeakCell::kNextOffset, |
5739 Heap::kTheHoleValueRootIndex); | 5687 Heap::kTheHoleValueRootIndex); |
5740 | 5688 |
5741 // Store the WeakCell in the feedback vector. | 5689 // Store the WeakCell in the feedback vector. |
5742 StoreFixedArrayElement(feedback_vector, slot, cell, UPDATE_WRITE_BARRIER, | 5690 StoreFixedArrayElement(feedback_vector, slot, cell, UPDATE_WRITE_BARRIER, |
5743 CodeStubAssembler::SMI_PARAMETERS); | 5691 CodeStubAssembler::SMI_PARAMETERS); |
5744 return cell; | 5692 return cell; |
5745 } | 5693 } |
5746 | 5694 |
5695 void CodeStubAssembler::BuildFastLoop( | |
5696 MachineRepresentation index_rep, Node* start_index, Node* end_index, | |
5697 std::function<void(CodeStubAssembler* assembler, Node* index)> body, | |
5698 int increment, IndexAdvanceMode mode) { | |
5699 Variable var(this, index_rep); | |
5700 var.Bind(start_index); | |
5701 Label loop(this, &var); | |
5702 Label after_loop(this); | |
5703 // Introduce an explicit second check of the termination condition before the | |
5704 // loop that helps turbofan generate better code. If there's only a single | |
Igor Sheludko
2016/10/05 07:33:19
s/help's/helps/
| |
5705 // check, then the CodeStubAssembler forces it to be at the beginning of the | |
5706 // loop requiring a backwards branch at the end of the loop (it's not possible | |
5707 // to force the loop header check at the end of the loop and branch forward to | |
5708 // it from the pre-header). The extra branch is slower in the case that the | |
5709 // loop actually iterates. | |
5710 BranchIf(WordEqual(var.value(), end_index), &after_loop, &loop); | |
5711 Bind(&loop); | |
5712 { | |
5713 if (mode == IndexAdvanceMode::kPre) { | |
5714 var.Bind(IntPtrAdd(var.value(), IntPtrConstant(increment))); | |
5715 } | |
5716 body(this, var.value()); | |
5717 if (mode == IndexAdvanceMode::kPost) { | |
5718 var.Bind(IntPtrAdd(var.value(), IntPtrConstant(increment))); | |
5719 } | |
5720 BranchIf(WordNotEqual(var.value(), end_index), &loop, &after_loop); | |
5721 } | |
5722 Bind(&after_loop); | |
5723 } | |
5724 | |
5725 void CodeStubAssembler::BuildFastFixedArrayForEach( | |
5726 compiler::Node* fixed_array, ElementsKind kind, | |
5727 compiler::Node* first_element_inclusive, | |
5728 compiler::Node* last_element_exclusive, | |
5729 std::function<void(CodeStubAssembler* assembler, | |
5730 compiler::Node* fixed_array, compiler::Node* offset)> | |
5731 body, | |
5732 ParameterMode mode, ForEachDirection direction) { | |
5733 STATIC_ASSERT(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize); | |
5734 int32_t first_val; | |
5735 bool constant_first = ToInt32Constant(first_element_inclusive, first_val); | |
5736 int32_t last_val; | |
5737 bool constant_last = ToInt32Constant(last_element_exclusive, last_val); | |
5738 if (constant_first && constant_last) { | |
5739 int delta = last_val - first_val; | |
5740 DCHECK(delta >= 0); | |
5741 if (delta <= kElementLoopUnrollThreshold) { | |
5742 if (direction == ForEachDirection::kForward) { | |
5743 for (int i = first_val; i < last_val; ++i) { | |
5744 Node* index = IntPtrConstant(i); | |
5745 Node* offset = | |
5746 ElementOffsetFromIndex(index, kind, INTPTR_PARAMETERS, | |
5747 FixedArray::kHeaderSize - kHeapObjectTag); | |
5748 body(this, fixed_array, offset); | |
5749 } | |
5750 } else { | |
5751 for (int i = last_val - 1; i >= first_val; --i) { | |
5752 Node* index = IntPtrConstant(i); | |
5753 Node* offset = | |
5754 ElementOffsetFromIndex(index, kind, INTPTR_PARAMETERS, | |
5755 FixedArray::kHeaderSize - kHeapObjectTag); | |
5756 body(this, fixed_array, offset); | |
5757 } | |
5758 } | |
5759 return; | |
5760 } | |
5761 } | |
5762 | |
5763 Node* start = | |
5764 ElementOffsetFromIndex(first_element_inclusive, kind, mode, | |
5765 FixedArray::kHeaderSize - kHeapObjectTag); | |
5766 Node* limit = | |
5767 ElementOffsetFromIndex(last_element_exclusive, kind, mode, | |
5768 FixedArray::kHeaderSize - kHeapObjectTag); | |
5769 if (direction == ForEachDirection::kReverse) std::swap(start, limit); | |
5770 | |
5771 int increment = IsFastDoubleElementsKind(kind) ? kDoubleSize : kPointerSize; | |
5772 BuildFastLoop( | |
5773 MachineType::PointerRepresentation(), start, limit, | |
5774 [fixed_array, body](CodeStubAssembler* assembler, Node* offset) { | |
5775 body(assembler, fixed_array, offset); | |
5776 }, | |
5777 direction == ForEachDirection::kReverse ? -increment : increment, | |
5778 direction == ForEachDirection::kReverse ? IndexAdvanceMode::kPre | |
5779 : IndexAdvanceMode::kPost); | |
5780 } | |
5781 | |
5747 } // namespace internal | 5782 } // namespace internal |
5748 } // namespace v8 | 5783 } // namespace v8 |
OLD | NEW |