OLD | NEW |
1 // Copyright 2016 the V8 project authors. All rights reserved. | 1 // Copyright 2016 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/code-stub-assembler.h" | 5 #include "src/code-stub-assembler.h" |
6 #include "src/code-factory.h" | 6 #include "src/code-factory.h" |
7 #include "src/frames-inl.h" | 7 #include "src/frames-inl.h" |
8 #include "src/frames.h" | 8 #include "src/frames.h" |
9 #include "src/ic/handler-configuration.h" | 9 #include "src/ic/handler-configuration.h" |
10 #include "src/ic/stub-cache.h" | 10 #include "src/ic/stub-cache.h" |
(...skipping 1034 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1045 Node* CodeStubAssembler::LoadFixedArrayElement(Node* object, Node* index_node, | 1045 Node* CodeStubAssembler::LoadFixedArrayElement(Node* object, Node* index_node, |
1046 int additional_offset, | 1046 int additional_offset, |
1047 ParameterMode parameter_mode) { | 1047 ParameterMode parameter_mode) { |
1048 int32_t header_size = | 1048 int32_t header_size = |
1049 FixedArray::kHeaderSize + additional_offset - kHeapObjectTag; | 1049 FixedArray::kHeaderSize + additional_offset - kHeapObjectTag; |
1050 Node* offset = ElementOffsetFromIndex(index_node, FAST_HOLEY_ELEMENTS, | 1050 Node* offset = ElementOffsetFromIndex(index_node, FAST_HOLEY_ELEMENTS, |
1051 parameter_mode, header_size); | 1051 parameter_mode, header_size); |
1052 return Load(MachineType::AnyTagged(), object, offset); | 1052 return Load(MachineType::AnyTagged(), object, offset); |
1053 } | 1053 } |
1054 | 1054 |
| 1055 Node* CodeStubAssembler::LoadFixedTypedArrayElement( |
| 1056 Node* data_pointer, Node* index_node, ElementsKind elements_kind, |
| 1057 ParameterMode parameter_mode) { |
| 1058 Node* offset = |
| 1059 ElementOffsetFromIndex(index_node, elements_kind, parameter_mode, 0); |
| 1060 MachineType type; |
| 1061 switch (elements_kind) { |
| 1062 case UINT8_ELEMENTS: /* fall through */ |
| 1063 case UINT8_CLAMPED_ELEMENTS: |
| 1064 type = MachineType::Uint8(); |
| 1065 break; |
| 1066 case INT8_ELEMENTS: |
| 1067 type = MachineType::Int8(); |
| 1068 break; |
| 1069 case UINT16_ELEMENTS: |
| 1070 type = MachineType::Uint16(); |
| 1071 break; |
| 1072 case INT16_ELEMENTS: |
| 1073 type = MachineType::Int16(); |
| 1074 break; |
| 1075 case UINT32_ELEMENTS: |
| 1076 type = MachineType::Uint32(); |
| 1077 break; |
| 1078 case INT32_ELEMENTS: |
| 1079 type = MachineType::Int32(); |
| 1080 break; |
| 1081 case FLOAT32_ELEMENTS: |
| 1082 type = MachineType::Float32(); |
| 1083 break; |
| 1084 case FLOAT64_ELEMENTS: |
| 1085 type = MachineType::Float64(); |
| 1086 break; |
| 1087 default: |
| 1088 UNREACHABLE(); |
| 1089 } |
| 1090 return Load(type, data_pointer, offset); |
| 1091 } |
| 1092 |
1055 Node* CodeStubAssembler::LoadAndUntagToWord32FixedArrayElement( | 1093 Node* CodeStubAssembler::LoadAndUntagToWord32FixedArrayElement( |
1056 Node* object, Node* index_node, int additional_offset, | 1094 Node* object, Node* index_node, int additional_offset, |
1057 ParameterMode parameter_mode) { | 1095 ParameterMode parameter_mode) { |
1058 int32_t header_size = | 1096 int32_t header_size = |
1059 FixedArray::kHeaderSize + additional_offset - kHeapObjectTag; | 1097 FixedArray::kHeaderSize + additional_offset - kHeapObjectTag; |
1060 #if V8_TARGET_LITTLE_ENDIAN | 1098 #if V8_TARGET_LITTLE_ENDIAN |
1061 if (Is64()) { | 1099 if (Is64()) { |
1062 header_size += kPointerSize / 2; | 1100 header_size += kPointerSize / 2; |
1063 } | 1101 } |
1064 #endif | 1102 #endif |
(...skipping 6418 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
7483 Bind(&return_runtime); | 7521 Bind(&return_runtime); |
7484 { | 7522 { |
7485 result.Bind(CallRuntime(Runtime::kInstanceOf, context, object, callable)); | 7523 result.Bind(CallRuntime(Runtime::kInstanceOf, context, object, callable)); |
7486 Goto(&end); | 7524 Goto(&end); |
7487 } | 7525 } |
7488 | 7526 |
7489 Bind(&end); | 7527 Bind(&end); |
7490 return result.value(); | 7528 return result.value(); |
7491 } | 7529 } |
7492 | 7530 |
| 7531 compiler::Node* CodeStubAssembler::NumberInc(compiler::Node* value) { |
| 7532 Variable var_result(this, MachineRepresentation::kTagged), |
| 7533 var_finc_value(this, MachineRepresentation::kFloat64); |
| 7534 Label if_issmi(this), if_isnotsmi(this), do_finc(this), end(this); |
| 7535 Branch(TaggedIsSmi(value), &if_issmi, &if_isnotsmi); |
| 7536 |
| 7537 Bind(&if_issmi); |
| 7538 { |
| 7539 // Try fast Smi addition first. |
| 7540 Node* one = SmiConstant(Smi::FromInt(1)); |
| 7541 Node* pair = IntPtrAddWithOverflow(BitcastTaggedToWord(value), |
| 7542 BitcastTaggedToWord(one)); |
| 7543 Node* overflow = Projection(1, pair); |
| 7544 |
| 7545 // Check if the Smi addition overflowed. |
| 7546 Label if_overflow(this), if_notoverflow(this); |
| 7547 Branch(overflow, &if_overflow, &if_notoverflow); |
| 7548 |
| 7549 Bind(&if_notoverflow); |
| 7550 var_result.Bind(Projection(0, pair)); |
| 7551 Goto(&end); |
| 7552 |
| 7553 Bind(&if_overflow); |
| 7554 { |
| 7555 var_finc_value.Bind(SmiToFloat64(value)); |
| 7556 Goto(&do_finc); |
| 7557 } |
| 7558 } |
| 7559 |
| 7560 Bind(&if_isnotsmi); |
| 7561 { |
| 7562 // Check if the value is a HeapNumber. |
| 7563 Assert(IsHeapNumberMap(LoadMap(value))); |
| 7564 |
| 7565 // Load the HeapNumber value. |
| 7566 var_finc_value.Bind(LoadHeapNumberValue(value)); |
| 7567 Goto(&do_finc); |
| 7568 } |
| 7569 |
| 7570 Bind(&do_finc); |
| 7571 { |
| 7572 Node* finc_value = var_finc_value.value(); |
| 7573 Node* one = Float64Constant(1.0); |
| 7574 Node* finc_result = Float64Add(finc_value, one); |
| 7575 var_result.Bind(ChangeFloat64ToTagged(finc_result)); |
| 7576 Goto(&end); |
| 7577 } |
| 7578 |
| 7579 Bind(&end); |
| 7580 return var_result.value(); |
| 7581 } |
| 7582 |
// Creates a JSArrayIterator over |array| for the given iteration |mode|
// (keys, values, or entries). The iterator's map is looked up in the native
// context from a table of iterator maps; |array_map| and |array_type| are the
// receiver's map and instance type. For value/entry iteration over fast
// JSArrays the recorded map enables a fast-path map check during iteration;
// otherwise undefined is stored instead.
compiler::Node* CodeStubAssembler::CreateArrayIterator(
    compiler::Node* array, compiler::Node* array_map,
    compiler::Node* array_type, compiler::Node* context, IterationKind mode) {
  // Base index into the native context's iterator-map table, chosen per mode.
  // NOTE(review): no default case — assumes IterationKind has exactly these
  // three values, otherwise kBaseMapIndex is read uninitialized; confirm.
  int kBaseMapIndex;
  switch (mode) {
    case IterationKind::kKeys:
      kBaseMapIndex = Context::TYPED_ARRAY_KEY_ITERATOR_MAP_INDEX;
      break;
    case IterationKind::kValues:
      kBaseMapIndex = Context::UINT8_ARRAY_VALUE_ITERATOR_MAP_INDEX;
      break;
    case IterationKind::kEntries:
      kBaseMapIndex = Context::UINT8_ARRAY_KEY_VALUE_ITERATOR_MAP_INDEX;
      break;
  }

  // Fast Array iterator map index:
  //   (kBaseIndex + kFastIteratorOffset) + ElementsKind   (for JSArrays)
  //   kBaseIndex + (ElementsKind - UINT8_ELEMENTS)        (for JSTypedArrays)
  // The static asserts check that the value- and entry-iterator map tables
  // share the same relative layout in the native context.
  const int kFastIteratorOffset =
      Context::FAST_SMI_ARRAY_VALUE_ITERATOR_MAP_INDEX -
      Context::UINT8_ARRAY_VALUE_ITERATOR_MAP_INDEX;
  STATIC_ASSERT(kFastIteratorOffset ==
                (Context::FAST_SMI_ARRAY_KEY_VALUE_ITERATOR_MAP_INDEX -
                 Context::UINT8_ARRAY_KEY_VALUE_ITERATOR_MAP_INDEX));

  // Slow Array iterator map index: (kBaseIndex + kSlowIteratorOffset).
  const int kSlowIteratorOffset =
      Context::GENERIC_ARRAY_VALUE_ITERATOR_MAP_INDEX -
      Context::UINT8_ARRAY_VALUE_ITERATOR_MAP_INDEX;
  STATIC_ASSERT(kSlowIteratorOffset ==
                (Context::GENERIC_ARRAY_KEY_VALUE_ITERATOR_MAP_INDEX -
                 Context::UINT8_ARRAY_KEY_VALUE_ITERATOR_MAP_INDEX));

  // Assert: Type(array) is Object (i.e. the receiver is a JSReceiver).
  Assert(Int32GreaterThanOrEqual(array_type,
                                 Int32Constant(FIRST_JS_RECEIVER_TYPE)));

  Variable var_result(this, MachineRepresentation::kTagged);
  // Index of the iterator map in the native context (raw word, not a Smi).
  Variable var_map_index(this, MachineType::PointerRepresentation());
  // Map of the iterated object to remember, or undefined when no fast-path
  // map check is possible.
  Variable var_array_map(this, MachineRepresentation::kTagged);

  Label return_result(this);
  Label allocate_iterator(this);

  if (mode == IterationKind::kKeys) {
    // There are only two key iterator maps, branch depending on whether or not
    // the receiver is a TypedArray or not.
    Label if_istypedarray(this), if_isgeneric(this);
    Branch(Word32Equal(array_type, Int32Constant(JS_TYPED_ARRAY_TYPE)),
           &if_istypedarray, &if_isgeneric);
    Bind(&if_istypedarray);
    var_map_index.Bind(
        IntPtrConstant(Context::TYPED_ARRAY_KEY_ITERATOR_MAP_INDEX));
    var_array_map.Bind(UndefinedConstant());
    Goto(&allocate_iterator);

    Bind(&if_isgeneric);
    var_map_index.Bind(
        IntPtrConstant(Context::GENERIC_ARRAY_KEY_ITERATOR_MAP_INDEX));
    var_array_map.Bind(array_map);
    Goto(&allocate_iterator);
  } else {
    // Value/entry iteration: pick between typed-array, fast-JSArray, and
    // generic iterator maps.
    Label if_istypedarray(this), if_isgeneric(this);
    Branch(Word32Equal(array_type, Int32Constant(JS_TYPED_ARRAY_TYPE)),
           &if_istypedarray, &if_isgeneric);

    Bind(&if_isgeneric);
    {
      Label if_isfast(this), if_isslow(this);
      BranchIfFastJSArray(array, context, &if_isfast, &if_isslow);

      Bind(&if_isfast);
      {
        // Fast JSArray: index the per-ElementsKind fast iterator map table.
        Node* map_index =
            IntPtrAdd(IntPtrConstant(kBaseMapIndex + kFastIteratorOffset),
                      LoadMapElementsKind(array_map));
        // Bounds-check the computed index against the fast-map table range.
        Assert(IntPtrGreaterThanOrEqual(
            map_index, IntPtrConstant(kBaseMapIndex + kFastIteratorOffset)));
        Assert(IntPtrLessThan(
            map_index, IntPtrConstant(kBaseMapIndex + kSlowIteratorOffset)));

        var_map_index.Bind(map_index);
        // Remember the array's map so iteration can take the fast path while
        // the map stays unchanged.
        var_array_map.Bind(array_map);
        Goto(&allocate_iterator);
      }

      Bind(&if_isslow);
      {
        // Slow/generic receiver: single generic iterator map, no fast-path
        // map to remember.
        Node* map_index = IntPtrAdd(IntPtrConstant(kBaseMapIndex),
                                    IntPtrConstant(kSlowIteratorOffset));
        var_map_index.Bind(map_index);
        var_array_map.Bind(UndefinedConstant());
        Goto(&allocate_iterator);
      }
    }

    Bind(&if_istypedarray);
    {
      // JSTypedArray: the map table is indexed by ElementsKind relative to
      // UINT8_ELEMENTS.
      Node* map_index =
          IntPtrAdd(IntPtrConstant(kBaseMapIndex - UINT8_ELEMENTS),
                    LoadMapElementsKind(array_map));
      // Bounds-check the computed index against the typed-array table range.
      Assert(IntPtrLessThan(
          map_index, IntPtrConstant(kBaseMapIndex + kFastIteratorOffset)));
      Assert(
          IntPtrGreaterThanOrEqual(map_index, IntPtrConstant(kBaseMapIndex)));
      var_map_index.Bind(map_index);
      var_array_map.Bind(UndefinedConstant());
      Goto(&allocate_iterator);
    }
  }

  Bind(&allocate_iterator);
  {
    // Fetch the selected iterator map from the native context and allocate
    // the iterator object.
    Node* map =
        LoadFixedArrayElement(LoadNativeContext(context), var_map_index.value(),
                              0, CodeStubAssembler::INTPTR_PARAMETERS);
    var_result.Bind(AllocateJSArrayIterator(array, var_array_map.value(), map));
    Goto(&return_result);
  }

  Bind(&return_result);
  return var_result.value();
}
| 7707 |
| 7708 compiler::Node* CodeStubAssembler::AllocateJSArrayIterator( |
| 7709 compiler::Node* array, compiler::Node* array_map, compiler::Node* map) { |
| 7710 Node* iterator = Allocate(JSArrayIterator::kSize); |
| 7711 StoreMapNoWriteBarrier(iterator, map); |
| 7712 StoreObjectFieldRoot(iterator, JSArrayIterator::kPropertiesOffset, |
| 7713 Heap::kEmptyFixedArrayRootIndex); |
| 7714 StoreObjectFieldRoot(iterator, JSArrayIterator::kElementsOffset, |
| 7715 Heap::kEmptyFixedArrayRootIndex); |
| 7716 StoreObjectFieldNoWriteBarrier(iterator, |
| 7717 JSArrayIterator::kIteratedObjectOffset, array); |
| 7718 StoreObjectFieldNoWriteBarrier(iterator, JSArrayIterator::kNextIndexOffset, |
| 7719 SmiConstant(Smi::FromInt(0))); |
| 7720 StoreObjectFieldNoWriteBarrier( |
| 7721 iterator, JSArrayIterator::kIteratedObjectMapOffset, array_map); |
| 7722 return iterator; |
| 7723 } |
| 7724 |
7493 } // namespace internal | 7725 } // namespace internal |
7494 } // namespace v8 | 7726 } // namespace v8 |
OLD | NEW |