OLD | NEW |
1 // Copyright 2016 the V8 project authors. All rights reserved. | 1 // Copyright 2016 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/code-stub-assembler.h" | 5 #include "src/code-stub-assembler.h" |
6 #include "src/code-factory.h" | 6 #include "src/code-factory.h" |
7 #include "src/frames-inl.h" | 7 #include "src/frames-inl.h" |
8 #include "src/frames.h" | 8 #include "src/frames.h" |
9 #include "src/ic/handler-configuration.h" | 9 #include "src/ic/handler-configuration.h" |
10 #include "src/ic/stub-cache.h" | 10 #include "src/ic/stub-cache.h" |
(...skipping 1023 matching lines...)
1034 Node* CodeStubAssembler::LoadFixedArrayElement(Node* object, Node* index_node, | 1034 Node* CodeStubAssembler::LoadFixedArrayElement(Node* object, Node* index_node, |
1035 int additional_offset, | 1035 int additional_offset, |
1036 ParameterMode parameter_mode) { | 1036 ParameterMode parameter_mode) { |
1037 int32_t header_size = | 1037 int32_t header_size = |
1038 FixedArray::kHeaderSize + additional_offset - kHeapObjectTag; | 1038 FixedArray::kHeaderSize + additional_offset - kHeapObjectTag; |
1039 Node* offset = ElementOffsetFromIndex(index_node, FAST_HOLEY_ELEMENTS, | 1039 Node* offset = ElementOffsetFromIndex(index_node, FAST_HOLEY_ELEMENTS, |
1040 parameter_mode, header_size); | 1040 parameter_mode, header_size); |
1041 return Load(MachineType::AnyTagged(), object, offset); | 1041 return Load(MachineType::AnyTagged(), object, offset); |
1042 } | 1042 } |
1043 | 1043 |
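[Editor's note: the offset computed above is plain address arithmetic over the tagged pointer. A minimal standalone sketch of the same calculation (hypothetical helper, not V8 API; kHeapObjectTag is subtracted because a tagged HeapObject pointer points one byte past the object's real address):

    #include <cstdint>

    // Byte offset of fixed-array element `index`, relative to the tagged
    // object pointer, mirroring the header_size computation above.
    intptr_t FixedArrayElementOffset(intptr_t index, int additional_offset,
                                     int fixed_array_header_size,
                                     int heap_object_tag, int pointer_size) {
      return fixed_array_header_size + additional_offset - heap_object_tag +
             index * pointer_size;
    }
]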
| 1044 Node* CodeStubAssembler::LoadFixedTypedArrayElement( |
| 1045 Node* data_pointer, Node* index_node, ElementsKind elements_kind, |
| 1046 ParameterMode parameter_mode) { |
| 1047 Node* offset = |
| 1048 ElementOffsetFromIndex(index_node, elements_kind, parameter_mode, 0); |
| 1049 MachineType type; |
| 1050 switch (elements_kind) { |
| 1051 case UINT8_ELEMENTS: /* fall through */ |
| 1052 case UINT8_CLAMPED_ELEMENTS: |
| 1053 type = MachineType::Uint8(); |
| 1054 break; |
| 1055 case INT8_ELEMENTS: |
| 1056 type = MachineType::Int8(); |
| 1057 break; |
| 1058 case UINT16_ELEMENTS: |
| 1059 type = MachineType::Uint16(); |
| 1060 break; |
| 1061 case INT16_ELEMENTS: |
| 1062 type = MachineType::Int16(); |
| 1063 break; |
| 1064 case UINT32_ELEMENTS: |
| 1065 type = MachineType::Uint32(); |
| 1066 break; |
| 1067 case INT32_ELEMENTS: |
| 1068 type = MachineType::Int32(); |
| 1069 break; |
| 1070 case FLOAT32_ELEMENTS: |
| 1071 type = MachineType::Float32(); |
| 1072 break; |
| 1073 case FLOAT64_ELEMENTS: |
| 1074 type = MachineType::Float64(); |
| 1075 break; |
| 1076 default: |
| 1077 UNREACHABLE(); |
| 1078 } |
| 1079 return Load(type, data_pointer, offset); |
| 1080 } |
| 1081 |
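[Editor's note: LoadFixedTypedArrayElement reads straight from the untagged backing store, so the header size passed to ElementOffsetFromIndex is 0 and no tag adjustment is needed. A self-contained sketch of what the FLOAT64_ELEMENTS case amounts to (illustrative only, not V8 API):

    #include <cstdint>
    #include <cstring>

    // offset = index << 3 for FLOAT64_ELEMENTS; data_pointer is raw memory.
    double LoadFloat64Element(const uint8_t* data_pointer, intptr_t index) {
      double result;
      std::memcpy(&result, data_pointer + (index << 3), sizeof(result));
      return result;
    }
]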
1044 Node* CodeStubAssembler::LoadAndUntagToWord32FixedArrayElement( | 1082 Node* CodeStubAssembler::LoadAndUntagToWord32FixedArrayElement( |
1045 Node* object, Node* index_node, int additional_offset, | 1083 Node* object, Node* index_node, int additional_offset, |
1046 ParameterMode parameter_mode) { | 1084 ParameterMode parameter_mode) { |
1047 int32_t header_size = | 1085 int32_t header_size = |
1048 FixedArray::kHeaderSize + additional_offset - kHeapObjectTag; | 1086 FixedArray::kHeaderSize + additional_offset - kHeapObjectTag; |
1049 #if V8_TARGET_LITTLE_ENDIAN | 1087 #if V8_TARGET_LITTLE_ENDIAN |
1050 if (Is64()) { | 1088 if (Is64()) { |
1051 header_size += kPointerSize / 2; | 1089 header_size += kPointerSize / 2; |
1052 } | 1090 } |
1053 #endif | 1091 #endif |
(...skipping 6396 matching lines...)
7450 Bind(&return_runtime); | 7488 Bind(&return_runtime); |
7451 { | 7489 { |
7452 result.Bind(CallRuntime(Runtime::kInstanceOf, context, object, callable)); | 7490 result.Bind(CallRuntime(Runtime::kInstanceOf, context, object, callable)); |
7453 Goto(&end); | 7491 Goto(&end); |
7454 } | 7492 } |
7455 | 7493 |
7456 Bind(&end); | 7494 Bind(&end); |
7457 return result.value(); | 7495 return result.value(); |
7458 } | 7496 } |
7459 | 7497 |
| 7498 compiler::Node* CodeStubAssembler::NumberInc(compiler::Node* value) { |
| 7499 Variable var_result(this, MachineRepresentation::kTagged), |
| 7500 var_finc_value(this, MachineRepresentation::kFloat64); |
| 7501 Label if_issmi(this), if_isnotsmi(this), do_finc(this), end(this); |
| 7502 Branch(TaggedIsSmi(value), &if_issmi, &if_isnotsmi); |
| 7503 |
| 7504 Bind(&if_issmi); |
| 7505 { |
| 7506 // Try fast Smi addition first. |
| 7507 Node* one = SmiConstant(Smi::FromInt(1)); |
| 7508 Node* pair = SmiAddWithOverflow(value, one); |
| 7509 Node* overflow = Projection(1, pair); |
| 7510 |
| 7511 // Check if the Smi addition overflowed. |
| 7512 Label if_overflow(this), if_notoverflow(this); |
| 7513 Branch(overflow, &if_overflow, &if_notoverflow); |
| 7514 |
| 7515 Bind(&if_notoverflow); |
| 7516 var_result.Bind(Projection(0, pair)); |
| 7517 Goto(&end); |
| 7518 |
| 7519 Bind(&if_overflow); |
| 7520 { |
| 7521 var_finc_value.Bind(SmiToFloat64(value)); |
| 7522 Goto(&do_finc); |
| 7523 } |
| 7524 } |
| 7525 |
| 7526 Bind(&if_isnotsmi); |
| 7527 { |
| 7528 // Check if the value is a HeapNumber. |
| 7529 Assert(IsHeapNumberMap(LoadMap(value))); |
| 7530 |
| 7531 // Load the HeapNumber value. |
| 7532 var_finc_value.Bind(LoadHeapNumberValue(value)); |
| 7533 Goto(&do_finc); |
| 7534 } |
| 7535 |
| 7536 Bind(&do_finc); |
| 7537 { |
| 7538 Node* finc_value = var_finc_value.value(); |
| 7539 Node* one = Float64Constant(1.0); |
| 7540 Node* finc_result = Float64Add(finc_value, one); |
| 7541 var_result.Bind(ChangeFloat64ToTagged(finc_result)); |
| 7542 Goto(&end); |
| 7543 } |
| 7544 |
| 7545 Bind(&end); |
| 7546 return var_result.value(); |
| 7547 } |
| 7548 |
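[Editor's note: NumberInc follows the usual Smi fast path: attempt a tagged small-integer add, and on overflow redo the increment in float64 and box the result. A minimal standalone sketch of that control flow (hypothetical helpers; the real Smi payload is narrower than int32 on 32-bit targets):

    #include <cstdint>
    #include <limits>

    // Fast path: returns false on overflow, in which case the caller takes
    // the float64 path, mirroring the if_overflow label above.
    bool SmiInc(int32_t smi_value, int32_t* out) {
      if (smi_value == std::numeric_limits<int32_t>::max()) return false;
      *out = smi_value + 1;
      return true;
    }

    // Slow path (do_finc): add in float64; V8 would re-box this as a
    // HeapNumber via ChangeFloat64ToTagged.
    double FloatInc(double heap_number_value) { return heap_number_value + 1.0; }
]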
| 7549 compiler::Node* CodeStubAssembler::CreateArrayIterator( |
| 7550 compiler::Node* array, compiler::Node* array_map, |
| 7551 compiler::Node* array_type, compiler::Node* context, IterationKind mode) { |
| 7552 int kBaseMapIndex; |
| 7553 switch (mode) { |
| 7554 case IterationKind::kKeys: |
| 7555 kBaseMapIndex = Context::TYPED_ARRAY_KEY_ITERATOR_MAP_INDEX; |
| 7556 break; |
| 7557 case IterationKind::kValues: |
| 7558 kBaseMapIndex = Context::UINT8_ARRAY_VALUE_ITERATOR_MAP_INDEX; |
| 7559 break; |
| 7560 case IterationKind::kEntries: |
| 7561 kBaseMapIndex = Context::UINT8_ARRAY_KEY_VALUE_ITERATOR_MAP_INDEX; |
| 7562 break; |
| 7563 } |
| 7564 |
| 7565 // Fast Array iterator map index: |
 | 7566 // (kBaseMapIndex + kFastIteratorOffset) + ElementsKind (for JSArrays) |
 | 7567 // kBaseMapIndex + (ElementsKind - UINT8_ELEMENTS) (for JSTypedArrays) |
| 7568 const int kFastIteratorOffset = |
| 7569 Context::FAST_SMI_ARRAY_VALUE_ITERATOR_MAP_INDEX - |
| 7570 Context::UINT8_ARRAY_VALUE_ITERATOR_MAP_INDEX; |
| 7571 STATIC_ASSERT(kFastIteratorOffset == |
| 7572 (Context::FAST_SMI_ARRAY_KEY_VALUE_ITERATOR_MAP_INDEX - |
| 7573 Context::UINT8_ARRAY_KEY_VALUE_ITERATOR_MAP_INDEX)); |
| 7574 |
 | 7575 // Slow Array iterator map index: (kBaseMapIndex + kSlowIteratorOffset) |
| 7576 const int kSlowIteratorOffset = |
| 7577 Context::GENERIC_ARRAY_VALUE_ITERATOR_MAP_INDEX - |
| 7578 Context::UINT8_ARRAY_VALUE_ITERATOR_MAP_INDEX; |
| 7579 STATIC_ASSERT(kSlowIteratorOffset == |
| 7580 (Context::GENERIC_ARRAY_KEY_VALUE_ITERATOR_MAP_INDEX - |
| 7581 Context::UINT8_ARRAY_KEY_VALUE_ITERATOR_MAP_INDEX)); |
| 7582 |
| 7583 // Assert: Type(array) is Object |
| 7584 Assert(Int32GreaterThanOrEqual(array_type, |
| 7585 Int32Constant(FIRST_JS_RECEIVER_TYPE))); |
| 7586 |
| 7587 Variable var_result(this, MachineRepresentation::kTagged); |
| 7588 Variable var_map_index(this, MachineType::PointerRepresentation()); |
| 7589 |
| 7590 Label return_result(this); |
| 7591 Label allocate_array_iterator(this); |
| 7592 Label allocate_typed_array_iterator(this); |
| 7593 |
| 7594 var_result.Bind(UndefinedConstant()); |
| 7595 var_map_index.Bind(IntPtrConstant(0)); |
| 7596 |
| 7597 if (mode == IterationKind::kKeys) { |
 | 7598 // There are only two key iterator maps, so branch on whether or not |
 | 7599 // the receiver is a TypedArray. |
| 7600 Label if_istypedarray(this), if_isgeneric(this); |
| 7601 Branch(Word32Equal(array_type, Int32Constant(JS_TYPED_ARRAY_TYPE)), |
| 7602 &if_istypedarray, &if_isgeneric); |
| 7603 Bind(&if_istypedarray); |
| 7604 var_map_index.Bind( |
| 7605 IntPtrConstant(Context::TYPED_ARRAY_KEY_ITERATOR_MAP_INDEX)); |
| 7606 Goto(&allocate_typed_array_iterator); |
| 7607 |
| 7608 Bind(&if_isgeneric); |
| 7609 var_map_index.Bind( |
| 7610 IntPtrConstant(Context::GENERIC_ARRAY_KEY_ITERATOR_MAP_INDEX)); |
| 7611 Goto(&allocate_array_iterator); |
| 7612 } else { |
| 7613 Label if_istypedarray(this), if_isgeneric(this); |
| 7614 Branch(Word32Equal(array_type, Int32Constant(JS_TYPED_ARRAY_TYPE)), |
| 7615 &if_istypedarray, &if_isgeneric); |
| 7616 |
| 7617 Bind(&if_isgeneric); |
| 7618 { |
| 7619 Label if_isfast(this), if_isslow(this); |
| 7620 BranchIfFastJSArray(array, context, &if_isfast, &if_isslow); |
| 7621 |
| 7622 Bind(&if_isfast); |
| 7623 { |
| 7624 Node* map_index = |
| 7625 IntPtrAdd(IntPtrConstant(kBaseMapIndex + kFastIteratorOffset), |
| 7626 LoadMapElementsKind(array_map)); |
| 7627 Assert(IntPtrGreaterThanOrEqual( |
| 7628 map_index, IntPtrConstant(kBaseMapIndex + kFastIteratorOffset))); |
| 7629 Assert(IntPtrLessThan( |
| 7630 map_index, IntPtrConstant(kBaseMapIndex + kSlowIteratorOffset))); |
| 7631 |
| 7632 var_map_index.Bind(map_index); |
| 7633 Goto(&allocate_array_iterator); |
| 7634 } |
| 7635 |
| 7636 Bind(&if_isslow); |
| 7637 { |
| 7638 Node* map_index = IntPtrAdd(IntPtrConstant(kBaseMapIndex), |
| 7639 IntPtrConstant(kSlowIteratorOffset)); |
| 7640 var_map_index.Bind(map_index); |
| 7641 Goto(&allocate_array_iterator); |
| 7642 } |
| 7643 } |
| 7644 |
| 7645 Bind(&if_istypedarray); |
| 7646 { |
| 7647 Node* map_index = |
| 7648 IntPtrAdd(IntPtrConstant(kBaseMapIndex - UINT8_ELEMENTS), |
| 7649 LoadMapElementsKind(array_map)); |
| 7650 Assert(IntPtrLessThan( |
| 7651 map_index, IntPtrConstant(kBaseMapIndex + kFastIteratorOffset))); |
| 7652 Assert( |
| 7653 IntPtrGreaterThanOrEqual(map_index, IntPtrConstant(kBaseMapIndex))); |
| 7654 var_map_index.Bind(map_index); |
| 7655 Goto(&allocate_typed_array_iterator); |
| 7656 } |
| 7657 } |
| 7658 |
| 7659 Bind(&allocate_array_iterator); |
| 7660 { |
| 7661 Node* map = |
| 7662 LoadFixedArrayElement(LoadNativeContext(context), var_map_index.value(), |
| 7663 0, CodeStubAssembler::INTPTR_PARAMETERS); |
| 7664 var_result.Bind(AllocateJSArrayIterator(array, array_map, map)); |
| 7665 Goto(&return_result); |
| 7666 } |
| 7667 |
| 7668 Bind(&allocate_typed_array_iterator); |
| 7669 { |
| 7670 Node* map = |
| 7671 LoadFixedArrayElement(LoadNativeContext(context), var_map_index.value(), |
| 7672 0, CodeStubAssembler::INTPTR_PARAMETERS); |
| 7673 var_result.Bind(AllocateJSTypedArrayIterator(array, map)); |
| 7674 Goto(&return_result); |
| 7675 } |
| 7676 |
| 7677 Bind(&return_result); |
| 7678 return var_result.value(); |
| 7679 } |
| 7680 |
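[Editor's note: the map selection in CreateArrayIterator depends on the iterator maps occupying contiguous native-context slots, one per ElementsKind, which is what the STATIC_ASSERTs pin down. The index arithmetic as a plain sketch (parameters stand in for the Context::* constants; illustrative only):

    // Fast JSArray path: base + fast offset + elements kind.
    int FastJSArrayIteratorMapIndex(int base, int fast_offset, int elements_kind) {
      return base + fast_offset + elements_kind;
    }

    // JSTypedArray path: kinds are shifted so UINT8_ELEMENTS lands on base.
    int TypedArrayIteratorMapIndex(int base, int elements_kind, int uint8_kind) {
      return base + (elements_kind - uint8_kind);
    }

    // Slow/generic JSArray path: a single map per iteration kind.
    int SlowJSArrayIteratorMapIndex(int base, int slow_offset) {
      return base + slow_offset;
    }
]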
| 7681 compiler::Node* CodeStubAssembler::AllocateJSArrayIterator( |
| 7682 compiler::Node* array, compiler::Node* array_map, compiler::Node* map) { |
| 7683 Node* iterator = Allocate(JSArrayIterator::kSize); |
| 7684 StoreMapNoWriteBarrier(iterator, map); |
| 7685 StoreObjectFieldRoot(iterator, JSArrayIterator::kPropertiesOffset, |
| 7686 Heap::kEmptyFixedArrayRootIndex); |
| 7687 StoreObjectFieldRoot(iterator, JSArrayIterator::kElementsOffset, |
| 7688 Heap::kEmptyFixedArrayRootIndex); |
| 7689 StoreObjectFieldNoWriteBarrier(iterator, |
| 7690 JSArrayIterator::kIteratedObjectOffset, array); |
| 7691 StoreObjectFieldNoWriteBarrier(iterator, JSArrayIterator::kNextIndexOffset, |
| 7692 SmiConstant(Smi::FromInt(0))); |
| 7693 StoreObjectFieldNoWriteBarrier( |
| 7694 iterator, JSArrayIterator::kIteratedObjectMapOffset, array_map); |
| 7695 return iterator; |
| 7696 } |
| 7697 |
| 7698 compiler::Node* CodeStubAssembler::AllocateJSTypedArrayIterator( |
| 7699 compiler::Node* array, compiler::Node* map) { |
| 7700 Node* iterator = Allocate(JSTypedArrayIterator::kSize); |
| 7701 StoreMapNoWriteBarrier(iterator, map); |
| 7702 StoreObjectFieldRoot(iterator, JSTypedArrayIterator::kPropertiesOffset, |
| 7703 Heap::kEmptyFixedArrayRootIndex); |
| 7704 StoreObjectFieldRoot(iterator, JSTypedArrayIterator::kElementsOffset, |
| 7705 Heap::kEmptyFixedArrayRootIndex); |
| 7706 StoreObjectFieldNoWriteBarrier( |
| 7707 iterator, JSTypedArrayIterator::kIteratedObjectOffset, array); |
| 7708 StoreObjectFieldNoWriteBarrier(iterator, |
| 7709 JSTypedArrayIterator::kNextIndexOffset, |
| 7710 SmiConstant(Smi::FromInt(0))); |
| 7711 return iterator; |
| 7712 } |
| 7713 |
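[Editor's note: both allocators initialize every field of the freshly allocated object, which is why the unbarriered stores are safe. As a reading aid, the layout implied by the offsets used above (an illustrative struct, not how V8 declares heap objects):

    // JSTypedArrayIterator has the same layout minus the final field; the
    // iterated_object_map slot lets JSArrayIterator validate its fast path.
    struct JSArrayIteratorLayout {
      void* map;                  // kind-specific iterator map
      void* properties;           // empty_fixed_array root
      void* elements;             // empty_fixed_array root
      void* iterated_object;      // the array being iterated
      void* next_index;           // Smi 0
      void* iterated_object_map;  // array's map when the iterator was created
    };
]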
7460 } // namespace internal | 7714 } // namespace internal |
7461 } // namespace v8 | 7715 } // namespace v8 |