OLD | NEW |
---|---|
1 // Copyright 2016 the V8 project authors. All rights reserved. | 1 // Copyright 2016 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/code-stub-assembler.h" | 5 #include "src/code-stub-assembler.h" |
6 #include "src/code-factory.h" | 6 #include "src/code-factory.h" |
7 #include "src/frames-inl.h" | 7 #include "src/frames-inl.h" |
8 #include "src/frames.h" | 8 #include "src/frames.h" |
9 #include "src/ic/handler-configuration.h" | 9 #include "src/ic/handler-configuration.h" |
10 #include "src/ic/stub-cache.h" | 10 #include "src/ic/stub-cache.h" |
(...skipping 1023 matching lines...) | |
1034 Node* CodeStubAssembler::LoadFixedArrayElement(Node* object, Node* index_node, | 1034 Node* CodeStubAssembler::LoadFixedArrayElement(Node* object, Node* index_node, |
1035 int additional_offset, | 1035 int additional_offset, |
1036 ParameterMode parameter_mode) { | 1036 ParameterMode parameter_mode) { |
1037 int32_t header_size = | 1037 int32_t header_size = |
1038 FixedArray::kHeaderSize + additional_offset - kHeapObjectTag; | 1038 FixedArray::kHeaderSize + additional_offset - kHeapObjectTag; |
1039 Node* offset = ElementOffsetFromIndex(index_node, FAST_HOLEY_ELEMENTS, | 1039 Node* offset = ElementOffsetFromIndex(index_node, FAST_HOLEY_ELEMENTS, |
1040 parameter_mode, header_size); | 1040 parameter_mode, header_size); |
1041 return Load(MachineType::AnyTagged(), object, offset); | 1041 return Load(MachineType::AnyTagged(), object, offset); |
1042 } | 1042 } |
1043 | 1043 |
1044 Node* CodeStubAssembler::LoadFixedTypedArrayElement( | |
1045 Node* data_pointer, Node* index_node, ElementsKind elements_kind, | |
1046 ParameterMode parameter_mode) { | |
1047 Node* offset = | |
1048 ElementOffsetFromIndex(index_node, elements_kind, parameter_mode, 0); | |
1049 MachineType type; | |
1050 switch (elements_kind) { | |
1051 case UINT8_ELEMENTS: /* fall through */ | |
1052 case UINT8_CLAMPED_ELEMENTS: | |
1053 type = MachineType::Uint8(); | |
1054 break; | |
1055 case INT8_ELEMENTS: | |
1056 type = MachineType::Int8(); | |
1057 break; | |
1058 case UINT16_ELEMENTS: | |
1059 type = MachineType::Uint16(); | |
1060 break; | |
1061 case INT16_ELEMENTS: | |
1062 type = MachineType::Int16(); | |
1063 break; | |
1064 case UINT32_ELEMENTS: | |
1065 type = MachineType::Uint32(); | |
1066 break; | |
1067 case INT32_ELEMENTS: | |
1068 type = MachineType::Int32(); | |
1069 break; | |
1070 case FLOAT32_ELEMENTS: | |
1071 type = MachineType::Float32(); | |
1072 break; | |
1073 case FLOAT64_ELEMENTS: | |
1074 type = MachineType::Float64(); | |
1075 break; | |
1076 default: | |
1077 UNREACHABLE(); | |
1078 } | |
1079 return Load(type, data_pointer, offset); | |
1080 } | |
1081 | |
1044 Node* CodeStubAssembler::LoadAndUntagToWord32FixedArrayElement( | 1082 Node* CodeStubAssembler::LoadAndUntagToWord32FixedArrayElement( |
1045 Node* object, Node* index_node, int additional_offset, | 1083 Node* object, Node* index_node, int additional_offset, |
1046 ParameterMode parameter_mode) { | 1084 ParameterMode parameter_mode) { |
1047 int32_t header_size = | 1085 int32_t header_size = |
1048 FixedArray::kHeaderSize + additional_offset - kHeapObjectTag; | 1086 FixedArray::kHeaderSize + additional_offset - kHeapObjectTag; |
1049 #if V8_TARGET_LITTLE_ENDIAN | 1087 #if V8_TARGET_LITTLE_ENDIAN |
1050 if (Is64()) { | 1088 if (Is64()) { |
1051 header_size += kPointerSize / 2; | 1089 header_size += kPointerSize / 2; |
1052 } | 1090 } |
1053 #endif | 1091 #endif |
(...skipping 6396 matching lines...) | |
7450 Bind(&return_runtime); | 7488 Bind(&return_runtime); |
7451 { | 7489 { |
7452 result.Bind(CallRuntime(Runtime::kInstanceOf, context, object, callable)); | 7490 result.Bind(CallRuntime(Runtime::kInstanceOf, context, object, callable)); |
7453 Goto(&end); | 7491 Goto(&end); |
7454 } | 7492 } |
7455 | 7493 |
7456 Bind(&end); | 7494 Bind(&end); |
7457 return result.value(); | 7495 return result.value(); |
7458 } | 7496 } |
7459 | 7497 |
7498 compiler::Node* CodeStubAssembler::NumberInc(compiler::Node* value) { | |
7499 Variable var_result(this, MachineRepresentation::kTagged), | |
7500 var_finc_value(this, MachineRepresentation::kFloat64); | |
7501 Label if_issmi(this), if_isnotsmi(this), do_finc(this), end(this); | |
7502 Branch(TaggedIsSmi(value), &if_issmi, &if_isnotsmi); | |
7503 | |
7504 Bind(&if_issmi); | |
7505 { | |
7506 // Try fast Smi addition first. | |
7507 Node* one = SmiConstant(Smi::FromInt(1)); | |
7508 Node* pair = SmiAddWithOverflow(value, one); | |
7509 Node* overflow = Projection(1, pair); | |
7510 | |
7511 // Check if the Smi addition overflowed. | |
7512 Label if_overflow(this), if_notoverflow(this); | |
7513 Branch(overflow, &if_overflow, &if_notoverflow); | |
7514 | |
7515 Bind(&if_notoverflow); | |
7516 var_result.Bind(Projection(0, pair)); | |
7517 Goto(&end); | |
7518 | |
7519 Bind(&if_overflow); | |
7520 { | |
7521 var_finc_value.Bind(SmiToFloat64(value)); | |
7522 Goto(&do_finc); | |
7523 } | |
7524 } | |
7525 | |
7526 Bind(&if_isnotsmi); | |
7527 { | |
7528 // Check if the value is a HeapNumber. | |
7529 Assert(IsHeapNumberMap(LoadMap(value))); | |
7530 | |
7531 // Load the HeapNumber value. | |
7532 var_finc_value.Bind(LoadHeapNumberValue(value)); | |
7533 Goto(&do_finc); | |
7534 } | |
7535 | |
7536 Bind(&do_finc); | |
7537 { | |
7538 Node* finc_value = var_finc_value.value(); | |
7539 Node* one = Float64Constant(1.0); | |
7540 Node* finc_result = Float64Add(finc_value, one); | |
7541 var_result.Bind(ChangeFloat64ToTagged(finc_result)); | |
7542 Goto(&end); | |
7543 } | |
7544 | |
7545 Bind(&end); | |
7546 return var_result.value(); | |
7547 } | |
7548 | |
7549 compiler::Node* CodeStubAssembler::CreateArrayIterator( | |
7550 compiler::Node* array, compiler::Node* array_map, | |
7551 compiler::Node* array_type, compiler::Node* context, IterationKind mode) { | |
7552 int kBaseMapIndex; | |
7553 switch (mode) { | |
7554 case IterationKind::kKeys: | |
7555 kBaseMapIndex = Context::TYPED_ARRAY_KEY_ITERATOR_MAP_INDEX; | |
7556 break; | |
7557 case IterationKind::kValues: | |
7558 kBaseMapIndex = Context::UINT8_ARRAY_VALUE_ITERATOR_MAP_INDEX; | |
7559 break; | |
7560 case IterationKind::kEntries: | |
7561 kBaseMapIndex = Context::UINT8_ARRAY_KEY_VALUE_ITERATOR_MAP_INDEX; | |
7562 break; | |
7563 } | |
7564 | |
7565 // Fast Array iterator map index: | |
Benedikt Meurer, 2016/10/14 03:46:51: Nice, thanks! | |
7566 // (kBaseIndex + kFastIteratorOffset) + ElementsKind (for JSArrays) | |
7567 // kBaseIndex + (ElementsKind - UINT8_ELEMENTS) (for JSTypedArrays) | |
7568 const int kFastIteratorOffset = | |
7569 Context::FAST_SMI_ARRAY_VALUE_ITERATOR_MAP_INDEX - | |
7570 Context::UINT8_ARRAY_VALUE_ITERATOR_MAP_INDEX; | |
7571 STATIC_ASSERT(kFastIteratorOffset == | |
7572 (Context::FAST_SMI_ARRAY_KEY_VALUE_ITERATOR_MAP_INDEX - | |
7573 Context::UINT8_ARRAY_KEY_VALUE_ITERATOR_MAP_INDEX)); | |
7574 | |
7575 // Slow Array iterator map index: (kBaseIndex + kSlowIteratorOffset) | |
7576 const int kSlowIteratorOffset = | |
7577 Context::GENERIC_ARRAY_VALUE_ITERATOR_MAP_INDEX - | |
7578 Context::UINT8_ARRAY_VALUE_ITERATOR_MAP_INDEX; | |
7579 STATIC_ASSERT(kSlowIteratorOffset == | |
7580 (Context::GENERIC_ARRAY_KEY_VALUE_ITERATOR_MAP_INDEX - | |
7581 Context::UINT8_ARRAY_KEY_VALUE_ITERATOR_MAP_INDEX)); | |
7582 | |
7583 // Assert: Type(array) is Object | |
7584 Assert(Int32GreaterThanOrEqual(array_type, | |
7585 Int32Constant(FIRST_JS_RECEIVER_TYPE))); | |
7586 | |
7587 Variable var_result(this, MachineRepresentation::kTagged); | |
7588 Variable var_map_index(this, MachineType::PointerRepresentation()); | |
7589 | |
7590 Label return_result(this); | |
7591 Label allocate_array_iterator(this); | |
7592 Label allocate_typed_array_iterator(this); | |
7593 | |
7594 if (mode == IterationKind::kKeys) { | |
7595 // There are only two key iterator maps; branch depending on whether or | |
7596 // not the receiver is a TypedArray. | |
7597 Label if_istypedarray(this), if_isgeneric(this); | |
7598 Branch(Word32Equal(array_type, Int32Constant(JS_TYPED_ARRAY_TYPE)), | |
7599 &if_istypedarray, &if_isgeneric); | |
7600 Bind(&if_istypedarray); | |
7601 var_map_index.Bind( | |
7602 IntPtrConstant(Context::TYPED_ARRAY_KEY_ITERATOR_MAP_INDEX)); | |
7603 Goto(&allocate_typed_array_iterator); | |
7604 | |
7605 Bind(&if_isgeneric); | |
7606 var_map_index.Bind( | |
7607 IntPtrConstant(Context::GENERIC_ARRAY_KEY_ITERATOR_MAP_INDEX)); | |
7608 Goto(&allocate_array_iterator); | |
7609 } else { | |
7610 Label if_istypedarray(this), if_isgeneric(this); | |
7611 Branch(Word32Equal(array_type, Int32Constant(JS_TYPED_ARRAY_TYPE)), | |
7612 &if_istypedarray, &if_isgeneric); | |
7613 | |
7614 Bind(&if_isgeneric); | |
7615 { | |
7616 Label if_isfast(this), if_isslow(this); | |
7617 BranchIfFastJSArray(array, context, &if_isfast, &if_isslow); | |
7618 | |
7619 Bind(&if_isfast); | |
7620 { | |
7621 Node* map_index = | |
7622 IntPtrAdd(IntPtrConstant(kBaseMapIndex + kFastIteratorOffset), | |
7623 LoadMapElementsKind(array_map)); | |
7624 Assert(IntPtrGreaterThanOrEqual( | |
7625 map_index, IntPtrConstant(kBaseMapIndex + kFastIteratorOffset))); | |
7626 Assert(IntPtrLessThan( | |
7627 map_index, IntPtrConstant(kBaseMapIndex + kSlowIteratorOffset))); | |
7628 | |
7629 var_map_index.Bind(map_index); | |
7630 Goto(&allocate_array_iterator); | |
7631 } | |
7632 | |
7633 Bind(&if_isslow); | |
7634 { | |
7635 Node* map_index = IntPtrAdd(IntPtrConstant(kBaseMapIndex), | |
7636 IntPtrConstant(kSlowIteratorOffset)); | |
7637 var_map_index.Bind(map_index); | |
7638 Goto(&allocate_array_iterator); | |
7639 } | |
7640 } | |
7641 | |
7642 Bind(&if_istypedarray); | |
7643 { | |
7644 Node* map_index = | |
7645 IntPtrAdd(IntPtrConstant(kBaseMapIndex - UINT8_ELEMENTS), | |
7646 LoadMapElementsKind(array_map)); | |
7647 Assert(IntPtrLessThan( | |
7648 map_index, IntPtrConstant(kBaseMapIndex + kFastIteratorOffset))); | |
7649 Assert( | |
7650 IntPtrGreaterThanOrEqual(map_index, IntPtrConstant(kBaseMapIndex))); | |
7651 var_map_index.Bind(map_index); | |
7652 Goto(&allocate_typed_array_iterator); | |
7653 } | |
7654 } | |
7655 | |
7656 Bind(&allocate_array_iterator); | |
7657 { | |
7658 Node* map = | |
7659 LoadFixedArrayElement(LoadNativeContext(context), var_map_index.value(), | |
7660 0, CodeStubAssembler::INTPTR_PARAMETERS); | |
7661 var_result.Bind(AllocateJSArrayIterator(array, array_map, map)); | |
7662 Goto(&return_result); | |
7663 } | |
7664 | |
7665 Bind(&allocate_typed_array_iterator); | |
7666 { | |
7667 Node* map = | |
7668 LoadFixedArrayElement(LoadNativeContext(context), var_map_index.value(), | |
7669 0, CodeStubAssembler::INTPTR_PARAMETERS); | |
7670 var_result.Bind(AllocateJSTypedArrayIterator(array, map)); | |
7671 Goto(&return_result); | |
7672 } | |
7673 | |
7674 Bind(&return_result); | |
7675 return var_result.value(); | |
7676 } | |
7677 | |
7678 compiler::Node* CodeStubAssembler::AllocateJSArrayIterator( | |
7679 compiler::Node* array, compiler::Node* array_map, compiler::Node* map) { | |
7680 Node* iterator = Allocate(JSArrayIterator::kSize); | |
7681 StoreMapNoWriteBarrier(iterator, map); | |
7682 StoreObjectFieldRoot(iterator, JSArrayIterator::kPropertiesOffset, | |
7683 Heap::kEmptyFixedArrayRootIndex); | |
7684 StoreObjectFieldRoot(iterator, JSArrayIterator::kElementsOffset, | |
7685 Heap::kEmptyFixedArrayRootIndex); | |
7686 StoreObjectFieldNoWriteBarrier(iterator, | |
7687 JSArrayIterator::kIteratedObjectOffset, array); | |
7688 StoreObjectFieldNoWriteBarrier(iterator, JSArrayIterator::kNextIndexOffset, | |
7689 SmiConstant(Smi::FromInt(0))); | |
7690 StoreObjectFieldNoWriteBarrier( | |
7691 iterator, JSArrayIterator::kIteratedObjectMapOffset, array_map); | |
7692 return iterator; | |
7693 } | |
7694 | |
7695 compiler::Node* CodeStubAssembler::AllocateJSTypedArrayIterator( | |
7696 compiler::Node* array, compiler::Node* map) { | |
7697 Node* iterator = Allocate(JSTypedArrayIterator::kSize); | |
7698 StoreMapNoWriteBarrier(iterator, map); | |
7699 StoreObjectFieldRoot(iterator, JSTypedArrayIterator::kPropertiesOffset, | |
7700 Heap::kEmptyFixedArrayRootIndex); | |
7701 StoreObjectFieldRoot(iterator, JSTypedArrayIterator::kElementsOffset, | |
7702 Heap::kEmptyFixedArrayRootIndex); | |
7703 StoreObjectFieldNoWriteBarrier( | |
7704 iterator, JSTypedArrayIterator::kIteratedObjectOffset, array); | |
7705 StoreObjectFieldNoWriteBarrier(iterator, | |
7706 JSTypedArrayIterator::kNextIndexOffset, | |
7707 SmiConstant(Smi::FromInt(0))); | |
7708 return iterator; | |
7709 } | |
7710 | |
7460 } // namespace internal | 7711 } // namespace internal |
7461 } // namespace v8 | 7712 } // namespace v8 |
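
For readers following the map-index arithmetic in CreateArrayIterator above, here is a minimal standalone sketch of the same computation. It is not part of the patch: the native-context slot indices and elements-kind values below are made-up placeholders standing in for the real constants in src/contexts.h and src/elements-kind.h, and the function names are hypothetical.

```cpp
// Illustrative sketch only, not part of this patch. Slot indices and
// elements-kind values are hypothetical placeholders for the real constants.
#include <cassert>

namespace {

// Hypothetical layout, ordered like the real table: typed-array value-iterator
// maps first (one per typed elements kind), then fast JSArray value-iterator
// maps (one per fast kind), then a single generic (slow) JSArray map.
const int kBaseMapIndex = 100;                       // UINT8_ARRAY_VALUE_ITERATOR_MAP_INDEX
const int kFastSmiArrayValueIteratorMapIndex = 109;  // first fast JSArray slot
const int kGenericArrayValueIteratorMapIndex = 115;  // slow JSArray slot

const int kUint8Elements = 9;    // placeholder for UINT8_ELEMENTS
const int kFastSmiElements = 0;  // placeholder for FAST_SMI_ELEMENTS

const int kFastIteratorOffset =
    kFastSmiArrayValueIteratorMapIndex - kBaseMapIndex;
const int kSlowIteratorOffset =
    kGenericArrayValueIteratorMapIndex - kBaseMapIndex;

// Fast JSArray: (kBaseMapIndex + kFastIteratorOffset) + ElementsKind.
int FastJSArrayIteratorMapIndex(int elements_kind) {
  return kBaseMapIndex + kFastIteratorOffset + elements_kind;
}

// JSTypedArray: kBaseMapIndex + (ElementsKind - UINT8_ELEMENTS).
int TypedArrayIteratorMapIndex(int elements_kind) {
  return kBaseMapIndex + (elements_kind - kUint8Elements);
}

}  // namespace

int main() {
  // A fast Smi JSArray selects the first fast-iterator slot.
  assert(FastJSArrayIteratorMapIndex(kFastSmiElements) ==
         kBaseMapIndex + kFastIteratorOffset);
  // A Uint8Array selects the base slot itself.
  assert(TypedArrayIteratorMapIndex(kUint8Elements) == kBaseMapIndex);
  // The slow (generic) JSArray slot lies past all fast-iterator slots, which
  // is the range the Assert(IntPtrLessThan(...)) checks in the CSA code mirror.
  assert(kFastIteratorOffset < kSlowIteratorOffset);
  return 0;
}
```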