Index: src/code-stub-assembler.cc
diff --git a/src/code-stub-assembler.cc b/src/code-stub-assembler.cc
index 6adec9aff6fd100da4efb4b4baa942b281566f2f..21ff8e8d1a45339e0a9e6a5b3ea882731ff36354 100644
--- a/src/code-stub-assembler.cc
+++ b/src/code-stub-assembler.cc
@@ -1041,6 +1041,49 @@ Node* CodeStubAssembler::LoadFixedArrayElement(Node* object, Node* index_node,
   return Load(MachineType::AnyTagged(), object, offset);
 }
 
+Node* CodeStubAssembler::LoadFixedTypedArrayElement(
+    Node* data_pointer, Node* index_node, ElementsKind elements_kind,
+    ParameterMode parameter_mode) {
+  Node* offset =
+      ElementOffsetFromIndex(index_node, elements_kind, parameter_mode, 0);
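+  // Note: {data_pointer} is the untagged backing-store pointer of the typed
+  // array, so the offset computed here is a pure index-times-element-size
+  // scaling with no object header bias (the base size argument is 0).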
+  MachineType type;
+  switch (elements_kind) {
+    case UINT8_ELEMENTS: /* fall through */
+    case UINT8_CLAMPED_ELEMENTS:
+      type = MachineType::Uint8();
+      break;
+    case INT8_ELEMENTS:
+      type = MachineType::Int8();
+      break;
+    case UINT16_ELEMENTS:
+      type = MachineType::Uint16();
+      break;
+    case INT16_ELEMENTS:
+      type = MachineType::Int16();
+      break;
+    case UINT32_ELEMENTS:
+      type = MachineType::Uint32();
+      break;
+    case INT32_ELEMENTS:
+      type = MachineType::Int32();
+      break;
+    case FLOAT32_ELEMENTS:
+      type = MachineType::Float32();
+      break;
+    case FLOAT64_ELEMENTS:
+      type = MachineType::Float64();
+      break;
+    default:
+      UNREACHABLE();
+  }
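+  // The MachineType selected above fixes both the width and the signedness
+  // (or floating-point representation) of the raw memory load below.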
+  return Load(type, data_pointer, offset);
+}
+
 Node* CodeStubAssembler::LoadAndUntagToWord32FixedArrayElement(
     Node* object, Node* index_node, int additional_offset,
     ParameterMode parameter_mode) {
@@ -7242,5 +7280,238 @@ compiler::Node* CodeStubAssembler::InstanceOf(compiler::Node* object,
   return result.value();
 }
 
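+// Increments {value}, which must be a Smi or a HeapNumber, by one, taking
+// the fast Smi path and falling back to float64 arithmetic on overflow.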
+compiler::Node* CodeStubAssembler::NumberInc(compiler::Node* value) {
+  Variable var_result(this, MachineRepresentation::kTagged),
+      var_finc_value(this, MachineRepresentation::kFloat64);
+  Label if_issmi(this), if_isnotsmi(this), do_finc(this), end(this);
+  Branch(TaggedIsSmi(value), &if_issmi, &if_isnotsmi);
+
+  Bind(&if_issmi);
+  {
+    // Try fast Smi addition first.
+    Node* one = SmiConstant(Smi::FromInt(1));
+    Node* pair = SmiAddWithOverflow(value, one);
+    Node* overflow = Projection(1, pair);
+
+    // Check if the Smi addition overflowed.
+    Label if_overflow(this), if_notoverflow(this);
+    Branch(overflow, &if_overflow, &if_notoverflow);
+
+    Bind(&if_notoverflow);
+    var_result.Bind(Projection(0, pair));
+    Goto(&end);
+
+    Bind(&if_overflow);
+    {
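+      // The sum does not fit in a Smi (e.g. Smi::kMaxValue + 1), so redo the
+      // increment in float64 and tag the result in the do_finc block below.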
+      var_finc_value.Bind(SmiToFloat64(value));
+      Goto(&do_finc);
+    }
+  }
+
+  Bind(&if_isnotsmi);
+  {
+    // Check if the value is a HeapNumber.
+    Assert(IsHeapNumberMap(LoadMap(value)));
+
+    // Load the HeapNumber value.
+    var_finc_value.Bind(LoadHeapNumberValue(value));
+    Goto(&do_finc);
+  }
+
+  Bind(&do_finc);
+  {
+    Node* finc_value = var_finc_value.value();
+    Node* one = Float64Constant(1.0);
+    Node* finc_result = Float64Add(finc_value, one);
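+    // ChangeFloat64ToTagged returns a Smi when the result fits in Smi range
+    // and allocates a fresh HeapNumber otherwise.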
+    var_result.Bind(ChangeFloat64ToTagged(finc_result));
+    Goto(&end);
+  }
+
+  Bind(&end);
+  return var_result.value();
+}
+
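+// Creates an array iterator for {array}. The iterator map encodes both the
+// iteration kind and the array's ElementsKind, so the iterator builtins can
+// later dispatch on the iterator's map alone.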
+compiler::Node* CodeStubAssembler::CreateArrayIterator(
+    compiler::Node* array, compiler::Node* array_map,
+    compiler::Node* array_type, compiler::Node* context, IterationKind mode) {
+  int kBaseMapIndex;
+  switch (mode) {
+    case IterationKind::kKeys:
+      kBaseMapIndex = Context::TYPED_ARRAY_KEY_ITERATOR_MAP_INDEX;
+      break;
+    case IterationKind::kValues:
+      kBaseMapIndex = Context::UINT8_ARRAY_VALUE_ITERATOR_MAP_INDEX;
+      break;
+    case IterationKind::kEntries:
+      kBaseMapIndex = Context::UINT8_ARRAY_KEY_VALUE_ITERATOR_MAP_INDEX;
+      break;
+  }
+
+  // Fast Array iterator map index:
+  //   (kBaseMapIndex + kFastIteratorOffset) + ElementsKind (for JSArrays)
+  //   kBaseMapIndex + (ElementsKind - UINT8_ELEMENTS) (for JSTypedArrays)
+  const int kFastIteratorOffset =
+      Context::FAST_SMI_ARRAY_VALUE_ITERATOR_MAP_INDEX -
+      Context::UINT8_ARRAY_VALUE_ITERATOR_MAP_INDEX;
+  STATIC_ASSERT(kFastIteratorOffset ==
+                (Context::FAST_SMI_ARRAY_KEY_VALUE_ITERATOR_MAP_INDEX -
+                 Context::UINT8_ARRAY_KEY_VALUE_ITERATOR_MAP_INDEX));
+
+  // Slow Array iterator map index: (kBaseMapIndex + kSlowIteratorOffset)
+  const int kSlowIteratorOffset =
+      Context::GENERIC_ARRAY_VALUE_ITERATOR_MAP_INDEX -
+      Context::UINT8_ARRAY_VALUE_ITERATOR_MAP_INDEX;
+  STATIC_ASSERT(kSlowIteratorOffset ==
+                (Context::GENERIC_ARRAY_KEY_VALUE_ITERATOR_MAP_INDEX -
+                 Context::UINT8_ARRAY_KEY_VALUE_ITERATOR_MAP_INDEX));
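+  // Worked example: with mode == kValues and a FAST_SMI_ELEMENTS JSArray,
+  // kBaseMapIndex + kFastIteratorOffset + FAST_SMI_ELEMENTS yields
+  // FAST_SMI_ARRAY_VALUE_ITERATOR_MAP_INDEX (FAST_SMI_ELEMENTS is 0).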
+
+  // Assert: Type(array) is Object
+  Assert(Int32GreaterThanOrEqual(array_type,
+                                 Int32Constant(FIRST_JS_RECEIVER_TYPE)));
+
+  Variable var_result(this, MachineRepresentation::kTagged);
+  Variable var_map_index(this, MachineType::PointerRepresentation());
+
+  Label return_result(this);
+  Label allocate_array_iterator(this);
+  Label allocate_typed_array_iterator(this);
+
+  var_result.Bind(UndefinedConstant());
+  var_map_index.Bind(IntPtrConstant(0));
+
+  if (mode == IterationKind::kKeys) {
+    // There are only two key iterator maps, so branch on whether the
+    // receiver is a TypedArray.
+    Label if_istypedarray(this), if_isgeneric(this);
+    Branch(Word32Equal(array_type, Int32Constant(JS_TYPED_ARRAY_TYPE)),
+           &if_istypedarray, &if_isgeneric);
+    Bind(&if_istypedarray);
+    var_map_index.Bind(
+        IntPtrConstant(Context::TYPED_ARRAY_KEY_ITERATOR_MAP_INDEX));
+    Goto(&allocate_typed_array_iterator);
+
+    Bind(&if_isgeneric);
+    var_map_index.Bind(
+        IntPtrConstant(Context::GENERIC_ARRAY_KEY_ITERATOR_MAP_INDEX));
+    Goto(&allocate_array_iterator);
+  } else {
+    Label if_istypedarray(this), if_isgeneric(this);
+    Branch(Word32Equal(array_type, Int32Constant(JS_TYPED_ARRAY_TYPE)),
+           &if_istypedarray, &if_isgeneric);
+
+    Bind(&if_isgeneric);
+    {
+      Label if_isfast(this), if_isslow(this);
+      BranchIfFastJSArray(array, context, &if_isfast, &if_isslow);
+
+      Bind(&if_isfast);
+      {
+        Node* map_index =
+            IntPtrAdd(IntPtrConstant(kBaseMapIndex + kFastIteratorOffset),
+                      LoadMapElementsKind(array_map));
+        Assert(IntPtrGreaterThanOrEqual(
+            map_index, IntPtrConstant(kBaseMapIndex + kFastIteratorOffset)));
+        Assert(IntPtrLessThan(
+            map_index, IntPtrConstant(kBaseMapIndex + kSlowIteratorOffset)));
+
+        var_map_index.Bind(map_index);
+        Goto(&allocate_array_iterator);
+      }
+
+      Bind(&if_isslow);
+      {
+        Node* map_index =
+            IntPtrConstant(kBaseMapIndex + kSlowIteratorOffset);
+        var_map_index.Bind(map_index);
+        Goto(&allocate_array_iterator);
+      }
+    }
+
+    Bind(&if_istypedarray);
+    {
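+      // The typed array iterator maps are laid out contiguously in
+      // ElementsKind order, starting at the UINT8_ELEMENTS slot.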
+      Node* map_index =
+          IntPtrAdd(IntPtrConstant(kBaseMapIndex - UINT8_ELEMENTS),
+                    LoadMapElementsKind(array_map));
+      Assert(IntPtrLessThan(
+          map_index, IntPtrConstant(kBaseMapIndex + kFastIteratorOffset)));
+      Assert(
+          IntPtrGreaterThanOrEqual(map_index, IntPtrConstant(kBaseMapIndex)));
+      var_map_index.Bind(map_index);
+      Goto(&allocate_typed_array_iterator);
+    }
+  }
+
+  Bind(&allocate_array_iterator);
+  {
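+    // Load the iterator map from the computed slot of the native context.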
+    Node* map = LoadFixedArrayElement(LoadNativeContext(context),
+                                      var_map_index.value(), 0,
+                                      CodeStubAssembler::INTPTR_PARAMETERS);
+    var_result.Bind(AllocateJSArrayIterator(array, array_map, map));
+    Goto(&return_result);
+  }
+
+  Bind(&allocate_typed_array_iterator);
+  {
+    Node* map = LoadFixedArrayElement(LoadNativeContext(context),
+                                      var_map_index.value(), 0,
+                                      CodeStubAssembler::INTPTR_PARAMETERS);
+    var_result.Bind(AllocateJSTypedArrayIterator(array, map));
+    Goto(&return_result);
+  }
+
+  Bind(&return_result);
+  return var_result.value();
+}
+
+compiler::Node* CodeStubAssembler::AllocateJSArrayIterator(
+    compiler::Node* array, compiler::Node* array_map, compiler::Node* map) {
+  Node* iterator = Allocate(JSArrayIterator::kSize);
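+  // The iterator was freshly allocated above, so the initializing stores
+  // below can safely skip the write barrier.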
+  StoreMapNoWriteBarrier(iterator, map);
+  StoreObjectFieldRoot(iterator, JSArrayIterator::kPropertiesOffset,
+                       Heap::kEmptyFixedArrayRootIndex);
+  StoreObjectFieldRoot(iterator, JSArrayIterator::kElementsOffset,
+                       Heap::kEmptyFixedArrayRootIndex);
+  StoreObjectFieldNoWriteBarrier(iterator,
+                                 JSArrayIterator::kIteratedObjectOffset, array);
+  StoreObjectFieldNoWriteBarrier(iterator, JSArrayIterator::kNextIndexOffset,
+                                 SmiConstant(Smi::FromInt(0)));
+  StoreObjectFieldNoWriteBarrier(
+      iterator, JSArrayIterator::kIteratedObjectMapOffset, array_map);
+  return iterator;
+}
+
+compiler::Node* CodeStubAssembler::AllocateJSTypedArrayIterator(
+    compiler::Node* array, compiler::Node* map) {
+  Node* iterator = Allocate(JSTypedArrayIterator::kSize);
+  StoreMapNoWriteBarrier(iterator, map);
+  StoreObjectFieldRoot(iterator, JSTypedArrayIterator::kPropertiesOffset,
+                       Heap::kEmptyFixedArrayRootIndex);
+  StoreObjectFieldRoot(iterator, JSTypedArrayIterator::kElementsOffset,
+                       Heap::kEmptyFixedArrayRootIndex);
+  StoreObjectFieldNoWriteBarrier(
+      iterator, JSTypedArrayIterator::kIteratedObjectOffset, array);
+  StoreObjectFieldNoWriteBarrier(iterator,
+                                 JSTypedArrayIterator::kNextIndexOffset,
+                                 SmiConstant(Smi::FromInt(0)));
+  return iterator;
+}
+
 }  // namespace internal
 }  // namespace v8