OLD | NEW |
1 // Copyright 2016 the V8 project authors. All rights reserved. | 1 // Copyright 2016 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/code-stub-assembler.h" | 5 #include "src/code-stub-assembler.h" |
6 #include "src/code-factory.h" | 6 #include "src/code-factory.h" |
7 #include "src/frames-inl.h" | 7 #include "src/frames-inl.h" |
8 #include "src/frames.h" | 8 #include "src/frames.h" |
9 #include "src/ic/handler-configuration.h" | 9 #include "src/ic/handler-configuration.h" |
10 #include "src/ic/stub-cache.h" | 10 #include "src/ic/stub-cache.h" |
(...skipping 1023 matching lines...) |
1034 Node* CodeStubAssembler::LoadFixedArrayElement(Node* object, Node* index_node, | 1034 Node* CodeStubAssembler::LoadFixedArrayElement(Node* object, Node* index_node, |
1035 int additional_offset, | 1035 int additional_offset, |
1036 ParameterMode parameter_mode) { | 1036 ParameterMode parameter_mode) { |
1037 int32_t header_size = | 1037 int32_t header_size = |
1038 FixedArray::kHeaderSize + additional_offset - kHeapObjectTag; | 1038 FixedArray::kHeaderSize + additional_offset - kHeapObjectTag; |
1039 Node* offset = ElementOffsetFromIndex(index_node, FAST_HOLEY_ELEMENTS, | 1039 Node* offset = ElementOffsetFromIndex(index_node, FAST_HOLEY_ELEMENTS, |
1040 parameter_mode, header_size); | 1040 parameter_mode, header_size); |
1041 return Load(MachineType::AnyTagged(), object, offset); | 1041 return Load(MachineType::AnyTagged(), object, offset); |
1042 } | 1042 } |
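
LoadFixedArrayElement computes the load offset relative to the *tagged* array pointer: V8 heap pointers carry a low tag bit, and folding -kHeapObjectTag into the header size once lets Load() add the offset straight to the tagged pointer without untagging it first. A minimal standalone sketch of that arithmetic, assuming illustrative constants (the tag, pointer size, and header size below are assumptions for the sketch, not taken from V8's headers):

    #include <cstdint>
    #include <cstdio>

    constexpr int64_t kHeapObjectTag = 1;       // tagged pointers have the low bit set
    constexpr int64_t kPointerSize = 8;
    constexpr int64_t kFixedArrayHeaderSize = 2 * kPointerSize;  // map + length

    // Byte offset of element `index`, relative to the tagged array pointer.
    int64_t ElementOffsetFromIndex(int64_t index, int64_t additional_offset) {
      int64_t header_size =
          kFixedArrayHeaderSize + additional_offset - kHeapObjectTag;
      return header_size + index * kPointerSize;  // pointer-sized tagged slots
    }

    int main() {
      // Element 3 of a plain FixedArray: header (16) - tag (1) + 3 * 8 = 39.
      std::printf("%lld\n", (long long)ElementOffsetFromIndex(3, 0));
    }
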
1043 | 1043 |
| 1044 Node* CodeStubAssembler::LoadFixedTypedArrayElement( |
| 1045 Node* data_pointer, Node* index_node, ElementsKind elements_kind, |
| 1046 ParameterMode parameter_mode) { |
| 1047 Node* offset = |
| 1048 ElementOffsetFromIndex(index_node, elements_kind, parameter_mode, 0); |
| 1049 MachineType type; |
| 1050 switch (elements_kind) { |
| 1051 case UINT8_ELEMENTS: /* fall through */ |
| 1052 case UINT8_CLAMPED_ELEMENTS: |
| 1053 type = MachineType::Uint8(); |
| 1054 break; |
| 1055 case INT8_ELEMENTS: |
| 1056 type = MachineType::Int8(); |
| 1057 break; |
| 1058 case UINT16_ELEMENTS: |
| 1059 type = MachineType::Uint16(); |
| 1060 break; |
| 1061 case INT16_ELEMENTS: |
| 1062 type = MachineType::Int16(); |
| 1063 break; |
| 1064 case UINT32_ELEMENTS: |
| 1065 type = MachineType::Uint32(); |
| 1066 break; |
| 1067 case INT32_ELEMENTS: |
| 1068 type = MachineType::Int32(); |
| 1069 break; |
| 1070 case FLOAT32_ELEMENTS: |
| 1071 type = MachineType::Float32(); |
| 1072 break; |
| 1073 case FLOAT64_ELEMENTS: |
| 1074 type = MachineType::Float64(); |
| 1075 break; |
| 1076 default: |
| 1077 UNREACHABLE(); |
| 1078 } |
| 1079 return Load(type, data_pointer, offset); |
| 1080 } |
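
The switch above maps each external ElementsKind to the MachineType of a single element; UINT8_CLAMPED shares Uint8 because clamping only matters on store, not on load. A standalone sketch of the same dispatch, with a hypothetical enum and helper names rather than V8's API, where the kind fixes both the element width (whose log2 is the shift ElementOffsetFromIndex applies) and how the raw bytes are interpreted:

    #include <cstdint>
    #include <cstring>

    enum Kind { U8, I8, U16, I16, U32, I32, F32, F64 };  // illustrative only

    int ElementSize(Kind k) {  // bytes per element
      switch (k) {
        case U8: case I8: return 1;
        case U16: case I16: return 2;
        case U32: case I32: case F32: return 4;
        case F64: return 8;
      }
      return 0;
    }

    // Typed load from a raw backing store, analogous to Load(type, data, offset).
    double LoadElement(const uint8_t* data, Kind k, size_t i) {
      const uint8_t* p = data + i * ElementSize(k);
      switch (k) {
        case U8: return p[0];
        case I8: return static_cast<int8_t>(p[0]);
        case U16: { uint16_t v; std::memcpy(&v, p, 2); return v; }
        case I16: { int16_t v; std::memcpy(&v, p, 2); return v; }
        case U32: { uint32_t v; std::memcpy(&v, p, 4); return v; }
        case I32: { int32_t v; std::memcpy(&v, p, 4); return v; }
        case F32: { float v; std::memcpy(&v, p, 4); return v; }
        case F64: { double v; std::memcpy(&v, p, 8); return v; }
      }
      return 0.0;
    }
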
| 1081 |
1044 Node* CodeStubAssembler::LoadAndUntagToWord32FixedArrayElement( | 1082 Node* CodeStubAssembler::LoadAndUntagToWord32FixedArrayElement( |
1045 Node* object, Node* index_node, int additional_offset, | 1083 Node* object, Node* index_node, int additional_offset, |
1046 ParameterMode parameter_mode) { | 1084 ParameterMode parameter_mode) { |
1047 int32_t header_size = | 1085 int32_t header_size = |
1048 FixedArray::kHeaderSize + additional_offset - kHeapObjectTag; | 1086 FixedArray::kHeaderSize + additional_offset - kHeapObjectTag; |
1049 #if V8_TARGET_LITTLE_ENDIAN | 1087 #if V8_TARGET_LITTLE_ENDIAN |
1050 if (Is64()) { | 1088 if (Is64()) { |
1051 header_size += kPointerSize / 2; | 1089 header_size += kPointerSize / 2; |
1052 } | 1090 } |
1053 #endif | 1091 #endif |
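
The kPointerSize / 2 bump exists because on 64-bit targets a Smi keeps its 32-bit payload in the upper half of the word. On a little-endian machine those are the four bytes at offset +4, so loading just that half reads and untags in a single 32-bit load; big-endian targets find the payload at the base offset. A standalone sketch of the layout, assuming GCC/Clang byte-order macros:

    #include <cstdint>
    #include <cstring>
    #include <cstdio>

    // Assumed 64-bit Smi encoding for the sketch: value in the upper 32 bits,
    // all-zero tag bits in the lower 32.
    uint64_t TagSmi(int32_t value) {
      return static_cast<uint64_t>(static_cast<uint32_t>(value)) << 32;
    }

    // Untag by loading only the 32-bit half that holds the payload.
    int32_t LoadAndUntagToWord32(const void* slot) {
      int32_t payload;
    #if defined(__BYTE_ORDER__) && __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__
      std::memcpy(&payload, static_cast<const char*>(slot) + 4, 4);  // upper half
    #else
      std::memcpy(&payload, slot, 4);  // big-endian: payload bytes come first
    #endif
      return payload;
    }

    int main() {
      uint64_t smi = TagSmi(-42);
      std::printf("%d\n", LoadAndUntagToWord32(&smi));  // prints -42
    }
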
(...skipping 6181 matching lines...) |
7235 Bind(&return_runtime); | 7273 Bind(&return_runtime); |
7236 { | 7274 { |
7237 result.Bind(CallRuntime(Runtime::kInstanceOf, context, object, callable)); | 7275 result.Bind(CallRuntime(Runtime::kInstanceOf, context, object, callable)); |
7238 Goto(&end); | 7276 Goto(&end); |
7239 } | 7277 } |
7240 | 7278 |
7241 Bind(&end); | 7279 Bind(&end); |
7242 return result.value(); | 7280 return result.value(); |
7243 } | 7281 } |
7244 | 7282 |
| 7283 compiler::Node* CodeStubAssembler::NumberInc(compiler::Node* value) { |
| 7284 Variable var_result(this, MachineRepresentation::kTagged), |
| 7285 var_finc_value(this, MachineRepresentation::kFloat64); |
| 7286 Label if_issmi(this), if_isnotsmi(this), do_finc(this), end(this); |
| 7287 Branch(TaggedIsSmi(value), &if_issmi, &if_isnotsmi); |
| 7288 |
| 7289 Bind(&if_issmi); |
| 7290 { |
| 7291 // Try fast Smi addition first. |
| 7292 Node* one = SmiConstant(Smi::FromInt(1)); |
| 7293 Node* pair = SmiAddWithOverflow(value, one); |
| 7294 Node* overflow = Projection(1, pair); |
| 7295 |
| 7296 // Check if the Smi addition overflowed. |
| 7297 Label if_overflow(this), if_notoverflow(this); |
| 7298 Branch(overflow, &if_overflow, &if_notoverflow); |
| 7299 |
| 7300 Bind(&if_notoverflow); |
| 7301 var_result.Bind(Projection(0, pair)); |
| 7302 Goto(&end); |
| 7303 |
| 7304 Bind(&if_overflow); |
| 7305 { |
| 7306 var_finc_value.Bind(SmiToFloat64(value)); |
| 7307 Goto(&do_finc); |
| 7308 } |
| 7309 } |
| 7310 |
| 7311 Bind(&if_isnotsmi); |
| 7312 { |
| 7313 // Check if the value is a HeapNumber. |
| 7314 Assert(IsHeapNumberMap(LoadMap(value))); |
| 7315 |
| 7316 // Load the HeapNumber value. |
| 7317 var_finc_value.Bind(LoadHeapNumberValue(value)); |
| 7318 Goto(&do_finc); |
| 7319 } |
| 7320 |
| 7321 Bind(&do_finc); |
| 7322 { |
| 7323 Node* finc_value = var_finc_value.value(); |
| 7324 Node* one = Float64Constant(1.0); |
| 7325 Node* finc_result = Float64Add(finc_value, one); |
| 7326 var_result.Bind(ChangeFloat64ToTagged(finc_result)); |
| 7327 Goto(&end); |
| 7328 } |
| 7329 |
| 7330 Bind(&end); |
| 7331 return var_result.value(); |
| 7332 } |
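
NumberInc tries a tagged Smi addition with an overflow projection first, and falls through to float64 arithmetic only when the input is a HeapNumber or the Smi addition overflows. The control flow, restated as a minimal plain-C++ sketch (the Number struct and the 31-bit Smi range are illustrative assumptions; V8's actual Smi range is platform-dependent):

    #include <cstdint>

    struct Number {       // stand-in for a tagged value
      bool is_smi;
      int32_t smi;        // valid when is_smi
      double heap_number; // valid otherwise
    };

    Number NumberInc(Number value) {
      if (value.is_smi) {
        // Fast path: Smi + 1, watching for overflow of the Smi range.
        constexpr int32_t kSmiMax = (1 << 30) - 1;
        if (value.smi < kSmiMax) return {true, value.smi + 1, 0.0};
        // Overflowed: redo the increment in float64, as the do_finc block does.
        return {false, 0, static_cast<double>(value.smi) + 1.0};
      }
      // HeapNumber path: load the float64 payload and increment it.
      return {false, 0, value.heap_number + 1.0};
    }
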
| 7333 |
| 7334 compiler::Node* CodeStubAssembler::CreateArrayIterator( |
| 7335 compiler::Node* array, compiler::Node* array_map, |
| 7336 compiler::Node* array_type, compiler::Node* context, IterationKind mode) { |
| 7337 int kBaseMapIndex; |
| 7338 switch (mode) { |
| 7339 case IterationKind::kKeys: |
| 7340 kBaseMapIndex = Context::TYPED_ARRAY_KEY_ITERATOR_MAP_INDEX; |
| 7341 break; |
| 7342 case IterationKind::kValues: |
| 7343 kBaseMapIndex = Context::UINT8_ARRAY_VALUE_ITERATOR_MAP_INDEX; |
| 7344 break; |
| 7345 case IterationKind::kEntries: |
| 7346 kBaseMapIndex = Context::UINT8_ARRAY_KEY_VALUE_ITERATOR_MAP_INDEX; |
| 7347 break; |
| 7348 } |
| 7349 |
| 7350   // Fast Array iterator map index: |
| 7351   //   (kBaseMapIndex + kFastIteratorOffset) + ElementsKind (for JSArrays) |
| 7352   //   kBaseMapIndex + (ElementsKind - UINT8_ELEMENTS) (for JSTypedArrays) |
| 7353 const int kFastIteratorOffset = |
| 7354 Context::FAST_SMI_ARRAY_VALUE_ITERATOR_MAP_INDEX - |
| 7355 Context::UINT8_ARRAY_VALUE_ITERATOR_MAP_INDEX; |
| 7356 STATIC_ASSERT(kFastIteratorOffset == |
| 7357 (Context::FAST_SMI_ARRAY_KEY_VALUE_ITERATOR_MAP_INDEX - |
| 7358 Context::UINT8_ARRAY_KEY_VALUE_ITERATOR_MAP_INDEX)); |
| 7359 |
| 7360   // Slow Array iterator map index: (kBaseMapIndex + kSlowIteratorOffset) |
| 7361 const int kSlowIteratorOffset = |
| 7362 Context::GENERIC_ARRAY_VALUE_ITERATOR_MAP_INDEX - |
| 7363 Context::UINT8_ARRAY_VALUE_ITERATOR_MAP_INDEX; |
| 7364 STATIC_ASSERT(kSlowIteratorOffset == |
| 7365 (Context::GENERIC_ARRAY_KEY_VALUE_ITERATOR_MAP_INDEX - |
| 7366 Context::UINT8_ARRAY_KEY_VALUE_ITERATOR_MAP_INDEX)); |
| 7367 |
| 7368 // Assert: Type(array) is Object |
| 7369 Assert(Int32GreaterThanOrEqual(array_type, |
| 7370 Int32Constant(FIRST_JS_RECEIVER_TYPE))); |
| 7371 |
| 7372 Variable var_result(this, MachineRepresentation::kTagged); |
| 7373 Variable var_map_index(this, MachineType::PointerRepresentation()); |
| 7374 |
| 7375 Label return_result(this); |
| 7376 Label allocate_array_iterator(this); |
| 7377 Label allocate_typed_array_iterator(this); |
| 7378 |
| 7379 var_result.Bind(UndefinedConstant()); |
| 7380 var_map_index.Bind(IntPtrConstant(0)); |
| 7381 |
| 7382 if (mode == IterationKind::kKeys) { |
| 7383     // There are only two key iterator maps; branch on whether or not |
| 7384     // the receiver is a TypedArray. |
| 7385 Label if_istypedarray(this), if_isgeneric(this); |
| 7386 Branch(Word32Equal(array_type, Int32Constant(JS_TYPED_ARRAY_TYPE)), |
| 7387 &if_istypedarray, &if_isgeneric); |
| 7388 Bind(&if_istypedarray); |
| 7389 var_map_index.Bind( |
| 7390 IntPtrConstant(Context::TYPED_ARRAY_KEY_ITERATOR_MAP_INDEX)); |
| 7391 Goto(&allocate_typed_array_iterator); |
| 7392 |
| 7393 Bind(&if_isgeneric); |
| 7394 var_map_index.Bind( |
| 7395 IntPtrConstant(Context::GENERIC_ARRAY_KEY_ITERATOR_MAP_INDEX)); |
| 7396 Goto(&allocate_array_iterator); |
| 7397 } else { |
| 7398 Label if_istypedarray(this), if_isgeneric(this); |
| 7399 Branch(Word32Equal(array_type, Int32Constant(JS_TYPED_ARRAY_TYPE)), |
| 7400 &if_istypedarray, &if_isgeneric); |
| 7401 |
| 7402 Bind(&if_isgeneric); |
| 7403 { |
| 7404 Label if_isfast(this), if_isslow(this); |
| 7405 BranchIfFastJSArray(array, context, &if_isfast, &if_isslow); |
| 7406 |
| 7407 Bind(&if_isfast); |
| 7408 { |
| 7409 Node* map_index = |
| 7410 IntPtrAdd(IntPtrConstant(kBaseMapIndex + kFastIteratorOffset), |
| 7411 LoadMapElementsKind(array_map)); |
| 7412 Assert(IntPtrGreaterThanOrEqual( |
| 7413 map_index, IntPtrConstant(kBaseMapIndex + kFastIteratorOffset))); |
| 7414 Assert(IntPtrLessThan( |
| 7415 map_index, IntPtrConstant(kBaseMapIndex + kSlowIteratorOffset))); |
| 7416 |
| 7417 var_map_index.Bind(map_index); |
| 7418 Goto(&allocate_array_iterator); |
| 7419 } |
| 7420 |
| 7421 Bind(&if_isslow); |
| 7422 { |
| 7423 Node* map_index = IntPtrAdd(IntPtrConstant(kBaseMapIndex), |
| 7424 IntPtrConstant(kSlowIteratorOffset)); |
| 7425 var_map_index.Bind(map_index); |
| 7426 Goto(&allocate_array_iterator); |
| 7427 } |
| 7428 } |
| 7429 |
| 7430 Bind(&if_istypedarray); |
| 7431 { |
| 7432 Node* map_index = |
| 7433 IntPtrAdd(IntPtrConstant(kBaseMapIndex - UINT8_ELEMENTS), |
| 7434 LoadMapElementsKind(array_map)); |
| 7435 Assert(IntPtrLessThan( |
| 7436 map_index, IntPtrConstant(kBaseMapIndex + kFastIteratorOffset))); |
| 7437 Assert( |
| 7438 IntPtrGreaterThanOrEqual(map_index, IntPtrConstant(kBaseMapIndex))); |
| 7439 var_map_index.Bind(map_index); |
| 7440 Goto(&allocate_typed_array_iterator); |
| 7441 } |
| 7442 } |
| 7443 |
| 7444 Bind(&allocate_array_iterator); |
| 7445 { |
| 7446 Node* map = |
| 7447 LoadFixedArrayElement(LoadNativeContext(context), var_map_index.value(), |
| 7448 0, CodeStubAssembler::INTPTR_PARAMETERS); |
| 7449 var_result.Bind(AllocateJSArrayIterator(array, array_map, map)); |
| 7450 Goto(&return_result); |
| 7451 } |
| 7452 |
| 7453 Bind(&allocate_typed_array_iterator); |
| 7454 { |
| 7455 Node* map = |
| 7456 LoadFixedArrayElement(LoadNativeContext(context), var_map_index.value(), |
| 7457 0, CodeStubAssembler::INTPTR_PARAMETERS); |
| 7458 var_result.Bind(AllocateJSTypedArrayIterator(array, map)); |
| 7459 Goto(&return_result); |
| 7460 } |
| 7461 |
| 7462 Bind(&return_result); |
| 7463 return var_result.value(); |
| 7464 } |
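
CreateArrayIterator never materializes a table of iterator maps: it picks the map by index arithmetic over contiguous native-context slots, which is exactly what the two STATIC_ASSERTs pin down. A sketch of the slot arithmetic, with made-up index values standing in for the real Context constants:

    // Hypothetical slot indices for the sketch; the real values live in Context.
    constexpr int kUint8Elements = 20;      // pretend UINT8_ELEMENTS value
    constexpr int kBaseMapIndex = 100;      // pretend UINT8_ARRAY_*_ITERATOR slot
    constexpr int kFastIteratorOffset = 9;  // fast JSArray maps follow the typed kinds
    constexpr int kSlowIteratorOffset = 15; // one generic map after the fast ones

    int IteratorMapIndex(bool is_typed_array, bool is_fast_jsarray,
                         int elements_kind) {
      if (is_typed_array)  // UINT8_ELEMENTS lands on the base slot
        return kBaseMapIndex - kUint8Elements + elements_kind;
      if (is_fast_jsarray)  // one map per fast ElementsKind
        return kBaseMapIndex + kFastIteratorOffset + elements_kind;
      return kBaseMapIndex + kSlowIteratorOffset;  // single generic slow map
    }

The Asserts in the fast and typed-array paths bound map_index to the corresponding slot range, so a corrupt ElementsKind cannot silently index an unrelated context slot.
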
| 7465 |
| 7466 compiler::Node* CodeStubAssembler::AllocateJSArrayIterator( |
| 7467 compiler::Node* array, compiler::Node* array_map, compiler::Node* map) { |
| 7468 Node* iterator = Allocate(JSArrayIterator::kSize); |
| 7469 StoreMapNoWriteBarrier(iterator, map); |
| 7470 StoreObjectFieldRoot(iterator, JSArrayIterator::kPropertiesOffset, |
| 7471 Heap::kEmptyFixedArrayRootIndex); |
| 7472 StoreObjectFieldRoot(iterator, JSArrayIterator::kElementsOffset, |
| 7473 Heap::kEmptyFixedArrayRootIndex); |
| 7474 StoreObjectFieldNoWriteBarrier(iterator, |
| 7475 JSArrayIterator::kIteratedObjectOffset, array); |
| 7476 StoreObjectFieldNoWriteBarrier(iterator, JSArrayIterator::kNextIndexOffset, |
| 7477 SmiConstant(Smi::FromInt(0))); |
| 7478 StoreObjectFieldNoWriteBarrier( |
| 7479 iterator, JSArrayIterator::kIteratedObjectMapOffset, array_map); |
| 7480 return iterator; |
| 7481 } |
| 7482 |
| 7483 compiler::Node* CodeStubAssembler::AllocateJSTypedArrayIterator( |
| 7484 compiler::Node* array, compiler::Node* map) { |
| 7485 Node* iterator = Allocate(JSTypedArrayIterator::kSize); |
| 7486 StoreMapNoWriteBarrier(iterator, map); |
| 7487 StoreObjectFieldRoot(iterator, JSTypedArrayIterator::kPropertiesOffset, |
| 7488 Heap::kEmptyFixedArrayRootIndex); |
| 7489 StoreObjectFieldRoot(iterator, JSTypedArrayIterator::kElementsOffset, |
| 7490 Heap::kEmptyFixedArrayRootIndex); |
| 7491 StoreObjectFieldNoWriteBarrier( |
| 7492 iterator, JSTypedArrayIterator::kIteratedObjectOffset, array); |
| 7493 StoreObjectFieldNoWriteBarrier(iterator, |
| 7494 JSTypedArrayIterator::kNextIndexOffset, |
| 7495 SmiConstant(Smi::FromInt(0))); |
| 7496 return iterator; |
| 7497 } |
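
Both allocators fill every field with no-write-barrier stores, which is safe because the iterator is freshly allocated and nothing in old space can point at it yet. The resulting layouts, restated as illustrative plain-C++ structs (tagged heap slots stand in as an opaque ref type):

    struct Ref {};  // stand-in for a tagged heap value

    struct JSArrayIteratorLayout {
      Ref map;                  // iterator map chosen from the native context
      Ref properties;           // empty_fixed_array root
      Ref elements;             // empty_fixed_array root
      Ref iterated_object;      // the array being iterated
      Ref next_index;           // Smi 0
      Ref iterated_object_map;  // snapshot of the array's map
    };

    struct JSTypedArrayIteratorLayout {
      Ref map;
      Ref properties;
      Ref elements;
      Ref iterated_object;
      Ref next_index;  // Smi 0; no map snapshot field
    };

Presumably the JSArrayIterator snapshots the array's map so the fast next() path can cheaply detect elements-kind transitions; a typed array's kind is fixed, so its iterator omits that field.
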
| 7498 |
7245 } // namespace internal | 7499 } // namespace internal |
7246 } // namespace v8 | 7500 } // namespace v8 |